Skip to content

Commit

Permalink
Organize data structs and add Fiat-Shamir transform
Browse files Browse the repository at this point in the history
  • Loading branch information
Ubuntu committed Nov 30, 2023
1 parent 43f6bee commit 0352b05
Show file tree
Hide file tree
Showing 5 changed files with 103 additions and 110 deletions.
9 changes: 8 additions & 1 deletion core/data.go
Original file line number Diff line number Diff line change
Expand Up @@ -161,9 +161,16 @@ func (cb Bundles) Serialize() ([][][]byte, error) {
return data, nil
}

// Sample is a chunk with associated metadata used by the Universal Batch Verifier.
type Sample struct {
	Commitment *Commitment // KZG commitment the chunk is verified against
	Chunk      *Chunk      // contains the proof and the coefficients
	EvalIndex  ChunkNumber // leading-coset index at which the chunk is evaluated — TODO confirm against GetLeadingCosetIndex usage
	BlobIndex  int         // index of the blob this sample belongs to within the batch
}

// SubBatch is a part of the whole Batch with identical Encoding Parameters, i.e. (ChunkLen, NumChunk)
type SubBatch struct {
Samples []Sample // all samples across this sub-batch's blobs sharing the same encoding parameters
NumBlobs int // number of distinct blobs contributing samples to this sub-batch
}
26 changes: 1 addition & 25 deletions core/encoding/encoder.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,9 +2,7 @@ package encoding

import (
"crypto/sha256"
"errors"
"fmt"
"log"

"github.com/Layr-Labs/eigenda/core"
"github.com/Layr-Labs/eigenda/pkg/encoding/encoder"
Expand Down Expand Up @@ -82,24 +80,6 @@ func (e *Encoder) Encode(data []byte, params core.EncodingParams) (core.BlobComm
Coeffs: frame.Coeffs,
Proof: frame.Proof,
}

q, _ := encoder.GetLeadingCosetIndex(uint64(ind), uint64(len(chunks)))
lc := enc.Fs.ExpandedRootsOfUnity[uint64(q)]
ok := frame.Verify(enc.Ks, commit, &lc)
if !ok {
log.Fatalf("Proof %v failed\n", ind)
} else {

fmt.Println("proof", frame.Proof.String())
fmt.Println("commitment", commit.String())
for i := 0; i < len(frame.Coeffs); i++ {
fmt.Printf("%v ", frame.Coeffs[i].String())
}
fmt.Println("q", q, lc.String())

fmt.Println("***************tested frame and pass")
}

}

length := uint(len(encoder.ToFrArray(data)))
Expand Down Expand Up @@ -170,11 +150,7 @@ func (e *Encoder) UniversalVerifyChunks(params core.EncodingParams, samplesCore
samples[i] = sample
}

if e.EncoderGroup.UniversalVerify(encParams, samples, numBlobs) {
return nil
} else {
return errors.New("Universal Verify wrong")
}
return e.EncoderGroup.UniversalVerify(encParams, samples, numBlobs)
}

// Decode takes in the chunks, indices, and encoding parameters and returns the decoded blob
Expand Down
33 changes: 14 additions & 19 deletions core/validator.go
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,6 @@ package core

import (
"errors"
"fmt"
)

var (
Expand Down Expand Up @@ -119,8 +118,7 @@ func (v *chunkValidator) UpdateOperatorID(operatorID OperatorID) {

func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *OperatorState) error {

batchGroup := make(map[EncodingParams][]Sample)
numBlobMap := make(map[EncodingParams]int)
subBatchMap := make(map[EncodingParams]SubBatch)

for i, blob := range blobs {
if len(blob.Bundles) != len(blob.BlobHeader.QuorumInfos) {
Expand Down Expand Up @@ -180,18 +178,8 @@ func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *Oper
// Get Encoding Params
params := EncodingParams{ChunkLength: chunkLength, NumChunks: info.TotalChunks}

// ToDo add a struct
_, ok := batchGroup[params]
if !ok {
batchGroup[params] = make([]Sample, 0)
numBlobMap[params] = 1
} else {
numBlobMap[params] += 1
}

// Check the received chunks against the commitment
indices := assignment.GetIndices()
fmt.Println("indices", indices)
samples := make([]Sample, 0)
for ind := range chunks {
sample := Sample{
Expand All @@ -202,15 +190,22 @@ func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *Oper
}
samples = append(samples, sample)
}
batchGroup[params] = append(batchGroup[params], samples...)

// Sort into subBatch
subBatch, ok := subBatchMap[params]
if !ok {
subBatch.Samples = samples
subBatch.NumBlobs = 1
} else {
subBatch.Samples = append(subBatch.Samples, samples...)
subBatch.NumBlobs += 1
}
}
}

// ToDo parallelize
fmt.Println("num batchGroup", len(batchGroup))
for params, samples := range batchGroup {
numBlobs, _ := numBlobMap[params]
err := v.encoder.UniversalVerifyChunks(params, samples, numBlobs)
// ToDo add parallelization for verification for each subBatch
for params, subBatch := range subBatchMap {
err := v.encoder.UniversalVerifyChunks(params, subBatch.Samples, subBatch.NumBlobs)
if err != nil {
return err
}
Expand Down
137 changes: 72 additions & 65 deletions pkg/encoding/kzgEncoder/multiframe.go
Original file line number Diff line number Diff line change
@@ -1,11 +1,12 @@
package kzgEncoder

import (
"bytes"
"encoding/gob"
"errors"
"fmt"
"log"

rs "github.com/Layr-Labs/eigenda/pkg/encoding/encoder"
kzg "github.com/Layr-Labs/eigenda/pkg/kzg"
bls "github.com/Layr-Labs/eigenda/pkg/kzg/bn254"
)

Expand All @@ -17,49 +18,58 @@ type Sample struct {
X uint // X is int , at which index is evaluated
}

// m is number of blob
func (group *KzgEncoderGroup) UniversalVerify(params rs.EncodingParams, samples []Sample, m int) bool {
verifier, _ := group.GetKzgVerifier(params)
ks := verifier.Ks

for ind, s := range samples {
q, err := rs.GetLeadingCosetIndex(
uint64(s.X),
params.NumChunks,
)
if err != nil {
return false
}
// generate a random value using Fiat Shamir transform
func GenRandomness(params rs.EncodingParams, samples []Sample, m int) (bls.Fr, error) {

lc := ks.FFTSettings.ExpandedRootsOfUnity[uint64(q)]
var buffer bytes.Buffer
enc := gob.NewEncoder(&buffer)
err := enc.Encode(samples)
if err != nil {
return bls.ZERO, err
}

ok := SingleVerify(ks, &s.Commitment, &lc, s.Coeffs, s.Proof)
if !ok {
fmt.Println("proof", s.Proof.String())
fmt.Println("commitment", s.Commitment.String())
err = enc.Encode(params)
if err != nil {
return bls.ZERO, err
}

for i := 0; i < len(s.Coeffs); i++ {
fmt.Printf("%v ", s.Coeffs[i].String())
}
fmt.Println("q", q, lc.String())
err = enc.Encode(m)
if err != nil {
return bls.ZERO, err
}

log.Fatalf("Proof %v failed\n", ind)
} else {
var randomFr bls.Fr

fmt.Println("&&&&&&&&&&&&&&&&&&tested frame and pass", ind)
}
err = bls.HashToSingleField(&randomFr, buffer.Bytes())
if err != nil {
return bls.ZERO, err
}
return randomFr, nil
}

D := len(samples[0].Coeffs) // chunkLen
// m is number of blob
func (group *KzgEncoderGroup) UniversalVerify(params rs.EncodingParams, samples []Sample, m int) error {
verifier, _ := group.GetKzgVerifier(params)
ks := verifier.Ks

D := params.ChunkLen

n := len(samples)

rInt := uint64(22894)
var r bls.Fr
bls.AsFr(&r, rInt)
//rInt := uint64(22894)
//var r bls.Fr
//bls.AsFr(&r, rInt)

r, err := GenRandomness(params, samples, m)
if err != nil {
return err
}

randomsFr := make([]bls.Fr, n)
bls.AsFr(&randomsFr[0], rInt)
//bls.AsFr(&randomsFr[0], rInt)
bls.CopyFr(&randomsFr[0], &r)

fmt.Println("random", r.String())

// lhs
var tmp bls.Fr
Expand Down Expand Up @@ -109,7 +119,7 @@ func (group *KzgEncoderGroup) UniversalVerify(params rs.EncodingParams, samples
coeffs := samples[k].Coeffs

rk := randomsFr[k]
for j := 0; j < D; j++ {
for j := uint64(0); j < D; j++ {
bls.MulModFr(&tmp, &coeffs[j], &rk)
bls.AddModFr(&stCoeffs[j], &stCoeffs[j], &tmp)
}
Expand All @@ -128,14 +138,14 @@ func (group *KzgEncoderGroup) UniversalVerify(params rs.EncodingParams, samples
params.NumChunks,
)
if err != nil {
return false
return err
}

h := ks.ExpandedRootsOfUnity[x]
var hPow bls.Fr
bls.CopyFr(&hPow, &bls.ONE)

for j := 0; j < D; j++ {
for j := uint64(0); j < D; j++ {
bls.MulModFr(&tmp, &hPow, &h)
bls.CopyFr(&hPow, &tmp)
}
Expand All @@ -153,39 +163,36 @@ func (group *KzgEncoderGroup) UniversalVerify(params rs.EncodingParams, samples
bls.SubG1(&rhsG1, ftG1, stG1)
bls.AddG1(&rhsG1, &rhsG1, ttG1)

return bls.PairingsVerify(lhsG1, lhsG2, &rhsG1, rhsG2)
if bls.PairingsVerify(lhsG1, lhsG2, &rhsG1, rhsG2) {
return nil
} else {
return errors.New("Universal Verify Incorrect paring")
}
}

func SingleVerify(ks *kzg.KZGSettings, commitment *bls.G1Point, x *bls.Fr, coeffs []bls.Fr, proof bls.G1Point) bool {
var xPow bls.Fr
bls.CopyFr(&xPow, &bls.ONE)
//func SingleVerify(ks *kzg.KZGSettings, commitment *bls.G1Point, x *bls.Fr, coeffs []bls.Fr, proof bls.G1Point) bool {
// var xPow bls.Fr
// bls.CopyFr(&xPow, &bls.ONE)

var tmp bls.Fr
for i := 0; i < len(coeffs); i++ {
bls.MulModFr(&tmp, &xPow, x)
bls.CopyFr(&xPow, &tmp)
}

// [x^n]_2
var xn2 bls.G2Point
bls.MulG2(&xn2, &bls.GenG2, &xPow)
// var tmp bls.Fr
// for i := 0; i < len(coeffs); i++ {
// bls.MulModFr(&tmp, &xPow, x)
// bls.CopyFr(&xPow, &tmp)
// }

// [s^n - x^n]_2
var xnMinusYn bls.G2Point
bls.SubG2(&xnMinusYn, &ks.Srs.G2[len(coeffs)], &xn2)
// [x^n]_2
// var xn2 bls.G2Point
// bls.MulG2(&xn2, &bls.GenG2, &xPow)

// [interpolation_polynomial(s)]_1
is1 := bls.LinCombG1(ks.Srs.G1[:len(coeffs)], coeffs)
// [commitment - interpolation_polynomial(s)]_1 = [commit]_1 - [interpolation_polynomial(s)]_1
var commitMinusInterpolation bls.G1Point
bls.SubG1(&commitMinusInterpolation, commitment, is1)
// [s^n - x^n]_2
// var xnMinusYn bls.G2Point
// bls.SubG2(&xnMinusYn, &ks.Srs.G2[len(coeffs)], &xn2)

// Verify the pairing equation
//
// e([commitment - interpolation_polynomial(s)], [1]) = e([proof], [s^n - x^n])
// equivalent to
// e([commitment - interpolation_polynomial]^(-1), [1]) * e([proof], [s^n - x^n]) = 1_T
//
// [interpolation_polynomial(s)]_1
// is1 := bls.LinCombG1(ks.Srs.G1[:len(coeffs)], coeffs)
// [commitment - interpolation_polynomial(s)]_1 = [commit]_1 - [interpolation_polynomial(s)]_1
// var commitMinusInterpolation bls.G1Point
// bls.SubG1(&commitMinusInterpolation, commitment, is1)

return bls.PairingsVerify(&commitMinusInterpolation, &bls.GenG2, &proof, &xnMinusYn)
}
// return bls.PairingsVerify(&commitMinusInterpolation, &bls.GenG2, &proof, &xnMinusYn)
//}
8 changes: 8 additions & 0 deletions pkg/kzg/bn254/bignum_gnark.go
Original file line number Diff line number Diff line change
Expand Up @@ -50,6 +50,14 @@ func AsFr(dst *Fr, i uint64) {
(*fr.Element)(dst).SetUint64(i)
}

// HashToSingleField hashes msg into a single field element written to dst,
// using gnark-crypto's hash-to-field (fr.Hash) with a fixed domain-separation
// tag. It is used to derive the Fiat-Shamir challenge deterministically from
// the transcript bytes.
//
// Returns a non-nil error if the underlying hash-to-field fails; dst is left
// unmodified in that case.
func HashToSingleField(dst *Fr, msg []byte) error {
	DST := []byte("-") // domain-separation tag for hash-to-field
	randomFr, err := fr.Hash(msg, DST, 1)
	if err != nil {
		// Must not index randomFr on error: the slice may be nil or empty,
		// which would panic instead of surfacing the error to the caller.
		return err
	}
	randomFrBytes := (randomFr[0]).Bytes()
	FrSetBytes(dst, randomFrBytes[:])
	return nil
}

func FrStr(b *Fr) string {
if b == nil {
return "<nil>"
Expand Down

0 comments on commit 0352b05

Please sign in to comment.