refactor multiframe code
Ubuntu committed Dec 12, 2023
1 parent 9bfef6f commit 9563204
Showing 3 changed files with 112 additions and 66 deletions.
2 changes: 1 addition & 1 deletion core/encoding/encoder.go
@@ -140,7 +140,7 @@ func (e *Encoder) UniversalVerifySubBatch(params core.EncodingParams, samplesCor
 	sample := kzgEncoder.Sample{
 		Commitment: *sc.Commitment.G1Point,
 		Proof:      sc.Chunk.Proof,
-		Row:        sc.BlobIndex,
+		RowIndex:   sc.BlobIndex,
 		Coeffs:     sc.Chunk.Coeffs,
 		X:          sc.AssignmentIndex,
 	}
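For orientation, here is a sketch of the call site after the rename. This is illustrative only: samplesCore, numBlobs, and the accessor shapes are assumptions, while the Sample fields and the UniversalVerify entry point are the ones shown in this commit.

	// build one Sample per chunk, then batch-verify them in a single call
	samples := make([]kzgEncoder.Sample, 0, len(samplesCore))
	for _, sc := range samplesCore {
		samples = append(samples, kzgEncoder.Sample{
			Commitment: *sc.Commitment.G1Point,
			Proof:      sc.Chunk.Proof,
			RowIndex:   sc.BlobIndex,       // which blob (row) the chunk belongs to
			Coeffs:     sc.Chunk.Coeffs,    // coefficients of the chunk's interpolation polynomial
			X:          sc.AssignmentIndex, // selects the leading coset during verification
		})
	}
	if err := group.UniversalVerify(params, samples, numBlobs); err != nil {
		return err // at least one chunk in the sub-batch failed to verify
	}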
4 changes: 0 additions & 4 deletions core/validator.go
@@ -140,10 +140,6 @@ func (v *chunkValidator) ValidateBatch(blobs []*BlobMessage, operatorState *Oper
}

 	// Validate the blob length
-	// err := v.encoder.VerifyBlobLength(blob.BlobHeader.BlobCommitments)
-	//if err != nil {
-	//	return err
-	//}

blobCommitmentList[k] = blob.BlobHeader.BlobCommitments

172 changes: 111 additions & 61 deletions pkg/encoding/kzgEncoder/multiframe.go
@@ -7,6 +7,7 @@ import (
"fmt"

rs "github.com/Layr-Labs/eigenda/pkg/encoding/encoder"
kzg "github.com/Layr-Labs/eigenda/pkg/kzg"
bls "github.com/Layr-Labs/eigenda/pkg/kzg/bn254"
)

@@ -15,9 +16,9 @@ import (
type Sample struct {
Commitment bls.G1Point
Proof bls.G1Point
-	Row      int // corresponds to a row in the verification matrix
+	RowIndex int // corresponds to a row in the verification matrix
 	Coeffs   []bls.Fr
-	X        uint // X is assignment
+	X        uint // X is the assignment index of the chunk in EigenDA
}

// generate a random value using Fiat Shamir transform
@@ -44,126 +45,175 @@ func GenRandomness(samples []Sample) (bls.Fr, error) {
return randomFr, nil
}
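To make the Fiat-Shamir step concrete, here is a self-contained Go toy: hash a transcript that commits to every sample, then reduce the digest into the BN254 scalar field. The transcript encoding and SHA-256 are stand-ins for illustration; the real GenRandomness serializes and hashes the actual sample contents through the library's own helpers.

package main

import (
	"crypto/sha256"
	"fmt"
	"math/big"
)

// Order of the BN254 scalar field Fr used by the bn254 package.
var frModulus, _ = new(big.Int).SetString(
	"21888242871839275222246405745257275088548364400416034343698204186575808495617", 10)

// hashToField derives one field element from a transcript byte string.
func hashToField(transcript []byte) *big.Int {
	digest := sha256.Sum256(transcript) // Fiat-Shamir: randomness is a hash of the prover's messages
	r := new(big.Int).SetBytes(digest[:])
	return r.Mod(r, frModulus) // reduce the 256-bit digest into Fr
}

func main() {
	// Because the transcript covers all commitments, proofs, coeffs, and indices,
	// the verifier's randomness cannot be predicted before the samples are fixed.
	r := hashToField([]byte("commitments || proofs || coeffs || indices"))
	fmt.Println(r)
}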

-// UniversalVerify implements batch verification on a set of chunks given the same chunk dimension (chunkLen, numChunk).
-// The details is given in Ethereum Research post whose authors are George Kadianakis, Ansgar Dietrichs, Dankrad Feist
-// https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240
-//
-// m is number of blob, samples is a list of chunks
-// Inside the code, ft stands for first term; st for the second term; tt for the third term
-func (group *KzgEncoderGroup) UniversalVerify(params rs.EncodingParams, samples []Sample, m int) error {
-	// precheck
-	for i, s := range samples {
-		if s.Row >= m {
-			fmt.Printf("sample %v has %v Row, but there are only %v blobs\n", i, s.Row, m)
-			return errors.New("sample.Row and numBlob is inconsistent")
-		}
-	}

-	verifier, _ := group.GetKzgVerifier(params)
-	ks := verifier.Ks

-	D := params.ChunkLen

-	n := len(samples)
-	fmt.Printf("Batch verify %v frames of %v symbols out of %v blobs \n", n, params.ChunkLen, m)

+// Every sample has its own randomness, even though multiple samples can come from the same blob.
+// The randomness for each sample is computed by repeatedly raising the root randomness to higher powers.
+func GenRandomnessVector(samples []Sample) ([]bls.Fr, error) {
+	// root randomness
 	r, err := GenRandomness(samples)
 	if err != nil {
-		return err
+		return nil, err
 	}

 	n := len(samples)

 	randomsFr := make([]bls.Fr, n)
 	bls.CopyFr(&randomsFr[0], &r)

-	// lhs
-	var tmp bls.Fr

 	// power of r
 	for j := 0; j < n-1; j++ {
 		bls.MulModFr(&randomsFr[j+1], &randomsFr[j], &r)
 	}
+	return randomsFr, nil
+}
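The recurrence above is randomsFr[j+1] = randomsFr[j] · r, so sample k ends up weighted by r^(k+1). A self-contained sketch of the same loop with math/big standing in for bls.Fr:

package main

import (
	"fmt"
	"math/big"
)

// Order of the BN254 scalar field Fr.
var frModulus, _ = new(big.Int).SetString(
	"21888242871839275222246405745257275088548364400416034343698204186575808495617", 10)

// powersOf returns [r, r^2, ..., r^n] mod frModulus; n must be at least 1.
func powersOf(r *big.Int, n int) []*big.Int {
	powers := make([]*big.Int, n)
	powers[0] = new(big.Int).Set(r)
	for j := 0; j < n-1; j++ {
		// same shape as the bls.MulModFr loop: next power = previous power * r
		powers[j+1] = new(big.Int).Mul(powers[j], r)
		powers[j+1].Mod(powers[j+1], frModulus)
	}
	return powers
}

func main() {
	for i, p := range powersOf(big.NewInt(7), 4) {
		fmt.Printf("r^%d = %s\n", i+1, p) // prints 7, 49, 343, 2401
	}
}

Deriving all weights from one root randomness keeps the transcript hash to a single invocation while still giving every sample an independent-looking coefficient.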

-	// array of proofs
-	proofs := make([]bls.G1Point, n)
-	for i := 0; i < n; i++ {
-		bls.CopyG1(&proofs[i], &samples[i].Proof)
-	}

-	// lhs g1
-	lhsG1 := bls.LinCombG1(proofs, randomsFr)

-	// lhs g2
-	lhsG2 := &ks.Srs.G2[D]
+// the rhsG1 comprises three terms, see https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240/1
+func genRhsG1(samples []Sample, randomsFr []bls.Fr, m int, params rs.EncodingParams, ks *kzg.KZGSettings, proofs []bls.G1Point) (*bls.G1Point, error) {
+	n := len(samples)
+	commits := make([]bls.G1Point, m)
+	D := params.ChunkLen

-	// rhs g2
-	rhsG2 := &bls.GenG2
+	var tmp bls.Fr

-	// rhs g1
-	// get commitments
-	commits := make([]bls.G1Point, m)
-	// get coeffs
-	ftCoeffs := make([]bls.Fr, m)
 	// first term
+	// get coeffs to compute the aggregated commitment
+	// note the coeff is affected by how many chunks are validated per blob
+	// if x chunks are sampled from one blob, we need to sum the x random field elements corresponding to those samples
+	aggCommitCoeffs := make([]bls.Fr, m)
 	for k := 0; k < n; k++ {
 		s := samples[k]
-		row := s.Row
-		bls.AddModFr(&ftCoeffs[row], &ftCoeffs[row], &randomsFr[k])
+		row := s.RowIndex
+		bls.AddModFr(&aggCommitCoeffs[row], &aggCommitCoeffs[row], &randomsFr[k])
 		bls.CopyG1(&commits[row], &s.Commitment)
 	}

-	ftG1 := bls.LinCombG1(commits, ftCoeffs)
+	aggCommit := bls.LinCombG1(commits, aggCommitCoeffs)

 	// second term
-	stCoeffs := make([]bls.Fr, D)
+	// compute the aggregated interpolation polynomial
+	aggPolyCoeffs := make([]bls.Fr, D)

+	// we sum the coefficients (weighted by the random field elements) over all D monomials in all n samples
 	for k := 0; k < n; k++ {
 		coeffs := samples[k].Coeffs

 		rk := randomsFr[k]
+		// for each monomial in a given polynomial, multiply its coefficient with the corresponding random field element,
+		// then sum it with the others. Since ChunkLen (D) is identical for all samples in a subBatch,
+		// the operation is always valid.
 		for j := uint64(0); j < D; j++ {
 			bls.MulModFr(&tmp, &coeffs[j], &rk)
-			bls.AddModFr(&stCoeffs[j], &stCoeffs[j], &tmp)
+			bls.AddModFr(&aggPolyCoeffs[j], &aggPolyCoeffs[j], &tmp)
 		}
 	}
-	stG1 := bls.LinCombG1(ks.Srs.G1[:D], stCoeffs)

+	// All samples in a subBatch have identical chunkLen
+	aggPolyG1 := bls.LinCombG1(ks.Srs.G1[:D], aggPolyCoeffs)

 	// third term
-	ttCoeffs := make([]bls.Fr, n)
+	// the leading coset is an evaluation index; here we compute the leading coset evaluations weighted by the random field elements
+	lcCoeffs := make([]bls.Fr, n)

 	// get leading coset powers
 	leadingDs := make([]bls.Fr, n)

 	for k := 0; k < n; k++ {
-		// It is important to obtain the leading coset here
+		// It is important to obtain the leading coset index here,
+		// as the params from the eigenda Core might not have NumChunks be a power of 2
 		x, err := rs.GetLeadingCosetIndex(
 			uint64(samples[k].X),
 			params.NumChunks,
 		)
 		if err != nil {
-			return err
+			return nil, err
 		}

+		// get the leading coset field element
 		h := ks.ExpandedRootsOfUnity[x]
 		var hPow bls.Fr
 		bls.CopyFr(&hPow, &bls.ONE)

+		// raise each leading coset element to the power D
 		for j := uint64(0); j < D; j++ {
 			bls.MulModFr(&tmp, &hPow, &h)
 			bls.CopyFr(&hPow, &tmp)
 		}
 		bls.CopyFr(&leadingDs[k], &hPow)
 	}

+	// apply the random weights to the leading coset elements
 	for k := 0; k < n; k++ {
 		rk := randomsFr[k]
-		bls.MulModFr(&ttCoeffs[k], &rk, &leadingDs[k])
+		bls.MulModFr(&lcCoeffs[k], &rk, &leadingDs[k])
 	}
-	ttG1 := bls.LinCombG1(proofs, ttCoeffs)

+	offsetG1 := bls.LinCombG1(proofs, lcCoeffs)

 	var rhsG1 bls.G1Point
-	bls.SubG1(&rhsG1, ftG1, stG1)
-	bls.AddG1(&rhsG1, &rhsG1, ttG1)
+	bls.SubG1(&rhsG1, aggCommit, aggPolyG1)
+	bls.AddG1(&rhsG1, &rhsG1, offsetG1)
+	return &rhsG1, nil
+}
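Putting the three terms together: writing τ for the SRS secret, π_k for the proofs, C_i for the blob commitments, I_k for the polynomial given by sample k's Coeffs, and h_k for its leading coset element, genRhsG1 assembles the G1 side of the universal verification equation from the ethresear.ch post. A sketch of the identity in the code's variable names:

e\Big(\underbrace{\sum_{k} r_k\,\pi_k}_{\texttt{lhsG1}},\ \underbrace{[\tau^{D}]_2}_{\texttt{lhsG2}}\Big)
\;=\;
e\Big(\underbrace{\sum_{k} r_k\,C_{\mathrm{row}(k)}}_{\texttt{aggCommit}}
\;-\;\underbrace{\Big[\sum_{k} r_k\,I_k(\tau)\Big]_1}_{\texttt{aggPolyG1}}
\;+\;\underbrace{\sum_{k} r_k\,h_k^{D}\,\pi_k}_{\texttt{offsetG1}},\ \underbrace{g_2}_{\texttt{rhsG2}}\Big)

One Fiat-Shamir randomness thus folds all n per-chunk KZG checks into a single pairing equation; a cheating prover would need the r_k-weighted sum of nonzero error terms to cancel, which happens with negligible probability.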

+// UniversalVerify implements batch verification on a set of chunks given the same chunk dimension (chunkLen, numChunk).
+// The details are given in the Ethereum Research post by George Kadianakis, Ansgar Dietrichs, and Dankrad Feist:
+// https://ethresear.ch/t/a-universal-verification-equation-for-data-availability-sampling/13240
+//
+// m is the number of blobs; samples is a list of chunks
+//
+// The order of the samples does not matter.
+// Samples need not have unique rows; multiple chunks of the same blob can be validated together.
+func (group *KzgEncoderGroup) UniversalVerify(params rs.EncodingParams, samples []Sample, m int) error {
+	// precheck
+	for i, s := range samples {
+		if s.RowIndex >= m {
+			fmt.Printf("sample %v has RowIndex %v, but there are only %v blobs\n", i, s.RowIndex, m)
+			return errors.New("sample.RowIndex and numBlob are inconsistent")
+		}
+	}

+	verifier, _ := group.GetKzgVerifier(params)
+	ks := verifier.Ks

+	D := params.ChunkLen

+	n := len(samples)
+	fmt.Printf("Batch verify %v frames of %v symbols out of %v blobs \n", n, params.ChunkLen, m)

+	// generate random field elements to aggregate the equality checks
+	randomsFr, err := GenRandomnessVector(samples)
+	if err != nil {
+		return err
+	}

+	// array of proofs
+	proofs := make([]bls.G1Point, n)
+	for i := 0; i < n; i++ {
+		bls.CopyG1(&proofs[i], &samples[i].Proof)
+	}

+	// lhs g1
+	lhsG1 := bls.LinCombG1(proofs, randomsFr)

+	// lhs g2
+	lhsG2 := &ks.Srs.G2[D]

+	// rhs g2
+	rhsG2 := &bls.GenG2

+	// rhs g1
+	rhsG1, err := genRhsG1(
+		samples,
+		randomsFr,
+		m,
+		params,
+		ks,
+		proofs,
+	)
+	if err != nil {
+		return err
+	}
-	if bls.PairingsVerify(lhsG1, lhsG2, &rhsG1, rhsG2) {
+	if bls.PairingsVerify(lhsG1, lhsG2, rhsG1, rhsG2) {
 		return nil
 	} else {
 		return errors.New("universal verify: incorrect pairing")
