Skip to content

Commit

Permalink
Merge pull request #170 from 0xPolygonHermez/feature/reduce-blobdata
Browse files Browse the repository at this point in the history
Feature/reduce blobdata
  • Loading branch information
laisolizq authored Apr 24, 2024
2 parents b64c875 + 78877bd commit 0502a81
Show file tree
Hide file tree
Showing 5 changed files with 65 additions and 5 deletions.
2 changes: 1 addition & 1 deletion package.json
Original file line number Diff line number Diff line change
Expand Up @@ -11,7 +11,7 @@
"test:blockinfo": "npx mocha ./test/processor.test.js --blockinfo",
"test:selfdestruct": "npx mocha ./test/processor.test.js --selfdestruct",
"eslint": "npx eslint 'src/**/**.js' 'test/**/**.test.js' && npx eslint tools",
"eslint:fix": "npx 'eslint src/**/**.js' 'test/**/**.test.js' --fix && npx eslint tools --fix",
"eslint:fix": "npx eslint 'src/**/**.js' 'test/**/**.test.js' --fix && npx eslint tools --fix",
"test:update": "./tools/update-tests/update-tests.sh",
"test:database": "npx mocha ./test/database.test.js",
"build:inputs": "npx mocha ./test/processor.test.js --update --geninputs && npx mocha ./test/processor.test.js --etrog --update --geninputs"
Expand Down
7 changes: 4 additions & 3 deletions src/blob-inner/blob-processor.js
Original file line number Diff line number Diff line change
Expand Up @@ -13,7 +13,7 @@ const getKzg = require('./kzg-utils');
const {
isHex, computeBlobAccInputHash, computeBlobL2HashData,
computeBatchL2HashData, computeBatchAccInputHash, computeBlobDataFromBatches, parseBlobData,
computeVersionedHash,
computeVersionedHash, reduceBlobData,
} = require('./blob-utils');
const blobConstants = require('./blob-constants');

Expand Down Expand Up @@ -259,10 +259,11 @@ module.exports = class BlobProcessor {
// blobL2HashData not used
this.blobL2HashData = Constants.ZERO_BYTES32;
// compute kzg data
this.kzgCommitment = this.kzg.blobToKzgCommitment(this.blobData);
const reducedBlobData = reduceBlobData(this.blobData);
this.kzgCommitment = this.kzg.blobToKzgCommitment(reducedBlobData);
this.versionedHash = computeVersionedHash(this.kzgCommitment);
this.pointZ = await this.kzg.computePointZ(this.kzgCommitment, this.blobData);
const { proof, pointY } = this.kzg.computeKzgProof(this.blobData, this.pointZ);
const { proof, pointY } = this.kzg.computeKzgProof(reducedBlobData, this.pointZ);
this.pointY = pointY;
this.kzgProof = proof;
} else {
Expand Down
19 changes: 19 additions & 0 deletions src/blob-inner/blob-utils.js
Original file line number Diff line number Diff line change
Expand Up @@ -351,6 +351,24 @@ function parseBlobData(blobData, blobType) {
return { isInvalid, batches };
}

/**
 * Reduce each field element of the blob modulo the BLS12-381 scalar field order.
 * @param {String} _blobData - blob data as a hex string, with or without a '0x' prefix
 * @returns {String} - '0x'-prefixed blob data where every element has been taken mod FrBLS12-381
 */
function reduceBlobData(_blobData) {
    const hexString = _blobData.startsWith('0x') ? _blobData.slice(2) : _blobData;
    // each field element occupies BYTES_PER_FIELD_ELEMENT bytes -> twice that many hex chars
    const elementHexLen = blobConstants.BYTES_PER_FIELD_ELEMENT * 2;
    const reducedElements = [];

    for (let index = 0; index < blobConstants.FIELD_ELEMENTS_PER_BLOB; index++) {
        const elementHex = hexString.slice(index * elementHexLen, (index + 1) * elementHexLen);
        const reduced = Scalar.mod(Scalar.e(`0x${elementHex}`), frBLS12381.p);
        // keep fixed-width encoding so elements stay aligned in the output string
        reducedElements.push(reduced.toString(16).padStart(elementHexLen, '0'));
    }

    return `0x${reducedElements.join('')}`;
}

module.exports = {
isHex,
computeBlobAccInputHash,
Expand All @@ -363,4 +381,5 @@ module.exports = {
computeBlobDataFromBatches,
parseBlobData,
computeVersionedHash,
reduceBlobData,
};
36 changes: 35 additions & 1 deletion test/blob-inner/blob-utils.test.js
Original file line number Diff line number Diff line change
@@ -1,17 +1,51 @@
const fs = require('fs');
const path = require('path');
const { argv } = require('yargs');
const { expect } = require('chai');

const {
utils,
} = require('../../index').blobInner;
const { pathTestVectors } = require('../helpers/test-utils');

// eslint-disable-next-line prefer-arrow-callback
describe('blob utils', async function () {
let testVectors;
let update;

const pathReduceBlobTests = path.join(pathTestVectors, 'blob-inner/blob-reduce-data.json');

before(async () => {
testVectors = JSON.parse(fs.readFileSync(pathReduceBlobTests));

update = (argv.update === true);
});

it('computeVersionedHash', async () => {
const expectedValue = '0x01ef9c6c1dae51f6f666f0b8633e81101fa03033830366d19c56401313422502';
const expectedValue = '0x017c04f3aff3266e4df00b2643a329439ee56323ba9fbe1aedac75b5b0bc759d';

const kzgCommitment = '0x1dcc4de8dec75d7aab85b567b6ccd41ad312451b948a7413f0a142fd40d493471dcc4de8dec75d7aab85b567b6ccd41a';
const versionedHash = utils.computeVersionedHash(kzgCommitment);

expect(versionedHash).to.be.equal(expectedValue);
});

it('reduce blob', async () => {
for (let i = 0; i < testVectors.length; i++) {
const {
blobData,
expectedBlobDataReduced,
} = testVectors[i];

const reducedBlob = utils.reduceBlobData(blobData);

if (!update) {
expect(reducedBlob).to.be.equal(expectedBlobDataReduced);
} else {
testVectors[i].expectedBlobDataReduced = reducedBlob;

fs.writeFileSync(pathReduceBlobTests, JSON.stringify(testVectors, null, 2));
}
}
});
});
6 changes: 6 additions & 0 deletions test/helpers/test-vectors/blob-inner/blob-reduce-data.json

Large diffs are not rendered by default.

0 comments on commit 0502a81

Please sign in to comment.