add blob type 3: validium #171

Draft: wants to merge 2 commits into develop-feijoa

package.json (2 additions, 1 deletion)
@@ -14,7 +14,8 @@
     "eslint:fix": "npx eslint 'src/**/**.js' 'test/**/**.test.js' --fix && npx eslint tools --fix",
     "test:update": "./tools/update-tests/update-tests.sh",
     "test:database": "npx mocha ./test/database.test.js",
-    "build:inputs": "npx mocha ./test/processor.test.js --update --geninputs && npx mocha ./test/processor.test.js --etrog --update --geninputs"
+    "build:inputs": "npx mocha ./test/processor.test.js --update --geninputs && npx mocha ./test/processor.test.js --etrog --update --geninputs",
+    "build:blob-inputs": "npx mocha ./test/blob-inner/blob-inner-processor.test.js --update --geninput"
   },
   "repository": {
     "type": "git",
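With this change, the blob-inner test inputs can be regenerated the same way as the existing generators, for example:

    npm run build:blob-inputs
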
src/blob-inner/blob-constants.js (1 addition, 0 deletions)
@@ -19,6 +19,7 @@ module.exports.BLOB_TYPE = {
     CALLDATA: 0,
     EIP4844: 1,
     FORCED: 2,
+    VALIDIUM: 3,
 };
 
 // Blob compression type
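A minimal usage sketch for the new constant (illustrative only, not part of this diff; the require path is assumed relative to the repository root):

    const { BLOB_TYPE } = require('./src/blob-inner/blob-constants');

    // Select the new blob type and guard against unknown values,
    // mirroring the extended _checkBlobType check in blob-processor.js below.
    const blobType = BLOB_TYPE.VALIDIUM; // 3
    if (!Object.values(BLOB_TYPE).includes(blobType)) {
        throw new Error(`unknown blob type: ${blobType}`);
    }
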
src/blob-inner/blob-processor.js (13 additions, 3 deletions)
@@ -11,7 +11,7 @@ const { getCurrentDB } = require('../smt-utils');
 const getKzg = require('./kzg-utils');
 
 const {
-    isHex, computeBlobAccInputHash, computeBlobL2HashData,
+    isHex, computeBlobAccInputHash, computeBlobL2HashKData, computeBlobL2HashPData,
     computeBatchL2HashData, computeBatchAccInputHash, computeBlobDataFromBatches, parseBlobData,
     computeVersionedHash, reduceBlobData,
 } = require('./blob-utils');
@@ -184,7 +184,8 @@ module.exports = class BlobProcessor {
     _checkBlobType() {
         if (this.blobType !== blobConstants.BLOB_TYPE.CALLDATA
             && this.blobType !== blobConstants.BLOB_TYPE.EIP4844
-            && this.blobType !== blobConstants.BLOB_TYPE.FORCED) {
+            && this.blobType !== blobConstants.BLOB_TYPE.FORCED
+            && this.blobType !== blobConstants.BLOB_TYPE.VALIDIUM) {
             if (this.addingBatchData === true) {
                 throw new Error('BlobProcessor:executeBlob: invalid blob type not compatible with batch data');
             }
@@ -243,7 +244,7 @@ module.exports = class BlobProcessor {
         // compute points Z & Y dependng on the blob type. Otherwise, compute batchL2HashData
         if (this.blobType === blobConstants.BLOB_TYPE.CALLDATA || this.blobType === blobConstants.BLOB_TYPE.FORCED) {
             // compute blobL2HashData
-            this.blobL2HashData = await computeBlobL2HashData(this.blobData);
+            this.blobL2HashData = await computeBlobL2HashKData(this.blobData);
             // points not used
             this.kzgCommitment = Constants.ZERO_BYTES32;
             this.versionedHash = Constants.ZERO_BYTES32;
@@ -261,6 +262,15 @@
             const { proof, pointY } = this.kzg.computeKzgProof(reducedBlobData, this.pointZ);
             this.pointY = pointY;
             this.kzgProof = proof;
+        } else if (this.blobType === blobConstants.BLOB_TYPE.VALIDIUM) {
+            // compute blobL2HashData
+            this.blobL2HashData = await computeBlobL2HashPData(this.blobData);
+            // points not used
+            this.kzgCommitment = Constants.ZERO_BYTES32;
+            this.versionedHash = Constants.ZERO_BYTES32;
+            this.pointZ = Constants.ZERO_BYTES32;
+            this.pointY = Constants.ZERO_BYTES32;
+            this.proof = Constants.ZERO_BYTES32;
         } else {
             // enter here only if blobType is invalid. Hence, blobData has been added previously
             // blobL2HashData not used
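For reference, the commitment over the blob data now depends on the blob type. Below is a minimal sketch using the helper names from this diff; the wrapper function itself is hypothetical and the require paths are assumed:

    const { BLOB_TYPE } = require('./src/blob-inner/blob-constants');
    const { computeBlobL2HashKData, computeBlobL2HashPData } = require('./src/blob-inner/blob-utils');

    // Hypothetical helper: which digest stands in for the blob data, per blob type
    async function blobDataDigest(blobType, blobData) {
        if (blobType === BLOB_TYPE.CALLDATA || blobType === BLOB_TYPE.FORCED) {
            return computeBlobL2HashKData(blobData); // keccak256 over the raw blob data
        }
        if (blobType === BLOB_TYPE.VALIDIUM) {
            return computeBlobL2HashPData(blobData); // linear poseidon over the raw blob data
        }
        return null; // EIP4844 commits through the KZG point evaluation instead of blobL2HashData
    }
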
src/blob-inner/blob-utils.js (25 additions, 7 deletions)
@@ -105,11 +105,11 @@ async function computeBatchAccInputHash(
 }
 
 /**
- * Blob hash data
+ * Blob hash data (keccak256)
  * @param {String} blobData - Blob data
- * @returns {String} - Blob hash data
+ * @returns {String} - Blob hash data (keccak256)
  */
-function computeBlobL2HashData(blobData) {
+function computeBlobL2HashKData(blobData) {
     blobData = blobData.startsWith('0x') ? blobData : `0x${blobData}`;
 
     return ethers.utils.solidityKeccak256(
@@ -118,6 +118,17 @@ function computeBlobL2HashData(blobData) {
     );
 }
 
+/**
+ * Blob hash data (poseidon)
+ * @param {String} blobData - Blob data
+ * @returns {String} - Blob hash data (poseidon)
+ */
+function computeBlobL2HashPData(blobData) {
+    blobData = blobData.startsWith('0x') ? blobData : `0x${blobData}`;
+
+    return linearPoseidon(blobData);
+}
+
 /**
  * Batch hash data
  * @param {String} batchL2Data - Batch L2 data input in hex string
Expand Down Expand Up @@ -233,7 +244,9 @@ function computeBlobDataFromBatches(batches, blobType) {
resBlobdata += batchesData;

let blobData;
if (blobType === blobConstants.BLOB_TYPE.CALLDATA || blobType === blobConstants.BLOB_TYPE.FORCED) {
if (blobType === blobConstants.BLOB_TYPE.CALLDATA
|| blobType === blobConstants.BLOB_TYPE.FORCED
|| blobType === blobConstants.BLOB_TYPE.VALIDIUM) {
blobData = resBlobdata;
} else if (blobType === blobConstants.BLOB_TYPE.EIP4844) {
// build blob data with no spaces and then add 0x00 each 32 bytes
Expand Down Expand Up @@ -266,7 +279,9 @@ function parseBlobData(blobData, blobType) {
const batches = [];

// if blobData is calldata or forced, no need to check and remove MSB each 32 bytes
if (blobType === blobConstants.BLOB_TYPE.CALLDATA || blobType === blobConstants.BLOB_TYPE.FORCED) {
if (blobType === blobConstants.BLOB_TYPE.CALLDATA
|| blobType === blobConstants.BLOB_TYPE.FORCED
|| blobType === blobConstants.BLOB_TYPE.VALIDIUM) {
tmpBlobdata = blobData;
} else if (blobType === blobConstants.BLOB_TYPE.EIP4844) {
// assure the most significant byte is '00' each slot of 32 bytes
Expand Down Expand Up @@ -320,7 +335,9 @@ function parseBlobData(blobData, blobType) {
const bodyLen = Scalar.e(parseInt(tmpBlobdata.slice(offsetBytes, offsetBytes + blobConstants.BLOB_ENCODING.BYTES_BODY_LENGTH * 2), 16));
offsetBytes += blobConstants.BLOB_ENCODING.BYTES_BODY_LENGTH * 2;

if (blobType === blobConstants.BLOB_TYPE.CALLDATA || blobType === blobConstants.BLOB_TYPE.FORCED) {
if (blobType === blobConstants.BLOB_TYPE.CALLDATA
|| blobType === blobConstants.BLOB_TYPE.FORCED
|| blobType === blobConstants.BLOB_TYPE.VALIDIUM) {
const firstBytes = blobConstants.BLOB_ENCODING.BYTES_COMPRESSION_TYPE + blobConstants.BLOB_ENCODING.BYTES_BODY_LENGTH;
if (bodyLen !== Scalar.e(blobData.length / 2 - firstBytes)) {
isInvalid = true;
Expand Down Expand Up @@ -399,7 +416,8 @@ function reduceBlobData(_blobData) {
module.exports = {
isHex,
computeBlobAccInputHash,
computeBlobL2HashData,
computeBlobL2HashKData,
computeBlobL2HashPData,
buildPointZData,
computePointZ,
computePointY,
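A small usage sketch of the two exported hash helpers (require path assumed; the payload is an arbitrary placeholder, and computeBlobL2HashPData is awaited since blob-processor.js above treats it as asynchronous):

    const { computeBlobL2HashKData, computeBlobL2HashPData } = require('./src/blob-inner/blob-utils');

    async function main() {
        const blobData = '0x00010203'; // placeholder hex payload
        const keccakHash = computeBlobL2HashKData(blobData); // used for CALLDATA and FORCED blobs
        const poseidonHash = await computeBlobL2HashPData(blobData); // used for VALIDIUM blobs
        console.log({ keccakHash, poseidonHash });
    }

    main().catch(console.error);
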
test/helpers/test-vectors/blob-inner/blob-inner-data.json (38 additions, 2 deletions)

Large diffs are not rendered by default.