diff --git a/.github/workflows/all-tools.yml b/.github/workflows/all-tools.yml index d5adfd576..edcfc7fe1 100644 --- a/.github/workflows/all-tools.yml +++ b/.github/workflows/all-tools.yml @@ -16,7 +16,7 @@ on: jobs: changes: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Filter commit changes outputs: all-tools: ${{ steps.filter.outputs['all-tools'] }} @@ -39,7 +39,7 @@ jobs: uses: ./.github/workflows/reuse-store-image-name-and-tags.yml check_image_tags_exist: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Check image tags exist needs: [ changes, store_image_name_and_tags ] if: ${{ needs.changes.outputs['all-tools'] == 'false' }} @@ -56,7 +56,7 @@ jobs: docker_password: ${{ secrets.DOCKERHUB_TOKEN }} all-tools-tag-only: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: All tools tag only needs: [ changes, store_image_name_and_tags, check_image_tags_exist ] if: ${{ github.event_name != 'pull_request' && needs.changes.outputs['all-tools'] == 'false' }} @@ -83,7 +83,7 @@ jobs: build-and-publish: needs: [ changes, store_image_name_and_tags, all-tools-tag-only ] if: ${{ always() && (needs.changes.outputs['all-tools'] == 'true' || needs.all-tools-tag-only.result != 'success' || needs.all-tools-tag-only.outputs.image_tagged != 'true') }} - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] env: COMMIT_TAG: ${{ needs.store_image_name_and_tags.outputs.commit_tag }} DEVELOP_TAG: ${{ needs.store_image_name_and_tags.outputs.develop_tag }} diff --git a/.github/workflows/bridge-ui-e2e-tests.yml b/.github/workflows/bridge-ui-e2e-tests.yml index fc6f1139d..190895bb8 100644 --- a/.github/workflows/bridge-ui-e2e-tests.yml +++ b/.github/workflows/bridge-ui-e2e-tests.yml @@ -14,7 +14,7 @@ on: jobs: run-e2e-tests: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Checkout uses: actions/checkout@v4 diff --git a/.github/workflows/bridge-ui-publish.yml b/.github/workflows/bridge-ui-publish.yml index fd0f2975e..7b63d6e58 100644 --- a/.github/workflows/bridge-ui-publish.yml +++ b/.github/workflows/bridge-ui-publish.yml @@ -14,8 +14,7 @@ on: jobs: publish: - runs-on: [self-hosted, ubuntu-22.04, X64, small] - if: github.event_name != 'pull_request' || (github.event_name == 'pull_request' && github.event.pull_request.draft == false) + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.github/workflows/cache-docker-images.yml b/.github/workflows/cache-docker-images.yml index 4c8658737..863d4130c 100644 --- a/.github/workflows/cache-docker-images.yml +++ b/.github/workflows/cache-docker-images.yml @@ -9,7 +9,7 @@ on: jobs: changes: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Filter commit changes outputs: cache-images: ${{ steps.filter.outputs.cache-images }} @@ -31,7 +31,7 @@ jobs: pull-and-cache-images: needs: [ changes ] if: ${{ always() && needs.changes.outputs.cache-images == 'true' }} - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Checkout uses: actions/checkout@v3 diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index 906b8f3d5..f182c60b5 100644 --- a/.github/workflows/codeql.yml +++ 
b/.github/workflows/codeql.yml @@ -18,7 +18,7 @@ on: jobs: analyze: name: Analyze - runs-on: [self-hosted, ubuntu-22.04, X64, medium] + runs-on: [self-hosted, ubuntu-20.04, X64, medium] permissions: actions: read contents: read diff --git a/.github/workflows/coordinator-build-and-publish.yml b/.github/workflows/coordinator-build-and-publish.yml index 84545997e..022bddd1d 100644 --- a/.github/workflows/coordinator-build-and-publish.yml +++ b/.github/workflows/coordinator-build-and-publish.yml @@ -33,7 +33,7 @@ concurrency: jobs: build-and-publish: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Coordinator build environment: ${{ github.ref != 'refs/heads/main' && 'docker-build-and-e2e' || '' }} env: @@ -55,7 +55,6 @@ jobs: run: | ./gradlew coordinator:app:distZip --no-daemon - name: Login to Docker Hub - if: github.event_name != 'pull_request' uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} diff --git a/.github/workflows/finalized-tag-updater-github-release.yml b/.github/workflows/finalized-tag-updater-github-release.yml index 1cdc41354..f9eecc4a5 100644 --- a/.github/workflows/finalized-tag-updater-github-release.yml +++ b/.github/workflows/finalized-tag-updater-github-release.yml @@ -15,7 +15,7 @@ on: jobs: release: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.github/workflows/load-test.yml b/.github/workflows/load-test.yml index 009473da8..295aac845 100644 --- a/.github/workflows/load-test.yml +++ b/.github/workflows/load-test.yml @@ -27,7 +27,7 @@ concurrency: jobs: run-load-test: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Run Load Test steps: - name: Checkout diff --git a/.github/workflows/main.yml b/.github/workflows/main.yml index 21aa31b20..97815b0bb 100644 --- a/.github/workflows/main.yml +++ b/.github/workflows/main.yml @@ -1,14 +1,17 @@ name: main on: + pull_request: push: + branches: + - main jobs: store-image-name-and-tags: uses: ./.github/workflows/reuse-store-image-name-and-tags.yml filter-commit-changes: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Filter commit changes outputs: coordinator: ${{ steps.filter.outputs.coordinator }} @@ -191,7 +194,7 @@ jobs: cleanup-deployments: needs: [ run-e2e-tests, run-e2e-tests-geth-tracing ] if: ${{ always() }} - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - uses: strumwolf/delete-deployment-environment@v2 with: diff --git a/.github/workflows/maven-release.yml b/.github/workflows/maven-release.yml index f42a6ae09..0872606f0 100644 --- a/.github/workflows/maven-release.yml +++ b/.github/workflows/maven-release.yml @@ -18,7 +18,7 @@ on: jobs: release: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Checkout code uses: actions/checkout@v4 diff --git a/.github/workflows/postman-build-and-publish.yml b/.github/workflows/postman-build-and-publish.yml index 7fc081506..114829d09 100644 --- a/.github/workflows/postman-build-and-publish.yml +++ b/.github/workflows/postman-build-and-publish.yml @@ -33,7 +33,7 @@ concurrency: jobs: build-and-publish: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Postman build 
environment: ${{ github.ref != 'refs/heads/main' && 'docker-build-and-e2e' || '' }} env: diff --git a/.github/workflows/postman-testing.yml b/.github/workflows/postman-testing.yml index e8d42ba7b..64071e2f1 100644 --- a/.github/workflows/postman-testing.yml +++ b/.github/workflows/postman-testing.yml @@ -14,7 +14,7 @@ concurrency: jobs: run-tests: - runs-on: [self-hosted, Linux, X64, large] + runs-on: [self-hosted, ubuntu-22.04, X64, large] name: Postman & SDK tests steps: - name: Checkout diff --git a/.github/workflows/prover-build-and-publish.yml b/.github/workflows/prover-build-and-publish.yml index 57a00dc44..353719b22 100644 --- a/.github/workflows/prover-build-and-publish.yml +++ b/.github/workflows/prover-build-and-publish.yml @@ -36,7 +36,7 @@ env: jobs: build-and-publish: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Prover build environment: ${{ github.ref != 'refs/heads/main' && 'docker-build-and-e2e' || '' }} env: diff --git a/.github/workflows/prover-native-lib-blob-compressor-release.yml b/.github/workflows/prover-native-lib-blob-compressor-release.yml index a24a8b8c3..6874cf214 100644 --- a/.github/workflows/prover-native-lib-blob-compressor-release.yml +++ b/.github/workflows/prover-native-lib-blob-compressor-release.yml @@ -22,7 +22,7 @@ on: jobs: build-linux: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Checkout code uses: actions/checkout@v4 @@ -56,7 +56,7 @@ jobs: path: ./prover/target build-linux-arm64: - runs-on: besu-arm64 + runs-on: [self-hosted, ubuntu-20.04, ARM64, small] steps: - name: Checkout code uses: actions/checkout@v4 @@ -128,7 +128,7 @@ jobs: release_artefacts: name: Release artefacts needs: [ build-linux, build-linux-arm64, build-mac-os] - runs-on: [self-hosted,ubuntu-22.04ARM64, small] + runs-on: [self-hosted, ubuntu-20.04, ARM64, small] steps: - name: Load cached binaries uses: actions/download-artifact@v4 diff --git a/.github/workflows/prover-testing.yml b/.github/workflows/prover-testing.yml index 03b4dfe70..1427eeffb 100644 --- a/.github/workflows/prover-testing.yml +++ b/.github/workflows/prover-testing.yml @@ -17,7 +17,7 @@ concurrency: jobs: staticcheck: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Prover static check steps: - name: install Go @@ -44,6 +44,7 @@ jobs: - name: golangci-lint uses: golangci/golangci-lint-action@v3 with: + version: v1.61.0 working-directory: prover args: --timeout=5m - name: generated files should not be modified @@ -58,7 +59,7 @@ jobs: strategy: matrix: go-version: [1.23.x] - runs-on: [self-hosted, Linux, X64, large] + runs-on: [self-hosted, ubuntu-22.04, X64, large] name: Prover testing needs: - staticcheck @@ -99,7 +100,7 @@ jobs: needs: - staticcheck - test - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Notify slack -- workflow failed id: slack @@ -122,7 +123,7 @@ jobs: needs: - staticcheck - test - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Notify slack -- workflow succeeded id: slack diff --git a/.github/workflows/reuse-check-images-tags-and-push.yml b/.github/workflows/reuse-check-images-tags-and-push.yml index 9d97cbd07..ab2e4d6e6 100644 --- a/.github/workflows/reuse-check-images-tags-and-push.yml +++ b/.github/workflows/reuse-check-images-tags-and-push.yml @@ -50,7 +50,7 @@ 
concurrency: jobs: check_image_tags_exist: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Check image tags exist outputs: last_commit_tag_exists_coordinator: ${{ steps.check_image_tags_exist_coordinator.outputs.last_commit_tag_exists }} @@ -110,7 +110,7 @@ jobs: docker_password: ${{ secrets.DOCKERHUB_TOKEN }} image_tag_push: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Tag and push images needs: [ check_image_tags_exist ] outputs: diff --git a/.github/workflows/reuse-run-e2e-tests.yml b/.github/workflows/reuse-run-e2e-tests.yml index 2db3ade1e..c646e9053 100644 --- a/.github/workflows/reuse-run-e2e-tests.yml +++ b/.github/workflows/reuse-run-e2e-tests.yml @@ -75,7 +75,7 @@ jobs: GITHUB_TOKEN: ${{ secrets._GITHUB_TOKEN_RELEASE_ACCESS }} outputs: tests_outcome: ${{ steps.run_e2e_tests.outcome }} - runs-on: [self-hosted, Linux, X64, large] + runs-on: [self-hosted, ubuntu-22.04, X64, large] environment: ${{ github.ref != 'refs/heads/main' && 'docker-build-and-e2e' || '' }} steps: - name: Setup upterm session @@ -88,7 +88,6 @@ jobs: with: pnpm-install-options: '--frozen-lockfile --prefer-offline' - name: Login to Docker Hub - if: github.event_name != 'pull_request' uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} diff --git a/.github/workflows/reuse-store-image-name-and-tags.yml b/.github/workflows/reuse-store-image-name-and-tags.yml index df9959e54..bad0023fc 100644 --- a/.github/workflows/reuse-store-image-name-and-tags.yml +++ b/.github/workflows/reuse-store-image-name-and-tags.yml @@ -19,7 +19,7 @@ concurrency: jobs: store_image_name_and_tags: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Compute version tags env: # REF_NAME: ${{ github.ref_name }} diff --git a/.github/workflows/reuse-tag-without-untested-suffix.yml b/.github/workflows/reuse-tag-without-untested-suffix.yml index 7f3cd38bc..2eb3f943f 100644 --- a/.github/workflows/reuse-tag-without-untested-suffix.yml +++ b/.github/workflows/reuse-tag-without-untested-suffix.yml @@ -22,14 +22,13 @@ on: jobs: tag-without-untested-suffix: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: tag without untested suffix strategy: matrix: image_name: ${{ fromJSON(inputs.image_names) }} steps: - name: Login to Docker Hub - if: github.event_name != 'pull_request' uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} diff --git a/.github/workflows/run-smc-tests.yml b/.github/workflows/run-smc-tests.yml index 53122092c..ebb237db9 100644 --- a/.github/workflows/run-smc-tests.yml +++ b/.github/workflows/run-smc-tests.yml @@ -1,7 +1,14 @@ name: Smart contracts test on: + pull_request: + paths: + - 'contracts/**' + - 'testdata/**' + - 'prover/**' push: + branches: + - main paths: - 'contracts/**' - 'testdata/**' @@ -12,7 +19,7 @@ env: jobs: run-contract-tests: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Run smart contracts tests steps: - uses: actions/checkout@v4 @@ -61,7 +68,7 @@ jobs: token: ${{ secrets.CODECOV_TOKEN }} solidity-format-check: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Solidity format check steps: - uses: actions/checkout@v4 diff --git a/.github/workflows/security-report-to-csv.yml 
b/.github/workflows/security-report-to-csv.yml index 4e76df856..1b4dc1901 100644 --- a/.github/workflows/security-report-to-csv.yml +++ b/.github/workflows/security-report-to-csv.yml @@ -2,7 +2,7 @@ name: Export Security Report to CSV on: workflow_dispatch jobs: data_gathering: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: CSV export uses: advanced-security/ghas-to-csv@v2 diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml index e12dfc43b..6222a6f82 100644 --- a/.github/workflows/stale.yml +++ b/.github/workflows/stale.yml @@ -11,7 +11,7 @@ permissions: jobs: stale: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - uses: actions/stale@v8 with: diff --git a/.github/workflows/traces-api-facade-build-and-publish.yml b/.github/workflows/traces-api-facade-build-and-publish.yml index 441a408c2..f9079b454 100644 --- a/.github/workflows/traces-api-facade-build-and-publish.yml +++ b/.github/workflows/traces-api-facade-build-and-publish.yml @@ -33,7 +33,7 @@ concurrency: jobs: build-and-publish: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Traces api facade build environment: ${{ github.ref != 'refs/heads/main' && 'docker-build-and-e2e' || '' }} env: @@ -58,7 +58,6 @@ jobs: ./gradlew traces-api-facade:app:shadowJar echo ${{ github.workspace }} - name: Login to Docker Hub - if: github.event_name != 'pull_request' uses: docker/login-action@v3 with: username: ${{ secrets.DOCKERHUB_USERNAME }} diff --git a/.github/workflows/traces-api-facade-testing.yml b/.github/workflows/traces-api-facade-testing.yml index ca6bce09f..8b9b0823b 100644 --- a/.github/workflows/traces-api-facade-testing.yml +++ b/.github/workflows/traces-api-facade-testing.yml @@ -21,7 +21,7 @@ concurrency: jobs: run-tests: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] name: Traces api facade tests steps: - name: Checkout diff --git a/.github/workflows/valid-audit-pr-has-tags.yml b/.github/workflows/valid-audit-pr-has-tags.yml index a5a25471f..94966055f 100644 --- a/.github/workflows/valid-audit-pr-has-tags.yml +++ b/.github/workflows/valid-audit-pr-has-tags.yml @@ -9,7 +9,7 @@ on: jobs: check: - runs-on: [self-hosted, ubuntu-22.04, X64, small] + runs-on: [self-hosted, ubuntu-20.04, X64, small] steps: - name: Checkout code diff --git a/docs/architecture-description.md b/docs/architecture-description.md index 6164147ab..b418cecd0 100644 --- a/docs/architecture-description.md +++ b/docs/architecture-description.md @@ -109,7 +109,7 @@ Transactions exceeding trace limits are added to an unexecutableTxList in-memory Priority transactions are prioritized over normal ones. Priority transactions are those sent by a user whose address is in a predefined list. It typically corresponds to transactions triggered by the Linea system. -Note that in case no transaction is received in the block window, no block is generated. This behavior differs from Ethereum mainnet behavior given that it’s not required to avoid attacks. +Note that if no transactions are received within the block window, no block is generated. This behavior differs from Ethereum mainnet, where empty blocks are still produced to maintain chain continuity and prevent certain attacks. If a transaction could not be included in the current block, it will remain as a candidate for inclusion in the next block. 
@@ -475,7 +475,7 @@ The paragraphs highlights the roles of the different proofs that are generated. ### Execution proofs -It validates the correct execution of transactions within the Ethereum Virtual Machine (EVM). The proof system for the execution has a complex structure which involves the Vortex proof system, GKR and Plonk. The final proof takes the form of a BLS12-377-based Plonk proof. +It validates the correct execution of transactions within the Ethereum Virtual Machine (EVM). The proof system for the execution has a complex structure which involves the Vortex proof system, GKR and PLONK. The final proof takes the form of a BLS12-377-based PLONK proof. An execution request proof is a file stored in the shared filesystem under the repository: with file name pattern: `$startBlockNumber-$endBlockNumber-etv$tracesVersion-stv$stateManagerVersion-getZkProof.json` @@ -557,7 +557,7 @@ RollingHashUpdatedEvent ### Compression proof -Verifies the effective compression of a byte stream of data, which represents the inputs for the EVM execution circuit. This circuit ensures that the compressed data submitted on Ethereum can be accurately decompressed, revealing the necessary inputs for validation. The proof system used for generating the compression proof is Plonk and is based on the curve BLS12-377. \ +Verifies the effective compression of a byte stream of data, which represents the inputs for the EVM execution circuit. This circuit ensures that the compressed data submitted on Ethereum can be accurately decompressed, revealing the necessary inputs for validation. The proof system used for generating the compression proof is PLONK and is based on the curve BLS12-377. \ File name @@ -627,7 +627,7 @@ BlobCompressionProofJsonResponse ### Aggregation proof -Serves as the cornerstone of Linea's proof system, recursively verifying proofs from N execution circuits and M compression circuit instances. This circuit encapsulates the primary statement of Linea's prover and is the sole circuit subjected to external verification. The proof system used is a combination of several Plonk circuits on BW6, BLS12-377 and BN254 which tactically profits from the 2-chained curves BLS12-377 and BW6 to efficiently recurse the proofs. The final proof takes the form of a BN254 curve that can be efficiently verified on Ethereum thanks to the available precompiles. +Serves as the cornerstone of Linea's proof system, recursively verifying proofs from N execution circuits and M compression circuit instances. This circuit encapsulates the primary statement of Linea's prover and is the sole circuit subjected to external verification. The proof system used is a combination of several PLONK circuits on BW6, BLS12-377 and BN254, which takes advantage of the 2-chained curves BLS12-377 and BW6 to efficiently recurse the proofs. The final proof is a PLONK proof over the BN254 curve that can be efficiently verified on Ethereum thanks to the available precompiles. File name @@ -911,7 +911,7 @@ On finalization the value of the final (last in rollup data being finalized) Rol Whenever a transaction is executed on L2 to send a message to L1, a MessageSent event is emitted.
-Txs to send L2 -> L1 messages are sent to the L2 Message service function: +Txs to send L2 -> L1 messages are sent to the L2 Message Service function: ``` diff --git a/docs/development-guidelines.md b/docs/development-guidelines.md index 1d848131a..19d333000 100644 --- a/docs/development-guidelines.md +++ b/docs/development-guidelines.md @@ -93,7 +93,7 @@ With **Escape newlines** enabled in Grafana, the stack trace will be displayed a ### Java/Kotlin Guidelines -These guidelines reelect some specificities of our logging library [Log4J2](https://logging.apache.org/log4j/2.x/) +These guidelines reflect some specificities of our logging library [Log4J2](https://logging.apache.org/log4j/2.x/) #### Favor argument placeholder instead of String templating ```kotlin diff --git a/prover/circuits/pi-interconnection/bench/main.go b/prover/circuits/pi-interconnection/bench/main.go index 3ce42492b..e136255d4 100644 --- a/prover/circuits/pi-interconnection/bench/main.go +++ b/prover/circuits/pi-interconnection/bench/main.go @@ -26,7 +26,6 @@ func main() { c, err := pi_interconnection.Compile(config.PublicInput{ MaxNbDecompression: 400, MaxNbExecution: 400, - MaxNbKeccakF: 10000, ExecutionMaxNbMsg: 16, L2MsgMerkleDepth: 5, L2MsgMaxNbMerkle: 10, diff --git a/prover/circuits/pi-interconnection/circuit.go b/prover/circuits/pi-interconnection/circuit.go index 0f4394dc3..4e87bd29e 100644 --- a/prover/circuits/pi-interconnection/circuit.go +++ b/prover/circuits/pi-interconnection/circuit.go @@ -229,7 +229,7 @@ func Compile(c config.PublicInput, wizardCompilationOpts ...func(iop *wizard.Com c.L2MsgMaxNbMerkle = (c.MaxNbExecution*c.ExecutionMaxNbMsg + merkleNbLeaves - 1) / merkleNbLeaves } - sh := newKeccakCompiler(c).Compile(c.MaxNbKeccakF, wizardCompilationOpts...) + sh := newKeccakCompiler(c).Compile(wizardCompilationOpts...) 
shc, err := sh.GetCircuit() if err != nil { return nil, err @@ -261,7 +261,6 @@ func (c *Compiled) getConfig() (config.PublicInput, error) { return config.PublicInput{ MaxNbDecompression: len(c.Circuit.DecompressionFPIQ), MaxNbExecution: len(c.Circuit.ExecutionFPIQ), - MaxNbKeccakF: c.Keccak.MaxNbKeccakF(), ExecutionMaxNbMsg: executionNbMsg, L2MsgMerkleDepth: c.Circuit.L2MessageMerkleDepth, L2MsgMaxNbMerkle: c.Circuit.L2MessageMaxNbMerkle, diff --git a/prover/circuits/pi-interconnection/circuit_test.go b/prover/circuits/pi-interconnection/circuit_test.go index 45c804012..5183cb5b5 100644 --- a/prover/circuits/pi-interconnection/circuit_test.go +++ b/prover/circuits/pi-interconnection/circuit_test.go @@ -118,7 +118,6 @@ func TestMaxNbCircuitsSum(t *testing.T) { MaxNbDecompression: maxNbDecompression, MaxNbExecution: maxNbExecution, MaxNbCircuits: 20, - MaxNbKeccakF: 200, ExecutionMaxNbMsg: 2, L2MsgMerkleDepth: 5, L2MsgMaxNbMerkle: 2, diff --git a/prover/circuits/pi-interconnection/compile/test_compile.go b/prover/circuits/pi-interconnection/compile/test_compile.go index 7b0112658..dd307aafd 100644 --- a/prover/circuits/pi-interconnection/compile/test_compile.go +++ b/prover/circuits/pi-interconnection/compile/test_compile.go @@ -21,7 +21,6 @@ func main() { c, err := pi_interconnection.Compile(config.PublicInput{ MaxNbDecompression: 400, MaxNbExecution: 400, - MaxNbKeccakF: 10000, ExecutionMaxNbMsg: 16, L2MsgMerkleDepth: 5, L2MsgMaxNbMerkle: 10, diff --git a/prover/circuits/pi-interconnection/e2e_test.go b/prover/circuits/pi-interconnection/e2e_test.go index 35aa5d3af..4b0e187e2 100644 --- a/prover/circuits/pi-interconnection/e2e_test.go +++ b/prover/circuits/pi-interconnection/e2e_test.go @@ -30,7 +30,7 @@ import ( // some of the execution data are faked func TestSingleBlockBlob(t *testing.T) { - testPI(t, 103, pitesting.AssignSingleBlockBlob(t), withSlack(0, 1, 2)) + testPI(t, pitesting.AssignSingleBlockBlob(t), withSlack(0, 1, 2)) } func TestSingleBlobBlobE2E(t *testing.T) { @@ -38,7 +38,6 @@ func TestSingleBlobBlobE2E(t *testing.T) { cfg := config.PublicInput{ MaxNbDecompression: len(req.Decompressions), MaxNbExecution: len(req.Executions), - MaxNbKeccakF: 100, ExecutionMaxNbMsg: 1, L2MsgMerkleDepth: 5, L2MsgMaxNbMerkle: 1, @@ -124,7 +123,7 @@ func TestTinyTwoBatchBlob(t *testing.T) { }, } - testPI(t, 100, req, withSlack(0, 1, 2)) + testPI(t, req, withSlack(0, 1, 2)) } func TestTwoTwoBatchBlobs(t *testing.T) { @@ -205,13 +204,13 @@ func TestTwoTwoBatchBlobs(t *testing.T) { }, } - testPI(t, 101, req, withSlack(0, 1, 2)) + testPI(t, req, withSlack(0, 1, 2)) } func TestEmpty(t *testing.T) { const hexZeroBlock = "0x0000000000000000000000000000000000000000000000000000000000000000" - testPI(t, 50, pi_interconnection.Request{ + testPI(t, pi_interconnection.Request{ Aggregation: public_input.Aggregation{ FinalShnarf: hexZeroBlock, ParentAggregationFinalShnarf: hexZeroBlock, @@ -242,7 +241,7 @@ func withSlack(slack ...int) testPIOption { } } -func testPI(t *testing.T, maxNbKeccakF int, req pi_interconnection.Request, options ...testPIOption) { +func testPI(t *testing.T, req pi_interconnection.Request, options ...testPIOption) { var cfg testPIConfig for _, o := range options { o(&cfg) @@ -267,7 +266,6 @@ func testPI(t *testing.T, maxNbKeccakF int, req pi_interconnection.Request, opti cfg := config.PublicInput{ MaxNbDecompression: len(req.Decompressions) + slack[0], MaxNbExecution: len(req.Executions) + slack[1], - MaxNbKeccakF: maxNbKeccakF, ExecutionMaxNbMsg: 1 + slack[2], L2MsgMerkleDepth: 5, 
L2MsgMaxNbMerkle: 1 + slack[3], diff --git a/prover/circuits/pi-interconnection/keccak/assign.go b/prover/circuits/pi-interconnection/keccak/assign.go index 52ca07918..bc46b8a97 100644 --- a/prover/circuits/pi-interconnection/keccak/assign.go +++ b/prover/circuits/pi-interconnection/keccak/assign.go @@ -29,7 +29,7 @@ import ( // Finally, the gnark sub-circuit can produce a SNARK hasher to be used inside the circuit.Define function. // TODO Perhaps a permutation argument would help usability -// i.e. compute \prod (r+ inLen + in_0 s + in_1 s^2 + ... + in_{maxInLen-1} s^{maxInLen} + out_0 s^{maxInLen+1} + ... + out_31 s^{maxInLen+32) +// i.e. compute ∏ (r + inLen + in₀ s + in₁ s² + ... + in_{maxInLen-1} sᵐᵃˣᴵⁿᴸᵉⁿ + out₀ sᵐᵃˣᴵⁿᴸᵉⁿ⁺¹ + ... + out₃₁ sᵐᵃˣᴵⁿᴸᵉⁿ⁺³²) // on both sides and assert their equality // (can pack the in-outs first to reduce constraints slightly) @@ -79,13 +79,20 @@ func (h *StrictHasherCompiler) WithHashLengths(l ...int) *StrictHasherCompiler { return h } -func (h *StrictHasherCompiler) Compile(maxNbKeccakF int, wizardCompilationOpts ...func(iop *wizard.CompiledIOP)) CompiledStrictHasher { // TODO compute maxNbKeccakF instead of taking as param - wc := NewWizardVerifierSubCircuit(maxNbKeccakF, wizardCompilationOpts...) +func (h *StrictHasherCompiler) Compile(wizardCompilationOpts ...func(iop *wizard.CompiledIOP)) CompiledStrictHasher { + nbKeccakF := 0 // Since the output size is smaller than the block size, the squeezing phase is trivial. TODO @Tabaie check with @azam.soleimanian that this is correct + + const blockNbBytesIn = lanesPerBlock * 8 + for _, l := range *h { + nbKeccakF += l/blockNbBytesIn + 1 // extra room for padding + } + + wc := NewWizardVerifierSubCircuit(nbKeccakF, wizardCompilationOpts...) return CompiledStrictHasher{ wc: *wc, lengths: *h, - maxNbKeccakF: maxNbKeccakF, + maxNbKeccakF: nbKeccakF, } } diff --git a/prover/circuits/pi-interconnection/keccak/assign_test.go b/prover/circuits/pi-interconnection/keccak/assign_test.go index b4adccc92..f002e29a4 100644 --- a/prover/circuits/pi-interconnection/keccak/assign_test.go +++ b/prover/circuits/pi-interconnection/keccak/assign_test.go @@ -15,7 +15,7 @@ import ( func TestAssign(t *testing.T) { compiler := NewStrictHasherCompiler(1) - compiled := compiler.WithHashLengths(32).Compile(10, dummy.Compile) + compiled := compiler.WithHashLengths(32).Compile(dummy.Compile) var zero [32]byte diff --git a/prover/circuits/pi-interconnection/keccak/snark.go b/prover/circuits/pi-interconnection/keccak/snark.go index 9a09d16ab..2d01fab76 100644 --- a/prover/circuits/pi-interconnection/keccak/snark.go +++ b/prover/circuits/pi-interconnection/keccak/snark.go @@ -28,7 +28,8 @@ type BlockHasher interface { Sum(nbIn frontend.Variable, bytess ...[32]frontend.Variable) [32]frontend.Variable } -// Hasher is stateless from the user's perspective, but in the background it prepares columns for the Vortex prover +// Hasher prepares the input columns for the Vortex verifier in a SNARK circuit. +// It is stateless from the user's perspective, but it does its work as it is being fed input.
type Hasher struct { api frontend.API nbLanes int diff --git a/prover/config/config.go b/prover/config/config.go index 2c327f35e..093c2bb11 100644 --- a/prover/config/config.go +++ b/prover/config/config.go @@ -255,7 +255,6 @@ type PublicInput struct { MaxNbDecompression int `mapstructure:"max_nb_decompression" validate:"gte=0"` MaxNbExecution int `mapstructure:"max_nb_execution" validate:"gte=0"` MaxNbCircuits int `mapstructure:"max_nb_circuits" validate:"gte=0"` // if not set, will be set to MaxNbDecompression + MaxNbExecution - MaxNbKeccakF int `mapstructure:"max_nb_keccakf" validate:"gte=0"` ExecutionMaxNbMsg int `mapstructure:"execution_max_nb_msg" validate:"gte=0"` L2MsgMerkleDepth int `mapstructure:"l2_msg_merkle_depth" validate:"gte=0"` L2MsgMaxNbMerkle int `mapstructure:"l2_msg_max_nb_merkle" validate:"gte=0"` // if not explicitly provided (i.e. non-positive) it will be set to maximum diff --git a/prover/go.mod b/prover/go.mod index 57dcf18f8..58bff804a 100644 --- a/prover/go.mod +++ b/prover/go.mod @@ -9,7 +9,7 @@ require ( github.com/consensys/compress v0.2.5 github.com/consensys/gnark v0.11.1-0.20240910135928-e8cb61d0be1d github.com/consensys/gnark-crypto v0.14.1-0.20240909204211-78a6dc12799c - github.com/consensys/go-corset v0.0.0-20240920085445-2d0aad43bfbd + github.com/consensys/go-corset v0.0.0-20241009181119-b687f2ec84ed github.com/crate-crypto/go-kzg-4844 v1.1.0 github.com/dlclark/regexp2 v1.11.2 github.com/fxamacker/cbor/v2 v2.7.0 diff --git a/prover/go.sum b/prover/go.sum index 80b3b8933..ca334f16b 100644 --- a/prover/go.sum +++ b/prover/go.sum @@ -100,8 +100,8 @@ github.com/consensys/gnark v0.11.1-0.20240910135928-e8cb61d0be1d h1:TmNupI1+K5/L github.com/consensys/gnark v0.11.1-0.20240910135928-e8cb61d0be1d/go.mod h1:f9CH911SPCrbSZp5z9LYzJ3rZvI7mOUzzf48lCZO/5o= github.com/consensys/gnark-crypto v0.14.1-0.20240909204211-78a6dc12799c h1:fkRJCyz4EBjDNhiNTyyyEJBEW7RsFzmDVd/ot4jtSrE= github.com/consensys/gnark-crypto v0.14.1-0.20240909204211-78a6dc12799c/go.mod h1:AL8vs/7MyZ0P93tcNDkUWVwf2rWLUGFUP/1iqiF7h4E= -github.com/consensys/go-corset v0.0.0-20240920085445-2d0aad43bfbd h1:yNUrtBL6JEGq9lpTHyjTZ5i3VGhqkKhoYokTVnt0Iqs= -github.com/consensys/go-corset v0.0.0-20240920085445-2d0aad43bfbd/go.mod h1:J64guTfpmfXl4Yk2D7lsWdYg0ilP+N8JWPudP7+sZpA= +github.com/consensys/go-corset v0.0.0-20241009181119-b687f2ec84ed h1:tA+JpjGO3tB2+Q7lsrlDper2L5BcvgS2sNd6DLS2ViM= +github.com/consensys/go-corset v0.0.0-20241009181119-b687f2ec84ed/go.mod h1:J64guTfpmfXl4Yk2D7lsWdYg0ilP+N8JWPudP7+sZpA= github.com/coreos/go-semver v0.3.0/go.mod h1:nnelYz7RCh+5ahJtPPxZlU+153eP4D4r3EedlOD2RNk= github.com/coreos/go-systemd/v22 v22.3.2/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= github.com/coreos/go-systemd/v22 v22.5.0/go.mod h1:Y58oyj3AT4RCenI/lSvhwexgC+NSVTIJ3seZv2GcEnc= diff --git a/prover/zkevm/arithmetization/assignment.go b/prover/zkevm/arithmetization/assignment.go index 125e4a846..ab5f9fe98 100644 --- a/prover/zkevm/arithmetization/assignment.go +++ b/prover/zkevm/arithmetization/assignment.go @@ -40,14 +40,14 @@ func AssignFromLtTraces(run *wizard.ProverRuntime, schema *air.Schema, expTraces if uint(limit) < height { level = logrus.ErrorLevel - err77 = errors.Join(err77, fmt.Errorf("limit overflow: module %q overflows its limit height=%v limit=%v ratio=%v", name, height, limit, ratio)) + err77 = errors.Join(err77, fmt.Errorf("limit overflow: module '%s' overflows its limit height=%v limit=%v ratio=%v", name, height, limit, ratio)) } logrus.StandardLogger().Logf(level, "module utilization 
module=%v height=%v limit=%v ratio=%v", name, height, limit, ratio) } if err77 != nil { - logrus.Errorf("Error code 77: \n%v", err77) + logrus.Errorf("Error code 77: %v", err77) os.Exit(TraceOverflowExitCode) } diff --git a/prover/zkevm/arithmetization/definition.go b/prover/zkevm/arithmetization/definition.go index 2adb618dd..51e517512 100644 --- a/prover/zkevm/arithmetization/definition.go +++ b/prover/zkevm/arithmetization/definition.go @@ -87,7 +87,7 @@ func (s *schemaScanner) scanConstraints() { corsetCSs := s.Schema.Constraints().Collect() for _, corsetCS := range corsetCSs { - name := corsetCS.String() + name := fmt.Sprintf("%v", corsetCS) if s.Comp.QueriesNoParams.Exists(ifaces.QueryID(name)) { continue } @@ -230,8 +230,8 @@ func (s *schemaScanner) castExpression(expr air.Expr) *symbolic.Expression { return symbolic.NewVariable(c) default: - - panic(fmt.Sprintf("unsupported type: %T for %v", e, e.String())) + eStr := fmt.Sprintf("%v", e) + panic(fmt.Sprintf("unsupported type: %T for %v", e, eStr)) }
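Note on the `MaxNbKeccakF` removal above: the keccak-f budget is now derived inside `StrictHasherCompiler.Compile` from the hash lengths registered via `WithHashLengths`, instead of being threaded through `config.PublicInput` and every call site. The standalone sketch below reproduces that arithmetic under one assumption: `lanesPerBlock = 17` (a 136-byte Keccak-256 rate, 8 bytes per lane), since the constant's definition is not part of this diff.

```go
// Minimal sketch of the keccak-f counting introduced in
// StrictHasherCompiler.Compile; not part of the patch itself.
package main

import "fmt"

// Assumed value: 136-byte Keccak-256 rate / 8 bytes per lane.
// The real constant lives in the keccak package.
const lanesPerBlock = 17

// nbKeccakF returns the number of keccak-f permutations needed to hash
// inputs of the given byte lengths: one block per 136 absorbed bytes,
// plus one extra block per input for padding. No extra permutation is
// needed for squeezing because the 32-byte digest fits in one block.
func nbKeccakF(hashLengths []int) int {
	const blockNbBytesIn = lanesPerBlock * 8 // 136 bytes absorbed per permutation
	n := 0
	for _, l := range hashLengths {
		n += l/blockNbBytesIn + 1 // extra room for padding
	}
	return n
}

func main() {
	// Mirrors the TestAssign change: a single 32-byte hash now needs
	// only 1 permutation instead of the previously hard-coded 10.
	fmt.Println(nbKeccakF([]int{32}))  // 1
	fmt.Println(nbKeccakF([]int{200})) // 2: one full 136-byte block plus padding
}
```

This is why the call sites in `bench/main.go`, `compile/test_compile.go`, the e2e tests, and `config.PublicInput` could all drop their keccak-f parameters: the budget is a pure function of the declared hash lengths.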