diff --git a/.github/workflows/acceptance-tests.yml b/.github/workflows/acceptance-tests.yml index b915f81e891..4767c53b140 100644 --- a/.github/workflows/acceptance-tests.yml +++ b/.github/workflows/acceptance-tests.yml @@ -14,7 +14,7 @@ jobs: ${{ github.event_name == 'pull_request_target' && github.event.pull_request.head.repo.full_name != github.repository && 'external' || 'internal' }} - runs-on: ubuntu-latest + runs-on: ubuntu-22.04 steps: - run: true @@ -31,16 +31,16 @@ jobs: runner_index: [0,1,2,3,4,5,6,7,8,9,10,11,12,13,14,15] steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: ref: ${{ github.event.pull_request.head.sha || github.ref }} - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: get acceptance test report - uses: dawidd6/action-download-artifact@v3 + uses: dawidd6/action-download-artifact@e7466d1a7587ed14867642c2ca74b5bcc1e19a2d with: branch: main name_is_regexp: true path: tmp/junit-xml-reports-downloaded if_no_artifact_found: true - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: Split tests id: split-tests - uses: r7kamura/split-tests-by-timings@v0 + uses: r7kamura/split-tests-by-timings@9322bd292d9423e2bc5a65bec548901801341e3f with: reports: tmp/junit-xml-reports-downloaded glob: 'acceptance-tests/tests/src/test/java/org/hyperledger/besu/tests/acceptance/**/*Test.java' @@ -68,12 +68,12 @@ jobs: - name: cleanup tempfiles run: rm testList.txt gradleArgs.txt - name: Upload Acceptance Test Results - uses: actions/upload-artifact@v3.1.0 + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 with: name: acceptance-node-${{matrix.runner_index}}-test-results path: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml' - name: Publish Test Report - uses: mikepenz/action-junit-report@v4 + uses: mikepenz/action-junit-report@5f47764eec0e1c1f19f40c8e60a5ba47e47015c5 if: (success() || failure()) # always run even if the build step fails with: report_paths: 'acceptance-tests/tests/build/test-results/acceptanceTest/TEST-*.xml' diff --git a/.github/workflows/artifacts.yml b/.github/workflows/artifacts.yml index 3b33f43bce1..91abbbc4438 100644 --- a/.github/workflows/artifacts.yml +++ b/.github/workflows/artifacts.yml @@ -13,14 +13,14 @@ jobs: contents: write steps: - name: checkout - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Set up JDK 17 - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: 'temurin' java-version: '17' - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: assemble distributions run: ./gradlew -Prelease.releaseVersion=${{github.ref_name}} assemble -Dorg.gradle.parallel=true -Dorg.gradle.caching=true @@ -31,17 +31,17 @@ jobs: echo "zipSha=$(shasum -a 256 besu*.zip)" >> $GITHUB_OUTPUT echo "tarSha=$(shasum -a 256 besu*.tar.gz)" >> $GITHUB_OUTPUT - name: upload tarball - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 with: path: 'build/distributions/besu*.tar.gz' name: besu-${{ github.ref_name }}.tar.gz - name: upload zipfile - uses: 
actions/upload-artifact@v3 + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 with: path: 'build/distributions/besu*.zip' name: besu-${{ github.ref_name }}.zip - name: Upload Release assets - uses: softprops/action-gh-release@v1 + uses: softprops/action-gh-release@de2c0eb89ae2a093876385947365aca7b0e5f844 with: append_body: true files: | @@ -57,12 +57,12 @@ jobs: if: ${{ github.actor != 'dependabot[bot]' }} steps: - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: adopt java-version: 17 - name: Download zip - uses: actions/download-artifact@v3 + uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a with: name: besu-${{ github.ref_name }}.zip - name: test Besu diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml index f8842123330..da937f24257 100644 --- a/.github/workflows/codeql.yml +++ b/.github/workflows/codeql.yml @@ -31,15 +31,15 @@ jobs: security-events: write steps: - name: Checkout repository - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: 'temurin' java-version: 17 # Initializes the CodeQL tools for scanning. - name: Initialize CodeQL - uses: github/codeql-action/init@v2 + uses: github/codeql-action/init@2f93e4319b2f04a2efc38fa7f78bd681bc3f7b2f with: languages: ${{ matrix.language }} # If you wish to specify custom queries, you can do so here or in a config file. @@ -48,9 +48,9 @@ jobs: queries: security-and-quality,security-extended - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: compileJava noscan run: | JAVA_OPTS="-Xmx2048M" ./gradlew --no-scan compileJava - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v2 + uses: github/codeql-action/analyze@2f93e4319b2f04a2efc38fa7f78bd681bc3f7b2f diff --git a/.github/workflows/docker.yml b/.github/workflows/docker.yml index c487ed84b28..dbab691f965 100644 --- a/.github/workflows/docker.yml +++ b/.github/workflows/docker.yml @@ -11,14 +11,14 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: hadoLint_openj9-jdk_17 run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile - name: hadoLint_openjdk_17 @@ -55,33 +55,33 @@ jobs: echo "ARCH=arm64" >> $GITHUB_OUTPUT fi - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: short sha id: shortSha run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: install goss run: | mkdir -p docker/reports curl -L 
https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} - - name: build and test docker - uses: gradle/gradle-build-action@v2.12.0 - env: - architecture: ${{ steps.prep.outputs.ARCH }} - with: - arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }} - name: login to ghcr - uses: docker/login-action@v3.0.0 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: registry: ${{ env.registry }} username: ${{ github.actor }} password: ${{ secrets.GITHUB_TOKEN }} + - name: build and test docker + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa + env: + architecture: ${{ steps.prep.outputs.ARCH }} + with: + arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Prelease.releaseVersion=${{ github.ref_name }} - name: publish env: architecture: ${{ steps.prep.outputs.ARCH }} @@ -94,16 +94,16 @@ jobs: packages: write steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: login to ghcr - uses: docker/login-action@v3.0.0 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: registry: ${{ env.registry }} username: ${{ github.actor }} diff --git a/.github/workflows/integration-tests.yml b/.github/workflows/integration-tests.yml index 68115ea40bf..63297b5f930 100644 --- a/.github/workflows/integration-tests.yml +++ b/.github/workflows/integration-tests.yml @@ -9,63 +9,36 @@ env: GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false" jobs: - shouldRun: - name: checks to ensure we should run + authorize: + environment: + ${{ github.event_name == 'pull_request_target' && + github.event.pull_request.head.repo.full_name != github.repository && + 'external' || 'internal' }} runs-on: ubuntu-22.04 - outputs: - shouldRun: ${{steps.shouldRun.outputs.result}} steps: - - name: required check - id: shouldRun - uses: actions/github-script@v7.0.1 - env: - # fun fact, this changes based on incoming event, it will be different when we run this on pushes to main - RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }} - with: - script: | - const { RELEVANT_SHA } = process.env; - const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({ - owner: context.repo.owner, - repo: context.repo.repo, - ref: RELEVANT_SHA, - }); - - const intTested = statuses && statuses.filter(({ context }) => context === 'integration-tests'); - const alreadyRun = intTested && intTested.find(({ state }) => state === 'success') > 0; - const { data: reviews } = await github.rest.pulls.listReviews({ - owner: context.repo.owner, - repo: context.repo.repo, - pull_number: context.issue.number, - }); - const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED'); - const shouldRun = !alreadyRun && github.actor != 'dependabot[bot]' && (approvingReviews.length > 0); - - console.log("tests should be run = %j", shouldRun); - console.log("alreadyRun = %j", alreadyRun); - console.log("approvingReviews = %j", approvingReviews.length); - 
- return shouldRun; + - run: true integration-tests: runs-on: ubuntu-22.04 - needs: shouldRun - if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }} + needs: authorize permissions: statuses: write checks: write steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: run integration tests run: ./gradlew integrationTest compileJmh -Dorg.gradle.parallel=true -Dorg.gradle.caching=true - name: Publish Test Report - uses: mikepenz/action-junit-report@v4 + uses: mikepenz/action-junit-report@5f47764eec0e1c1f19f40c8e60a5ba47e47015c5 if: (success() || failure()) with: report_paths: '**/build/test-results/integrationTest/TEST-*.xml' diff --git a/.github/workflows/nightly.yml b/.github/workflows/nightly.yml index 9d2778fba47..90e43417c46 100644 --- a/.github/workflows/nightly.yml +++ b/.github/workflows/nightly.yml @@ -16,14 +16,14 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: hadoLint_openj9-jdk_17 run: docker run --rm -i hadolint/hadolint < docker/openj9-jdk-17/Dockerfile - name: hadoLint_openjdk_17 @@ -59,19 +59,19 @@ jobs: echo "ARCH=arm64" >> $GITHUB_OUTPUT fi - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: short sha id: shortSha run: echo "sha=$(git rev-parse --short HEAD)" >> $GITHUB_OUTPUT - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: build image - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa with: arguments: distDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main - name: install goss @@ -79,13 +79,13 @@ jobs: mkdir -p docker/reports curl -L https://github.com/aelsabbahy/goss/releases/download/v0.4.4/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} -o ./docker/tests/goss-${{ steps.prep.outputs.PLATFORM_PAIR }} - name: test docker - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa env: architecture: ${{ steps.prep.outputs.ARCH }} with: arguments: testDocker -PdockerOrgName=${{ env.registry }}/${{ github.repository_owner }} -Pbranch=main - name: login to ghcr - uses: docker/login-action@v3.0.0 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: registry: ${{ env.registry }} username: ${{ github.actor }} @@ -102,16 +102,16 @@ jobs: runs-on: ubuntu-22.04 steps: - name: Checkout Repo - uses: 
actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: Login to DockerHub - uses: docker/login-action@v3.0.0 + uses: docker/login-action@343f7c4344506bcbf9b4de18042ae17996df046d with: registry: ${{ env.registry }} username: ${{ github.actor }} diff --git a/.github/workflows/parallel-unit-tests.yml b/.github/workflows/parallel-unit-tests.yml deleted file mode 100644 index b12fa43655b..00000000000 --- a/.github/workflows/parallel-unit-tests.yml +++ /dev/null @@ -1,49 +0,0 @@ -name: parallel-unit-tests -#experimental work in progress - trying to figure out how to split tests across multi-modules by runtime -on: - workflow_dispatch: - -env: - GRADLE_OPTS: "-Dorg.gradle.daemon=false" - total-runners: 4 -jobs: - junit: - runs-on: ubuntu-latest - strategy: - fail-fast: false - matrix: - runner_index: - - 0 - - 1 - - 2 - - 3 - steps: - - name: Checkout Repo - uses: actions/checkout@v4.1.1 - - name: Split tests - id: split-tests - uses: chaosaffe/split-tests@v1-alpha.1 - with: - glob: '**/src/test/java/**/*.java' - split-total: ${{ env.total-runners }} - split-index: ${{ matrix.runner_index }} - line-count: true - - name: Set up Java - uses: actions/setup-java@v4.0.0 - with: - distribution: adopt - java-version: 17 - cache: gradle - - name: write out test list - run: echo "${{ steps.split-tests.outputs.test-suite }}" >> testList.txt - - name: debug testfile paths - run: cat testList.txt - - name: format gradle args - # regex means: truncate file paths to align with package name, replacing with tests switch, then drop file extension, - # then swap path delimiter with package delimiter - run: cat testList.txt | sed -e 's/[^ ]*src\/test\/java\//--tests\ /g' -e 's/\.java//g' -e 's/\//\./g' >> gradleArgs.txt - - name: debug test class list - run: cat gradleArgs.txt - - name: run unit tests - run: ./gradlew test `cat gradleArgs.txt` - diff --git a/.github/workflows/pr-checklist-on-open.yml b/.github/workflows/pr-checklist-on-open.yml index f849ba760bb..d0b26592670 100644 --- a/.github/workflows/pr-checklist-on-open.yml +++ b/.github/workflows/pr-checklist-on-open.yml @@ -10,7 +10,7 @@ jobs: permissions: pull-requests: write steps: - - uses: actions/github-script@v7.0.1 + - uses: actions/github-script@60a0d83039c74a4aee543508d2ffcb1c3799cdea with: github-token: ${{secrets.GITHUB_TOKEN}} script: | diff --git a/.github/workflows/pre-review.yml b/.github/workflows/pre-review.yml index 5abbcb6d0a3..2baca446251 100644 --- a/.github/workflows/pre-review.yml +++ b/.github/workflows/pre-review.yml @@ -1,7 +1,9 @@ name: pre-review on: - pull_request: + pull_request_target: + branches: + - main workflow_dispatch: permissions: @@ -9,34 +11,49 @@ permissions: checks: write jobs: + authorize: + environment: + ${{ github.event_name == 'pull_request_target' && + github.event.pull_request.head.repo.full_name != github.repository && + 'external' || 'internal' }} + runs-on: ubuntu-22.04 + steps: + - run: true + repolint: name: "Repository Linting" runs-on: ubuntu-22.04 container: ghcr.io/todogroup/repolinter:v0.11.2 steps: - name: Checkout Code - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + ref: ${{ 
github.event.pull_request.head.sha || github.ref }} - name: Lint Repo run: bundle exec /app/bin/repolinter.js --rulesetUrl https://raw.githubusercontent.com/hyperledger-labs/hyperledger-community-management-tools/main/repo_structure/repolint.json --format markdown gradle-wrapper: name: "Gradle Wrapper Validation" runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4.1.1 - - uses: gradle/wrapper-validation-action@v1.1.0 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} + - uses: gradle/wrapper-validation-action@56b90f209b02bf6d1deae490e9ef18b21a389cd4 spotless: runs-on: ubuntu-22.04 if: ${{ github.actor != 'dependabot[bot]' }} steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: Setup Gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: run spotless run: ./gradlew spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true compile: @@ -45,21 +62,23 @@ jobs: needs: [spotless, gradle-wrapper, repolint] steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: Setup Gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: Gradle Compile run: ./gradlew build -x test -x spotlessCheck -Dorg.gradle.parallel=true -Dorg.gradle.caching=true unitTests: env: GRADLEW_UNIT_TEST_ARGS: ${{matrix.gradle_args}} runs-on: ubuntu-22.04 - needs: [ compile ] + needs: [ compile, authorize ] permissions: checks: write statuses: write @@ -75,19 +94,21 @@ jobs: - "ethereum:core:test" steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: Setup Gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: run unit tests id: unitTest run: ./gradlew $GRADLEW_UNIT_TEST_ARGS -Dorg.gradle.parallel=true -Dorg.gradle.caching=true - name: Publish Test Report - uses: mikepenz/action-junit-report@v4 + uses: mikepenz/action-junit-report@5f47764eec0e1c1f19f40c8e60a5ba47e47015c5 if: success() || failure() # always run even if the build step fails with: report_paths: '**/test-results/**/TEST-*.xml' diff --git a/.github/workflows/reference-tests.yml b/.github/workflows/reference-tests.yml index 55710028916..e690b68a635 100644 --- a/.github/workflows/reference-tests.yml +++ b/.github/workflows/reference-tests.yml @@ -1,76 +1,35 @@ name: reference-tests on: - pull_request: - pull_request_review: - types: - - submitted + pull_request_target: + branches: + - main env: 
GRADLE_OPTS: "-Xmx6g -Dorg.gradle.daemon=false" total-runners: 6 jobs: - shouldRun: - name: checks to ensure we should run - # necessary because there is no single PR approved event, need to check all comments/approvals/denials - # might also be a job running, and additional approvals - runs-on: ubuntu-22.04 - outputs: - shouldRun: ${{steps.shouldRun.outputs.result}} - steps: - - name: required check - id: shouldRun - uses: actions/github-script@v7.0.1 - env: - # fun fact, this changes based on incoming event, it will be different when we run this on pushes to main - RELEVANT_SHA: ${{ github.event.pull_request.head.sha || github.sha }} - with: - script: | - const { RELEVANT_SHA } = process.env; - const { data: { statuses } } = await github.rest.repos.getCombinedStatusForRef({ - owner: context.repo.owner, - repo: context.repo.repo, - ref: RELEVANT_SHA, - }); - - - const refTested = statuses && statuses.filter(({ context }) => context === 'reference-tests'); - const alreadyRun = refTested && refTested.find(({ state }) => state === 'success') > 0; - const { data: reviews } = await github.rest.pulls.listReviews({ - owner: context.repo.owner, - repo: context.repo.repo, - pull_number: context.issue.number, - }); - const approvingReviews = reviews && reviews.filter(review => review.state === 'APPROVED'); - const shouldRun = !alreadyRun && github.actor != 'dependabot[bot]' && (approvingReviews.length > 0); - - console.log("tests should be run = %j", shouldRun); - console.log("alreadyRun = %j", alreadyRun); - console.log("approvingReviews = %j", approvingReviews.length); - - return shouldRun; - prepareReferenceTestEthereum: runs-on: ubuntu-22.04 needs: shouldRun - if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }} steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} submodules: recursive set-safe-directory: true - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: temurin java-version: 17 - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: execute generate reference tests run: ./gradlew ethereum:referencetests:blockchainReferenceTests ethereum:referencetests:generalstateReferenceTests ethereum:referencetests:generalstateRegressionReferenceTests -Dorg.gradle.parallel=true -Dorg.gradle.caching=true - name: store generated tests - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 with: name: 'reference-tests' path: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java' @@ -82,28 +41,28 @@ jobs: checks: write needs: - prepareReferenceTestEthereum - if: ${{ needs.shouldRun.outputs.shouldRun == 'true' }} strategy: fail-fast: true matrix: runner_index: [0,1,2,3,4,5] steps: - name: Checkout Repo - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 with: + ref: ${{ github.event.pull_request.head.sha || github.ref }} submodules: recursive - name: Set up Java - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: adopt-openj9 java-version: 17 - name: retrieve generated tests - uses: actions/download-artifact@v3.0.2 + uses: actions/download-artifact@9bc31d5ccc31df68ecc42ccf4149144866c47d8a with: 
name: 'reference-tests' path: 'ethereum/referencetests/build/generated/sources/reference-test/' - name: get reference test report - uses: dawidd6/action-download-artifact@v3 + uses: dawidd6/action-download-artifact@e7466d1a7587ed14867642c2ca74b5bcc1e19a2d with: branch: main name_is_regexp: true path: tmp/ref-xml-reports-downloaded if_no_artifact_found: true - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: Split tests id: split-tests - uses: r7kamura/split-tests-by-timings@v0 + uses: r7kamura/split-tests-by-timings@9322bd292d9423e2bc5a65bec548901801341e3f with: reports: tmp/ref-xml-reports-downloaded glob: 'ethereum/referencetests/build/generated/sources/reference-test/**/*.java' @@ -125,13 +84,13 @@ jobs: - name: run reference tests run: ./gradlew ethereum:referenceTests:referenceTests `cat refTestArgs.txt` -Dorg.gradle.parallel=true -Dorg.gradle.caching=true - name: Upload Test Report - uses: actions/upload-artifact@v3 + uses: actions/upload-artifact@a8a3f3ad30e3422c9c7b888a15615d19a852ae32 if: always() # always run even if the previous step fails with: name: reference-test-node-${{matrix.runner_index}}-results path: '**/build/test-results/referenceTests/TEST-*.xml' - name: Publish Test Report - uses: mikepenz/action-junit-report@v4 + uses: mikepenz/action-junit-report@5f47764eec0e1c1f19f40c8e60a5ba47e47015c5 if: success() || failure() # always run even if the build step fails with: report_paths: '**/build/test-results/referenceTest/TEST-*.xml' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index e0c4f0bb298..54c28bea64a 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -7,8 +7,8 @@ jobs: dockerPromoteX64: runs-on: ubuntu-22.04 steps: - - uses: actions/checkout@v4.1.1 - - uses: actions/setup-java@v4.0.0 + - uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 + - uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: 'temurin' # See 'Supported distributions' for available options java-version: '17' @@ -16,7 +16,7 @@ jobs: - name: Login to DockerHub run: echo '${{ secrets.DOCKER_PASSWORD_RW }}' | docker login -u '${{ secrets.DOCKER_USER_RW }}' --password-stdin - name: Setup Gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: Docker upload run: ./gradlew "-Prelease.releaseVersion=${{ github.ref_name }}" "-PdockerOrgName=${{ secrets.DOCKER_ORG }}" dockerUploadRelease - name: Docker manifest diff --git a/.github/workflows/sonarcloud.yml b/.github/workflows/sonarcloud.yml index 7c4acee7e40..04bb0405b87 100644 --- a/.github/workflows/sonarcloud.yml +++ b/.github/workflows/sonarcloud.yml @@ -16,20 +16,20 @@ jobs: runs-on: ubuntu-latest steps: - name: checkout - uses: actions/checkout@v4.1.1 + uses: actions/checkout@b4ffde65f46336ab88eb53be808477a3936bae11 - name: Set up JDK 17 - uses: actions/setup-java@v4.0.0 + uses: actions/setup-java@387ac29b308b003ca37ba93a6cab5eb57c8f5f93 with: distribution: 'temurin' java-version: '17' - name: Cache SonarCloud packages - uses: actions/cache@v3 + uses: actions/cache@e12d46a63a90f2fae62d114769bbf2a179198b5c with: path: ~/.sonar/cache key: ${{ runner.os }}-sonar restore-keys: ${{ runner.os }}-sonar - name: setup gradle - uses: gradle/gradle-build-action@v2.12.0 + uses: gradle/gradle-build-action@a8f75513eafdebd8141bd1cd4e30fcd194af8dfa - name: Build and analyze 
env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} # Needed to get PR information, if any diff --git a/CHANGELOG.md b/CHANGELOG.md index f49917101ba..89d8ca54964 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -11,6 +11,7 @@ ### Deprecations +- `--Xsnapsync-synchronizer-flat-db-healing-enabled` is deprecated (always enabled). [#6499](https://github.com/hyperledger/besu/pull/6499) ### Additions and Improvements - Upgrade Prometheus and Opentelemetry dependencies [#6422](https://github.com/hyperledger/besu/pull/6422) diff --git a/besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java b/besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java index b24a8e1c2a1..b4dcefbc210 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/BesuCommand.java @@ -48,6 +48,7 @@ import org.hyperledger.besu.cli.error.BesuParameterExceptionHandler; import org.hyperledger.besu.cli.options.MiningOptions; import org.hyperledger.besu.cli.options.TransactionPoolOptions; +import org.hyperledger.besu.cli.options.stable.ApiConfigurationOptions; import org.hyperledger.besu.cli.options.stable.DataStorageOptions; import org.hyperledger.besu.cli.options.stable.EthstatsOptions; import org.hyperledger.besu.cli.options.stable.GraphQlOptions; @@ -107,7 +108,6 @@ import org.hyperledger.besu.enclave.EnclaveFactory; import org.hyperledger.besu.ethereum.GasLimitCalculator; import org.hyperledger.besu.ethereum.api.ApiConfiguration; -import org.hyperledger.besu.ethereum.api.ImmutableApiConfiguration; import org.hyperledger.besu.ethereum.api.graphql.GraphQLConfiguration; import org.hyperledger.besu.ethereum.api.jsonrpc.JsonRpcConfiguration; import org.hyperledger.besu.ethereum.api.jsonrpc.RpcApis; @@ -859,43 +859,9 @@ static class MetricsOptionGroup { description = "Path to PID file (optional)") private final Path pidPath = null; - @CommandLine.Option( - names = {"--api-gas-price-blocks"}, - description = "Number of blocks to consider for eth_gasPrice (default: ${DEFAULT-VALUE})") - private final Long apiGasPriceBlocks = 100L; - - @CommandLine.Option( - names = {"--api-gas-price-percentile"}, - description = "Percentile value to measure for eth_gasPrice (default: ${DEFAULT-VALUE})") - private final Double apiGasPricePercentile = 50.0; - - @CommandLine.Option( - names = {"--api-gas-price-max"}, - description = "Maximum gas price for eth_gasPrice (default: ${DEFAULT-VALUE})") - private final Long apiGasPriceMax = 500_000_000_000L; - - @CommandLine.Option( - names = {"--api-gas-and-priority-fee-limiting-enabled"}, - hidden = true, - description = - "Set to enable gas price and minimum priority fee limit in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})") - private final Boolean apiGasAndPriorityFeeLimitingEnabled = false; - - @CommandLine.Option( - names = {"--api-gas-and-priority-fee-lower-bound-coefficient"}, - hidden = true, - description = - "Coefficient for setting the lower limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})") - private final Long apiGasAndPriorityFeeLowerBoundCoefficient = - ApiConfiguration.DEFAULT_LOWER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT; - - @CommandLine.Option( - names = {"--api-gas-and-priority-fee-upper-bound-coefficient"}, - hidden = true, - description = - "Coefficient for setting the upper limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})") - private final Long 
apiGasAndPriorityFeeUpperBoundCoefficient = - ApiConfiguration.DEFAULT_UPPER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT; + // API Configuration Option Group + @CommandLine.ArgGroup(validate = false, heading = "@|bold API Configuration Options|@%n") + ApiConfigurationOptions apiConfigurationOptions = new ApiConfigurationOptions(); @CommandLine.Option( names = {"--static-nodes-file"}, @@ -904,29 +870,11 @@ static class MetricsOptionGroup { "Specifies the static node file containing the static nodes for this node to connect to") private final Path staticNodesFile = null; - @CommandLine.Option( - names = {"--rpc-max-logs-range"}, - description = - "Specifies the maximum number of blocks to retrieve logs from via RPC. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})") - private final Long rpcMaxLogsRange = 5000L; - - @CommandLine.Option( - names = {"--rpc-gas-cap"}, - description = - "Specifies the gasLimit cap for transaction simulation RPC methods. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})") - private final Long rpcGasCap = 0L; - @CommandLine.Option( names = {"--cache-last-blocks"}, description = "Specifies the number of last blocks to cache (default: ${DEFAULT-VALUE})") private final Integer numberOfblocksToCache = 0; - @Option( - names = {"--rpc-max-trace-filter-range"}, - description = - "Specifies the maximum number of blocks for the trace_filter method. Must be >=0. 0 specifies no limit (default: $DEFAULT-VALUE)") - private final Long maxTraceFilterRange = 1000L; - @Mixin private P2PTLSConfigOptions p2pTLSConfigOptions; @Mixin private PkiBlockCreationOptions pkiBlockCreationOptions; @@ -1481,10 +1429,15 @@ private void validateOptions() { validateTransactionPoolOptions(); validateDataStorageOptions(); validateGraphQlOptions(); + validateApiOptions(); p2pTLSConfigOptions.checkP2PTLSOptionsDependencies(logger, commandLine); pkiBlockCreationOptions.checkPkiBlockCreationOptionsDependencies(logger, commandLine); } + private void validateApiOptions() { + apiConfigurationOptions.validate(commandLine, logger); + } + private void validateTransactionPoolOptions() { transactionPoolOptions.validate(commandLine, getActualGenesisConfigOptions()); } @@ -1568,17 +1521,6 @@ private void validateDnsOptionsParams() { } } - private void checkApiOptionsDependencies() { - CommandLineUtils.checkOptionDependencies( - logger, - commandLine, - "--api-gas-and-priority-fee-limiting-enabled", - !apiGasAndPriorityFeeLimitingEnabled, - asList( - "--api-gas-and-priority-fee-upper-bound-coefficient", - "--api-gas-and-priority-fee-lower-bound-coefficient")); - } - private void ensureValidPeerBoundParams() { maxPeers = p2PDiscoveryOptionGroup.maxPeers; peersLowerBound = unstableNetworkingOptions.toDomainObject().getPeerLowerBound(); @@ -1677,14 +1619,6 @@ && isOptionSet(commandLine, "--sync-min-peers")) { SyncMode.X_CHECKPOINT.equals(getDefaultSyncModeIfNotSet()), singletonList("--Xcheckpoint-post-merge-enabled")); - CommandLineUtils.failIfOptionDoesntMeetRequirement( - commandLine, - "--Xsnapsync-synchronizer-flat option can only be used when -Xsnapsync-synchronizer-flat-db-healing-enabled is true", - unstableSynchronizerOptions.isSnapsyncFlatDbHealingEnabled(), - asList( - "--Xsnapsync-synchronizer-flat-account-healed-count-per-request", - "--Xsnapsync-synchronizer-flat-slot-healed-count-per-request")); - if (!securityModuleName.equals(DEFAULT_SECURITY_MODULE) && nodePrivateKeyFileOption.getNodePrivateKeyFile() != null) { logger.warn( @@ -1739,7 +1673,7 @@ private void configure() throws 
Exception { unstableIpcOptions.isEnabled(), unstableIpcOptions.getIpcPath(), unstableIpcOptions.getRpcIpcApis()); - apiConfiguration = apiConfiguration(); + apiConfiguration = apiConfigurationOptions.apiConfiguration(getMiningParameters()); // hostsWhitelist is a hidden option. If it is specified, add the list to hostAllowlist if (!hostsWhitelist.isEmpty()) { // if allowlist == default values, remove the default values @@ -1917,32 +1851,6 @@ private void checkPrivacyTlsOptionsDependencies() { "--privacy-tls-known-enclave-file")); } - private ApiConfiguration apiConfiguration() { - checkApiOptionsDependencies(); - var builder = - ImmutableApiConfiguration.builder() - .gasPriceBlocks(apiGasPriceBlocks) - .gasPricePercentile(apiGasPricePercentile) - .gasPriceMinSupplier( - getMiningParameters().getMinTransactionGasPrice().getAsBigInteger()::longValueExact) - .gasPriceMax(apiGasPriceMax) - .maxLogsRange(rpcMaxLogsRange) - .gasCap(rpcGasCap) - .isGasAndPriorityFeeLimitingEnabled(apiGasAndPriorityFeeLimitingEnabled) - .maxTraceFilterRange(maxTraceFilterRange); - if (apiGasAndPriorityFeeLimitingEnabled) { - if (apiGasAndPriorityFeeLowerBoundCoefficient > apiGasAndPriorityFeeUpperBoundCoefficient) { - throw new ParameterException( - this.commandLine, - "--api-gas-and-priority-fee-lower-bound-coefficient cannot be greater than the value of --api-gas-and-priority-fee-upper-bound-coefficient"); - } - builder - .lowerBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeLowerBoundCoefficient) - .upperBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeUpperBoundCoefficient); - } - return builder.build(); - } - /** * Metrics Configuration for Besu * diff --git a/besu/src/main/java/org/hyperledger/besu/cli/config/ProfileName.java b/besu/src/main/java/org/hyperledger/besu/cli/config/ProfileName.java index 823dd9f142c..bc5aeea4c9d 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/config/ProfileName.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/config/ProfileName.java @@ -14,8 +14,13 @@ */ package org.hyperledger.besu.cli.config; +import org.apache.commons.lang3.StringUtils; + /** Enum for profile names. Each profile corresponds to a configuration file. */ public enum ProfileName { + + /** The 'MINIMALIST_STAKER' profile */ + MINIMALIST_STAKER("profiles/minimalist-staker.toml"), /** The 'DEV' profile. Corresponds to the 'profiles/dev.toml' configuration file. */ DEV("profiles/dev.toml"); @@ -38,4 +43,9 @@ public enum ProfileName { public String getConfigFile() { return configFile; } + + @Override + public String toString() { + return StringUtils.capitalize(name().replaceAll("_", " ").toLowerCase()); + } } diff --git a/besu/src/main/java/org/hyperledger/besu/cli/options/stable/ApiConfigurationOptions.java b/besu/src/main/java/org/hyperledger/besu/cli/options/stable/ApiConfigurationOptions.java new file mode 100644 index 00000000000..269e1424f20 --- /dev/null +++ b/besu/src/main/java/org/hyperledger/besu/cli/options/stable/ApiConfigurationOptions.java @@ -0,0 +1,142 @@ +/* + * Copyright Hyperledger Besu Contributors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
See the License for the + * specific language governing permissions and limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + */ +package org.hyperledger.besu.cli.options.stable; + +import static java.util.Arrays.asList; + +import org.hyperledger.besu.cli.util.CommandLineUtils; +import org.hyperledger.besu.ethereum.api.ApiConfiguration; +import org.hyperledger.besu.ethereum.api.ImmutableApiConfiguration; +import org.hyperledger.besu.ethereum.core.MiningParameters; + +import org.slf4j.Logger; +import picocli.CommandLine; + +/** + * Handles configuration options for the API in Besu, including gas price settings, RPC log range, + * and trace filter range. + */ +public class ApiConfigurationOptions { + + @CommandLine.Option( + names = {"--api-gas-price-blocks"}, + description = "Number of blocks to consider for eth_gasPrice (default: ${DEFAULT-VALUE})") + private final Long apiGasPriceBlocks = 100L; + + @CommandLine.Option( + names = {"--api-gas-price-percentile"}, + description = "Percentile value to measure for eth_gasPrice (default: ${DEFAULT-VALUE})") + private final Double apiGasPricePercentile = 50.0; + + @CommandLine.Option( + names = {"--api-gas-price-max"}, + description = "Maximum gas price for eth_gasPrice (default: ${DEFAULT-VALUE})") + private final Long apiGasPriceMax = 500_000_000_000L; + + @CommandLine.Option( + names = {"--api-gas-and-priority-fee-limiting-enabled"}, + hidden = true, + description = + "Set to enable gas price and minimum priority fee limit in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})") + private final Boolean apiGasAndPriorityFeeLimitingEnabled = false; + + @CommandLine.Option( + names = {"--api-gas-and-priority-fee-lower-bound-coefficient"}, + hidden = true, + description = + "Coefficient for setting the lower limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})") + private final Long apiGasAndPriorityFeeLowerBoundCoefficient = + ApiConfiguration.DEFAULT_LOWER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT; + + @CommandLine.Option( + names = {"--api-gas-and-priority-fee-upper-bound-coefficient"}, + hidden = true, + description = + "Coefficient for setting the upper limit of gas price and minimum priority fee in eth_getGasPrice and eth_feeHistory (default: ${DEFAULT-VALUE})") + private final Long apiGasAndPriorityFeeUpperBoundCoefficient = + ApiConfiguration.DEFAULT_UPPER_BOUND_GAS_AND_PRIORITY_FEE_COEFFICIENT; + + @CommandLine.Option( + names = {"--rpc-max-logs-range"}, + description = + "Specifies the maximum number of blocks to retrieve logs from via RPC. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})") + private final Long rpcMaxLogsRange = 5000L; + + @CommandLine.Option( + names = {"--rpc-gas-cap"}, + description = + "Specifies the gasLimit cap for transaction simulation RPC methods. Must be >=0. 0 specifies no limit (default: ${DEFAULT-VALUE})") + private final Long rpcGasCap = 0L; + + @CommandLine.Option( + names = {"--rpc-max-trace-filter-range"}, + description = + "Specifies the maximum number of blocks for the trace_filter method. Must be >=0. 0 specifies no limit (default: $DEFAULT-VALUE)") + private final Long maxTraceFilterRange = 1000L; + + /** + * Validates the API options. 
+ * + * @param commandLine CommandLine instance + * @param logger Logger instance + */ + public void validate(final CommandLine commandLine, final Logger logger) { + if (apiGasAndPriorityFeeLimitingEnabled) { + if (apiGasAndPriorityFeeLowerBoundCoefficient > apiGasAndPriorityFeeUpperBoundCoefficient) { + throw new CommandLine.ParameterException( + commandLine, + "--api-gas-and-priority-fee-lower-bound-coefficient cannot be greater than the value of --api-gas-and-priority-fee-upper-bound-coefficient"); + } + } + checkApiOptionsDependencies(commandLine, logger); + } + + private void checkApiOptionsDependencies(final CommandLine commandLine, final Logger logger) { + CommandLineUtils.checkOptionDependencies( + logger, + commandLine, + "--api-gas-and-priority-fee-limiting-enabled", + !apiGasAndPriorityFeeLimitingEnabled, + asList( + "--api-gas-and-priority-fee-upper-bound-coefficient", + "--api-gas-and-priority-fee-lower-bound-coefficient")); + } + + /** + * Creates an ApiConfiguration based on the provided options. + * + * @param miningParameters The mining parameters + * @return An ApiConfiguration instance + */ + public ApiConfiguration apiConfiguration(final MiningParameters miningParameters) { + var builder = + ImmutableApiConfiguration.builder() + .gasPriceBlocks(apiGasPriceBlocks) + .gasPricePercentile(apiGasPricePercentile) + .gasPriceMinSupplier( + miningParameters.getMinTransactionGasPrice().getAsBigInteger()::longValueExact) + .gasPriceMax(apiGasPriceMax) + .maxLogsRange(rpcMaxLogsRange) + .gasCap(rpcGasCap) + .isGasAndPriorityFeeLimitingEnabled(apiGasAndPriorityFeeLimitingEnabled) + .maxTraceFilterRange(maxTraceFilterRange); + if (apiGasAndPriorityFeeLimitingEnabled) { + builder + .lowerBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeLowerBoundCoefficient) + .upperBoundGasAndPriorityFeeCoefficient(apiGasAndPriorityFeeUpperBoundCoefficient); + } + return builder.build(); + } +} diff --git a/besu/src/main/java/org/hyperledger/besu/cli/options/stable/DataStorageOptions.java b/besu/src/main/java/org/hyperledger/besu/cli/options/stable/DataStorageOptions.java index a1f4b950bbb..72331394889 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/options/stable/DataStorageOptions.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/options/stable/DataStorageOptions.java @@ -74,7 +74,7 @@ public static class Unstable { @CommandLine.Option( hidden = true, - names = {BONSAI_LIMIT_TRIE_LOGS_ENABLED}, + names = {BONSAI_LIMIT_TRIE_LOGS_ENABLED, "--Xbonsai-trie-log-pruning-enabled"}, description = "Limit the number of trie logs that are retained. 
(default: ${DEFAULT-VALUE})") private boolean bonsaiLimitTrieLogsEnabled = DEFAULT_BONSAI_LIMIT_TRIE_LOGS_ENABLED; diff --git a/besu/src/main/java/org/hyperledger/besu/cli/options/unstable/SynchronizerOptions.java b/besu/src/main/java/org/hyperledger/besu/cli/options/unstable/SynchronizerOptions.java index 8760258fba2..2825811e525 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/options/unstable/SynchronizerOptions.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/options/unstable/SynchronizerOptions.java @@ -106,7 +106,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_HEIGHT_FLAG, hidden = true, - defaultValue = "200", paramLabel = "", description = "Minimum height difference before switching fast sync download peers (default: ${DEFAULT-VALUE})") @@ -116,7 +115,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = DOWNLOADER_CHANGE_TARGET_THRESHOLD_BY_TD_FLAG, hidden = true, - defaultValue = "1000000000000000000", paramLabel = "", description = "Minimum total difficulty difference before switching fast sync download peers (default: ${DEFAULT-VALUE})") @@ -126,7 +124,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = DOWNLOADER_HEADER_REQUEST_SIZE_FLAG, hidden = true, - defaultValue = "200", paramLabel = "", description = "Number of headers to request per packet (default: ${DEFAULT-VALUE})") private int downloaderHeaderRequestSize = @@ -135,7 +132,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = DOWNLOADER_CHECKPOINT_TIMEOUTS_PERMITTED_FLAG, hidden = true, - defaultValue = "5", paramLabel = "", description = "Number of tries to attempt to download checkpoints before stopping (default: ${DEFAULT-VALUE})") @@ -145,7 +141,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = DOWNLOADER_CHAIN_SEGMENT_SIZE_FLAG, hidden = true, - defaultValue = "200", paramLabel = "", description = "Distance between checkpoint headers (default: ${DEFAULT-VALUE})") private int downloaderChainSegmentSize = @@ -154,7 +149,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = DOWNLOADER_PARALLELISM_FLAG, hidden = true, - defaultValue = "4", paramLabel = "", description = "Number of threads to provide to chain downloader (default: ${DEFAULT-VALUE})") private int downloaderParallelism = SynchronizerConfiguration.DEFAULT_DOWNLOADER_PARALLELISM; @@ -162,7 +156,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = TRANSACTIONS_PARALLELISM_FLAG, hidden = true, - defaultValue = "2", paramLabel = "", description = "Number of threads to commit to transaction processing (default: ${DEFAULT-VALUE})") @@ -179,7 +172,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = PIVOT_DISTANCE_FROM_HEAD_FLAG, hidden = true, - defaultValue = "50", paramLabel = "", description = "Distance from initial chain head to fast sync target (default: ${DEFAULT-VALUE})") @@ -188,7 +180,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = FULL_VALIDATION_RATE_FLAG, hidden = true, - defaultValue = "0.1", paramLabel = "", description = "Fraction of headers fast sync will fully validate (default: ${DEFAULT-VALUE})") private float fastSyncFullValidationRate = SynchronizerConfiguration.DEFAULT_FULL_VALIDATION_RATE; @@ -196,7 +187,6 @@ public void 
parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = WORLD_STATE_HASH_COUNT_PER_REQUEST_FLAG, hidden = true, - defaultValue = "384", paramLabel = "", description = "Fast sync world state hashes queried per request (default: ${DEFAULT-VALUE})") private int worldStateHashCountPerRequest = @@ -205,7 +195,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = WORLD_STATE_REQUEST_PARALLELISM_FLAG, hidden = true, - defaultValue = "10", paramLabel = "", description = "Number of concurrent requests to use when downloading fast sync world state (default: ${DEFAULT-VALUE})") @@ -215,7 +204,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = WORLD_STATE_MAX_REQUESTS_WITHOUT_PROGRESS_FLAG, hidden = true, - defaultValue = "1000", paramLabel = "", description = "Number of world state requests accepted without progress before considering the download stalled (default: ${DEFAULT-VALUE})") @@ -225,7 +213,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = WORLD_STATE_MIN_MILLIS_BEFORE_STALLING_FLAG, hidden = true, - defaultValue = "300000", paramLabel = "", description = "Minimum time in ms without progress before considering a world state download as stalled (default: ${DEFAULT-VALUE})") @@ -235,7 +222,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = WORLD_STATE_TASK_CACHE_SIZE_FLAG, hidden = true, - defaultValue = "1000000", paramLabel = "", description = "The max number of pending node data requests cached in-memory during fast sync world state download. (default: ${DEFAULT-VALUE})") @@ -245,7 +231,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_PIVOT_BLOCK_WINDOW_VALIDITY_FLAG, hidden = true, - defaultValue = "126", paramLabel = "", description = "The size of the pivot block window before having to change it (default: ${DEFAULT-VALUE})") @@ -255,7 +240,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_PIVOT_BLOCK_DISTANCE_BEFORE_CACHING_FLAG, hidden = true, - defaultValue = "60", paramLabel = "", description = "The distance from the head before loading a pivot block into the cache to have a ready pivot block when the window is finished (default: ${DEFAULT-VALUE})") @@ -265,7 +249,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_STORAGE_COUNT_PER_REQUEST_FLAG, hidden = true, - defaultValue = "384", paramLabel = "", description = "Snap sync storage queried per request (default: ${DEFAULT-VALUE})") private int snapsyncStorageCountPerRequest = @@ -274,7 +257,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_BYTECODE_COUNT_PER_REQUEST_FLAG, hidden = true, - defaultValue = "84", paramLabel = "", description = "Snap sync bytecode queried per request (default: ${DEFAULT-VALUE})") private int snapsyncBytecodeCountPerRequest = @@ -283,7 +265,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_TRIENODE_COUNT_PER_REQUEST_FLAG, hidden = true, - defaultValue = "384", paramLabel = "", description = "Snap sync trie node queried per request (default: ${DEFAULT-VALUE})") private int snapsyncTrieNodeCountPerRequest = @@ -292,7 +273,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG, hidden = true, - defaultValue 
= "128", paramLabel = "", description = "Snap sync flat accounts verified and healed per request (default: ${DEFAULT-VALUE})") @@ -302,7 +282,6 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG, hidden = true, - defaultValue = "1024", paramLabel = "", description = "Snap sync flat slots verified and healed per request (default: ${DEFAULT-VALUE})") @@ -312,9 +291,9 @@ public void parseBlockPropagationRange(final String arg) { @CommandLine.Option( names = SNAP_FLAT_DB_HEALING_ENABLED_FLAG, hidden = true, - defaultValue = "false", paramLabel = "", - description = "Snap sync flat db healing enabled (default: ${DEFAULT-VALUE})") + description = + "(Deprecated) Always enabled: Snap sync flat db healing enabled (default: ${DEFAULT-VALUE})") private Boolean snapsyncFlatDbHealingEnabled = SnapSyncConfiguration.DEFAULT_IS_FLAT_DB_HEALING_ENABLED; @@ -336,15 +315,6 @@ public static SynchronizerOptions create() { return new SynchronizerOptions(); } - /** - * Flag to know whether the flat db healing feature is enabled or disabled. - * - * @return true is the flat db healing is enabled - */ - public boolean isSnapsyncFlatDbHealingEnabled() { - return snapsyncFlatDbHealingEnabled; - } - /** * Create synchronizer options from Synchronizer Configuration. * @@ -471,15 +441,11 @@ public List getCLIOptions() { SNAP_BYTECODE_COUNT_PER_REQUEST_FLAG, OptionParser.format(snapsyncBytecodeCountPerRequest), SNAP_TRIENODE_COUNT_PER_REQUEST_FLAG, - OptionParser.format(snapsyncTrieNodeCountPerRequest)); - if (isSnapsyncFlatDbHealingEnabled()) { - value.addAll( - Arrays.asList( - SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG, - OptionParser.format(snapsyncFlatAccountHealedCountPerRequest), - SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG, - OptionParser.format(snapsyncFlatStorageHealedCountPerRequest))); - } + OptionParser.format(snapsyncTrieNodeCountPerRequest), + SNAP_FLAT_ACCOUNT_HEALED_COUNT_PER_REQUEST_FLAG, + OptionParser.format(snapsyncFlatAccountHealedCountPerRequest), + SNAP_FLAT_STORAGE_HEALED_COUNT_PER_REQUEST_FLAG, + OptionParser.format(snapsyncFlatStorageHealedCountPerRequest)); return value; } } diff --git a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbUsageHelper.java b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbHelper.java similarity index 72% rename from besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbUsageHelper.java rename to besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbHelper.java index 4a11abcdf02..7f2fc609a1b 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbUsageHelper.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbHelper.java @@ -18,17 +18,53 @@ import org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier; import java.io.PrintWriter; +import java.util.ArrayList; +import java.util.List; +import java.util.function.BiConsumer; import org.bouncycastle.util.Arrays; +import org.rocksdb.ColumnFamilyDescriptor; import org.rocksdb.ColumnFamilyHandle; +import org.rocksdb.Options; import org.rocksdb.RocksDB; import org.rocksdb.RocksDBException; import org.slf4j.Logger; import org.slf4j.LoggerFactory; -/** RocksDB Usage subcommand helper methods for formatting and printing. 
*/ -public class RocksDbUsageHelper { - private static final Logger LOG = LoggerFactory.getLogger(RocksDbUsageHelper.class); +/** RocksDB subcommand helper methods. */ +public class RocksDbHelper { + private static final Logger LOG = LoggerFactory.getLogger(RocksDbHelper.class); + + static void forEachColumnFamily( + final String dbPath, final BiConsumer task) { + RocksDB.loadLibrary(); + Options options = new Options(); + options.setCreateIfMissing(true); + + // Open the RocksDB database with multiple column families + List cfNames; + try { + cfNames = RocksDB.listColumnFamilies(options, dbPath); + } catch (RocksDBException e) { + throw new RuntimeException(e); + } + final List cfHandles = new ArrayList<>(); + final List cfDescriptors = new ArrayList<>(); + for (byte[] cfName : cfNames) { + cfDescriptors.add(new ColumnFamilyDescriptor(cfName)); + } + try (final RocksDB rocksdb = RocksDB.openReadOnly(dbPath, cfDescriptors, cfHandles)) { + for (ColumnFamilyHandle cfHandle : cfHandles) { + task.accept(rocksdb, cfHandle); + } + } catch (RocksDBException e) { + throw new RuntimeException(e); + } finally { + for (ColumnFamilyHandle cfHandle : cfHandles) { + cfHandle.close(); + } + } + } static void printUsageForColumnFamily( final RocksDB rocksdb, final ColumnFamilyHandle cfHandle, final PrintWriter out) @@ -62,7 +98,7 @@ static void printUsageForColumnFamily( } } - private static String formatOutputSize(final long size) { + static String formatOutputSize(final long size) { if (size > (1024 * 1024 * 1024)) { long sizeInGiB = size / (1024 * 1024 * 1024); return sizeInGiB + " GiB"; diff --git a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbSubCommand.java b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbSubCommand.java index 461175a2282..0beddfdcbca 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbSubCommand.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/RocksDbSubCommand.java @@ -19,13 +19,7 @@ import org.hyperledger.besu.cli.util.VersionProvider; import java.io.PrintWriter; -import java.util.ArrayList; -import java.util.List; -import org.rocksdb.ColumnFamilyDescriptor; -import org.rocksdb.ColumnFamilyHandle; -import org.rocksdb.Options; -import org.rocksdb.RocksDB; import org.rocksdb.RocksDBException; import picocli.CommandLine; import picocli.CommandLine.Command; @@ -82,34 +76,17 @@ public void run() { .concat("/") .concat(DATABASE_PATH); - RocksDB.loadLibrary(); - Options options = new Options(); - options.setCreateIfMissing(true); - - // Open the RocksDB database with multiple column families - List cfNames; - try { - cfNames = RocksDB.listColumnFamilies(options, dbPath); - } catch (RocksDBException e) { - throw new RuntimeException(e); - } - final List cfHandles = new ArrayList<>(); - final List cfDescriptors = new ArrayList<>(); - for (byte[] cfName : cfNames) { - cfDescriptors.add(new ColumnFamilyDescriptor(cfName)); - } - RocksDbUsageHelper.printTableHeader(out); - try (final RocksDB rocksdb = RocksDB.openReadOnly(dbPath, cfDescriptors, cfHandles)) { - for (ColumnFamilyHandle cfHandle : cfHandles) { - RocksDbUsageHelper.printUsageForColumnFamily(rocksdb, cfHandle, out); - } - } catch (RocksDBException e) { - throw new RuntimeException(e); - } finally { - for (ColumnFamilyHandle cfHandle : cfHandles) { - cfHandle.close(); - } - } + RocksDbHelper.printTableHeader(out); + + RocksDbHelper.forEachColumnFamily( + dbPath, + (rocksdb, cfHandle) -> { + try { + 
RocksDbHelper.printUsageForColumnFamily(rocksdb, cfHandle, out); + } catch (RocksDBException e) { + throw new RuntimeException(e); + } + }); } } } diff --git a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelper.java b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelper.java index 22efd97c86d..364e45fe8e7 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelper.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelper.java @@ -60,11 +60,12 @@ public class TrieLogHelper { private static final int ROCKSDB_MAX_INSERTS_PER_TRANSACTION = 1000; private static final Logger LOG = LoggerFactory.getLogger(TrieLogHelper.class); - void prune( + boolean prune( final DataStorageConfiguration config, final BonsaiWorldStateKeyValueStorage rootWorldStateStorage, final MutableBlockchain blockchain, final Path dataDirectoryPath) { + final String batchFileNameBase = dataDirectoryPath.resolve(DATABASE_PATH).resolve(TRIE_LOG_FILE).toString(); @@ -82,10 +83,14 @@ void prune( lastBlockNumberToRetainTrieLogsFor, rootWorldStateStorage, layersToRetain)) { - return; + return false; } final long numberOfBatches = calculateNumberOfBatches(layersToRetain); + LOG.info( + "Starting pruning: retain {} trie logs, processing in {} batches...", + layersToRetain, + numberOfBatches); processTrieLogBatches( rootWorldStateStorage, @@ -102,7 +107,7 @@ void prune( .count(); if (countAfterPrune == layersToRetain) { if (deleteFiles(batchFileNameBase, numberOfBatches)) { - LOG.info("Prune ran successfully. Enjoy some disk space back! \uD83D\uDE80"); + return true; } else { throw new IllegalStateException( "There was an error deleting the trie log backup files. Please ensure besu is working before deleting them manually."); @@ -110,8 +115,11 @@ void prune( } else { throw new IllegalStateException( String.format( - "Remaining trie logs (%d) did not match %s (%d). Trie logs backup files have not been deleted, it is safe to rerun the subcommand.", - countAfterPrune, BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD, layersToRetain)); + "Remaining trie logs (%d) did not match %s (%d). 
Trie logs backup files (in %s) have not been deleted, it is safe to rerun the subcommand.", + countAfterPrune, + BONSAI_STORAGE_FORMAT_MAX_LAYERS_TO_LOAD, + layersToRetain, + batchFileNameBase)); } } @@ -131,7 +139,7 @@ private void processTrieLogBatches( final List trieLogKeys = getTrieLogKeysForBlocks(blockchain, firstBlockOfBatch, lastBlockOfBatch); - LOG.info("Saving trie logs to retain in file (batch {})...", batchNumber); + LOG.info("Saving trie logs to retain in file {} (batch {})...", batchFileName, batchNumber); saveTrieLogBatches(batchFileName, rootWorldStateStorage, trieLogKeys); } @@ -319,7 +327,7 @@ private void saveTrieLogsInFile( File file = new File(batchFileName); if (file.exists()) { - LOG.error("File already exists, skipping file creation"); + LOG.warn("File already exists {}, skipping file creation", batchFileName); return; } @@ -354,7 +362,7 @@ private void saveTrieLogsAsRlpInFile( final String batchFileName) { File file = new File(batchFileName); if (file.exists()) { - LOG.error("File already exists, skipping file creation"); + LOG.warn("File already exists {}, skipping file creation", batchFileName); return; } diff --git a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogSubCommand.java b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogSubCommand.java index a4a38737f91..548d44dfa81 100644 --- a/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogSubCommand.java +++ b/besu/src/main/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogSubCommand.java @@ -16,6 +16,9 @@ import static com.google.common.base.Preconditions.checkArgument; import static com.google.common.base.Preconditions.checkNotNull; +import static org.hyperledger.besu.cli.subcommands.storage.RocksDbHelper.formatOutputSize; +import static org.hyperledger.besu.controller.BesuController.DATABASE_PATH; +import static org.hyperledger.besu.ethereum.storage.keyvalue.KeyValueSegmentIdentifier.TRIE_LOG_STORAGE; import org.hyperledger.besu.cli.util.VersionProvider; import org.hyperledger.besu.controller.BesuController; @@ -31,10 +34,14 @@ import java.io.PrintWriter; import java.nio.file.Path; import java.nio.file.Paths; +import java.util.Arrays; import java.util.List; +import java.util.concurrent.atomic.AtomicLong; import org.apache.logging.log4j.Level; import org.apache.logging.log4j.core.config.Configurator; +import org.rocksdb.RocksDBException; +import org.slf4j.Logger; import org.slf4j.LoggerFactory; import picocli.CommandLine; import picocli.CommandLine.Command; @@ -54,6 +61,8 @@ }) public class TrieLogSubCommand implements Runnable { + private static final Logger LOG = LoggerFactory.getLogger(TrieLogSubCommand.class); + @SuppressWarnings("UnusedVariable") @ParentCommand private static StorageSubCommand parentCommand; @@ -124,12 +133,67 @@ public void run() { final Path dataDirectoryPath = Paths.get( TrieLogSubCommand.parentCommand.parentCommand.dataDir().toAbsolutePath().toString()); + + LOG.info("Estimating trie logs size before pruning..."); + long sizeBefore = estimatedSizeOfTrieLogs(); + LOG.info("Estimated trie logs size before pruning: {}", formatOutputSize(sizeBefore)); + final TrieLogHelper trieLogHelper = new TrieLogHelper(); - trieLogHelper.prune( - context.config(), - context.rootWorldStateStorage(), - context.blockchain(), - dataDirectoryPath); + boolean success = + trieLogHelper.prune( + context.config(), + context.rootWorldStateStorage(), + context.blockchain(), + dataDirectoryPath); + + if (success) { + LOG.info("Finished 
pruning. Re-estimating trie logs size..."); + final long sizeAfter = estimatedSizeOfTrieLogs(); + LOG.info( + "Estimated trie logs size after pruning: {} (0 B estimate is normal when using default settings)", + formatOutputSize(sizeAfter)); + long estimatedSaving = sizeBefore - sizeAfter; + LOG.info( + "Prune ran successfully. We estimate you freed up {}! \uD83D\uDE80", + formatOutputSize(estimatedSaving)); + spec.commandLine() + .getOut() + .printf( + "Prune ran successfully. We estimate you freed up %s! \uD83D\uDE80\n", + formatOutputSize(estimatedSaving)); + } + } + + private long estimatedSizeOfTrieLogs() { + final String dbPath = + TrieLogSubCommand.parentCommand + .parentCommand + .dataDir() + .toString() + .concat("/") + .concat(DATABASE_PATH); + + AtomicLong estimatedSaving = new AtomicLong(0L); + try { + RocksDbHelper.forEachColumnFamily( + dbPath, + (rocksdb, cfHandle) -> { + try { + if (Arrays.equals(cfHandle.getName(), TRIE_LOG_STORAGE.getId())) { + estimatedSaving.set( + Long.parseLong( + rocksdb.getProperty(cfHandle, "rocksdb.estimate-live-data-size"))); + } + } catch (RocksDBException | NumberFormatException e) { + throw new RuntimeException(e); + } + }); + } catch (Exception e) { + LOG.warn("Error while estimating trie log size, returning 0 for estimate", e); + return 0L; + } + + return estimatedSaving.get(); } } diff --git a/besu/src/test/java/org/hyperledger/besu/cli/BesuCommandTest.java b/besu/src/test/java/org/hyperledger/besu/cli/BesuCommandTest.java index b51dd5dc778..5d40ba00cc0 100644 --- a/besu/src/test/java/org/hyperledger/besu/cli/BesuCommandTest.java +++ b/besu/src/test/java/org/hyperledger/besu/cli/BesuCommandTest.java @@ -1039,109 +1039,6 @@ public void maxpeersOptionMustBeUsed() { assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); } - @Test - public void rpcMaxLogsRangeOptionMustBeUsed() { - - final long rpcMaxLogsRange = 150L; - parseCommand("--rpc-max-logs-range", Long.toString(rpcMaxLogsRange)); - - verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); - verify(mockRunnerBuilder).build(); - - assertThat(apiConfigurationCaptor.getValue()) - .isEqualTo(ImmutableApiConfiguration.builder().maxLogsRange((rpcMaxLogsRange)).build()); - - assertThat(commandOutput.toString(UTF_8)).isEmpty(); - assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); - } - - @Test - public void rpcGasCapOptionMustBeUsed() { - final long rpcGasCap = 150L; - parseCommand("--rpc-gas-cap", Long.toString(rpcGasCap)); - - verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); - verify(mockRunnerBuilder).build(); - - assertThat(apiConfigurationCaptor.getValue()) - .isEqualTo(ImmutableApiConfiguration.builder().gasCap((rpcGasCap)).build()); - - assertThat(commandOutput.toString(UTF_8)).isEmpty(); - assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); - } - - @Test - public void apiPriorityFeeLimitingEnabledOptionMustBeUsed() { - parseCommand("--api-gas-and-priority-fee-limiting-enabled"); - verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); - verify(mockRunnerBuilder).build(); - assertThat(apiConfigurationCaptor.getValue()) - .isEqualTo( - ImmutableApiConfiguration.builder().isGasAndPriorityFeeLimitingEnabled(true).build()); - - assertThat(commandOutput.toString(UTF_8)).isEmpty(); - assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); - } - - @Test - public void apiPriorityFeeLowerBoundCoefficientOptionMustBeUsed() { - final long lowerBound = 150L; - parseCommand( - 
"--api-gas-and-priority-fee-lower-bound-coefficient", - Long.toString(lowerBound), - "--api-gas-and-priority-fee-limiting-enabled"); - verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); - verify(mockRunnerBuilder).build(); - assertThat(apiConfigurationCaptor.getValue()) - .isEqualTo( - ImmutableApiConfiguration.builder() - .lowerBoundGasAndPriorityFeeCoefficient(lowerBound) - .isGasAndPriorityFeeLimitingEnabled(true) - .build()); - - assertThat(commandOutput.toString(UTF_8)).isEmpty(); - assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); - } - - @Test - public void - apiPriorityFeeLowerBoundCoefficients_MustNotBeGreaterThan_apiPriorityFeeUpperBoundCoefficient() { - final long lowerBound = 200L; - final long upperBound = 100L; - - parseCommand( - "--api-gas-and-priority-fee-limiting-enabled", - "--api-gas-and-priority-fee-lower-bound-coefficient", - Long.toString(lowerBound), - "--api-gas-and-priority-fee-upper-bound-coefficient", - Long.toString(upperBound)); - Mockito.verifyNoInteractions(mockRunnerBuilder); - assertThat(commandOutput.toString(UTF_8)).isEmpty(); - assertThat(commandErrorOutput.toString(UTF_8)) - .contains( - "--api-gas-and-priority-fee-lower-bound-coefficient cannot be greater than the value of --api-gas-and-priority-fee-upper-bound-coefficient"); - } - - @Test - public void apiPriorityFeeUpperBoundCoefficientsOptionMustBeUsed() { - final long upperBound = 200L; - parseCommand( - "--api-gas-and-priority-fee-upper-bound-coefficient", - Long.toString(upperBound), - "--api-gas-and-priority-fee-limiting-enabled"); - verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); - verify(mockRunnerBuilder).build(); - assertThat(apiConfigurationCaptor.getValue()) - .isEqualTo( - ImmutableApiConfiguration.builder() - .upperBoundGasAndPriorityFeeCoefficient(upperBound) - .isGasAndPriorityFeeLimitingEnabled(true) - .build()); - - assertThat(commandOutput.toString(UTF_8)).isEmpty(); - assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); - } - @Test public void p2pPeerUpperBound_without_p2pPeerLowerBound_shouldSetLowerBoundEqualToUpperBound() { @@ -3588,29 +3485,12 @@ public void txpoolTxPoolMinGasPriceMustNotBeGreaterThanMinGasPriceZero() { } @Test - public void snapsyncHealingOptionShouldBeDisabledByDefault() { - final TestBesuCommand besuCommand = parseCommand(); - assertThat(besuCommand.unstableSynchronizerOptions.isSnapsyncFlatDbHealingEnabled()).isFalse(); - } - - @Test - public void snapsyncHealingOptionShouldWork() { - final TestBesuCommand besuCommand = - parseCommand("--Xsnapsync-synchronizer-flat-db-healing-enabled", "true"); - assertThat(besuCommand.unstableSynchronizerOptions.isSnapsyncFlatDbHealingEnabled()).isTrue(); - } - - @Test - public void snapsyncForHealingFeaturesShouldFailWhenHealingIsNotEnabled() { + public void snapsyncForHealingFeaturesShouldFailWhenHealingIsNotSet_EnabledByDefault() { parseCommand("--Xsnapsync-synchronizer-flat-account-healed-count-per-request", "100"); - assertThat(commandErrorOutput.toString(UTF_8)) - .contains( - "--Xsnapsync-synchronizer-flat option can only be used when -Xsnapsync-synchronizer-flat-db-healing-enabled is true"); + assertThat(commandOutput.toString(UTF_8)).isEmpty(); parseCommand("--Xsnapsync-synchronizer-flat-slot-healed-count-per-request", "100"); - assertThat(commandErrorOutput.toString(UTF_8)) - .contains( - "--Xsnapsync-synchronizer-flat option can only be used when -Xsnapsync-synchronizer-flat-db-healing-enabled is true"); + 
assertThat(commandOutput.toString(UTF_8)).isEmpty(); } @Test diff --git a/besu/src/test/java/org/hyperledger/besu/cli/options/ApiConfigurationOptionsTest.java b/besu/src/test/java/org/hyperledger/besu/cli/options/ApiConfigurationOptionsTest.java new file mode 100644 index 00000000000..c12a718cb0f --- /dev/null +++ b/besu/src/test/java/org/hyperledger/besu/cli/options/ApiConfigurationOptionsTest.java @@ -0,0 +1,152 @@ +/* + * Copyright Hyperledger Besu Contributors. + * + * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with + * the License. You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on + * an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the + * specific language governing permissions and limitations under the License. + * + * SPDX-License-Identifier: Apache-2.0 + */ +package org.hyperledger.besu.cli.options; + +import static java.nio.charset.StandardCharsets.UTF_8; +import static org.assertj.core.api.Assertions.assertThat; +import static org.mockito.Mockito.verify; + +import org.hyperledger.besu.cli.CommandTestAbstract; +import org.hyperledger.besu.ethereum.api.ImmutableApiConfiguration; + +import org.junit.jupiter.api.Test; +import org.junit.jupiter.api.extension.ExtendWith; +import org.mockito.Mockito; +import org.mockito.junit.jupiter.MockitoExtension; + +@ExtendWith(MockitoExtension.class) +public class ApiConfigurationOptionsTest extends CommandTestAbstract { + + @Test + public void apiPriorityFeeLimitingEnabledOptionMustBeUsed() { + parseCommand("--api-gas-and-priority-fee-limiting-enabled"); + verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); + verify(mockRunnerBuilder).build(); + assertThat(apiConfigurationCaptor.getValue()) + .isEqualTo( + ImmutableApiConfiguration.builder().isGasAndPriorityFeeLimitingEnabled(true).build()); + + assertThat(commandOutput.toString(UTF_8)).isEmpty(); + assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); + } + + @Test + public void apiPriorityFeeLowerBoundCoefficientOptionMustBeUsed() { + final long lowerBound = 150L; + parseCommand( + "--api-gas-and-priority-fee-lower-bound-coefficient", + Long.toString(lowerBound), + "--api-gas-and-priority-fee-limiting-enabled"); + verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); + verify(mockRunnerBuilder).build(); + assertThat(apiConfigurationCaptor.getValue()) + .isEqualTo( + ImmutableApiConfiguration.builder() + .lowerBoundGasAndPriorityFeeCoefficient(lowerBound) + .isGasAndPriorityFeeLimitingEnabled(true) + .build()); + + assertThat(commandOutput.toString(UTF_8)).isEmpty(); + assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); + } + + @Test + public void + apiPriorityFeeLowerBoundCoefficients_MustNotBeGreaterThan_apiPriorityFeeUpperBoundCoefficient() { + final long lowerBound = 200L; + final long upperBound = 100L; + + parseCommand( + "--api-gas-and-priority-fee-limiting-enabled", + "--api-gas-and-priority-fee-lower-bound-coefficient", + Long.toString(lowerBound), + "--api-gas-and-priority-fee-upper-bound-coefficient", + Long.toString(upperBound)); + Mockito.verifyNoInteractions(mockRunnerBuilder); + assertThat(commandOutput.toString(UTF_8)).isEmpty(); + assertThat(commandErrorOutput.toString(UTF_8)) + .contains( + 
"--api-gas-and-priority-fee-lower-bound-coefficient cannot be greater than the value of --api-gas-and-priority-fee-upper-bound-coefficient"); + } + + @Test + public void apiPriorityFeeUpperBoundCoefficientsOptionMustBeUsed() { + final long upperBound = 200L; + parseCommand( + "--api-gas-and-priority-fee-upper-bound-coefficient", + Long.toString(upperBound), + "--api-gas-and-priority-fee-limiting-enabled"); + verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); + verify(mockRunnerBuilder).build(); + assertThat(apiConfigurationCaptor.getValue()) + .isEqualTo( + ImmutableApiConfiguration.builder() + .upperBoundGasAndPriorityFeeCoefficient(upperBound) + .isGasAndPriorityFeeLimitingEnabled(true) + .build()); + + assertThat(commandOutput.toString(UTF_8)).isEmpty(); + assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); + } + + @Test + public void rpcMaxLogsRangeOptionMustBeUsed() { + + final long rpcMaxLogsRange = 150L; + parseCommand("--rpc-max-logs-range", Long.toString(rpcMaxLogsRange)); + + verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); + verify(mockRunnerBuilder).build(); + + assertThat(apiConfigurationCaptor.getValue()) + .isEqualTo(ImmutableApiConfiguration.builder().maxLogsRange((rpcMaxLogsRange)).build()); + + assertThat(commandOutput.toString(UTF_8)).isEmpty(); + assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); + } + + @Test + public void rpcGasCapOptionMustBeUsed() { + final long rpcGasCap = 150L; + parseCommand("--rpc-gas-cap", Long.toString(rpcGasCap)); + + verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); + verify(mockRunnerBuilder).build(); + + assertThat(apiConfigurationCaptor.getValue()) + .isEqualTo(ImmutableApiConfiguration.builder().gasCap((rpcGasCap)).build()); + + assertThat(commandOutput.toString(UTF_8)).isEmpty(); + assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); + } + + @Test + public void rpcMaxTraceFilterOptionMustBeUsed() { + final long rpcMaxTraceFilterOption = 150L; + parseCommand("--rpc-max-trace-filter-range", Long.toString(rpcMaxTraceFilterOption)); + + verify(mockRunnerBuilder).apiConfiguration(apiConfigurationCaptor.capture()); + verify(mockRunnerBuilder).build(); + + assertThat(apiConfigurationCaptor.getValue()) + .isEqualTo( + ImmutableApiConfiguration.builder() + .maxTraceFilterRange((rpcMaxTraceFilterOption)) + .build()); + + assertThat(commandOutput.toString(UTF_8)).isEmpty(); + assertThat(commandErrorOutput.toString(UTF_8)).isEmpty(); + } +} diff --git a/besu/src/test/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelperTest.java b/besu/src/test/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelperTest.java index 22a7c37523d..88d68a8be91 100644 --- a/besu/src/test/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelperTest.java +++ b/besu/src/test/java/org/hyperledger/besu/cli/subcommands/storage/TrieLogHelperTest.java @@ -291,8 +291,8 @@ public void mismatchInPrunedTrieLogCountShouldNotDeleteFiles(final @TempDir Path nonValidatingTrieLogHelper.prune( dataStorageConfiguration, inMemoryWorldStateSpy, blockchain, dataDir)) .isInstanceOf(RuntimeException.class) - .hasMessage( - "Remaining trie logs (0) did not match --bonsai-historical-block-limit (3). 
Trie logs backup files have not been deleted, it is safe to rerun the subcommand."); + .hasMessageContaining( + "Remaining trie logs (0) did not match --bonsai-historical-block-limit (3)"); } @Test diff --git a/config/src/main/resources/profiles/minimalist-staker.toml b/config/src/main/resources/profiles/minimalist-staker.toml new file mode 100644 index 00000000000..f465b00bf43 --- /dev/null +++ b/config/src/main/resources/profiles/minimalist-staker.toml @@ -0,0 +1,4 @@ +sync-mode="X_CHECKPOINT" +data-storage-format="BONSAI" +bonsai-historical-block-limit=128 +max-peers=25 \ No newline at end of file diff --git a/ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/snapsync/SnapSyncConfiguration.java b/ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/snapsync/SnapSyncConfiguration.java index 6358590395b..d43fc9d5bd9 100644 --- a/ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/snapsync/SnapSyncConfiguration.java +++ b/ethereum/eth/src/main/java/org/hyperledger/besu/ethereum/eth/sync/snapsync/SnapSyncConfiguration.java @@ -36,7 +36,7 @@ public class SnapSyncConfiguration { public static final int DEFAULT_LOCAL_FLAT_STORAGE_COUNT_TO_HEAL_PER_REQUEST = 1024; // The default number of flat slots entries to verify and heal per request. - public static final Boolean DEFAULT_IS_FLAT_DB_HEALING_ENABLED = Boolean.FALSE; + public static final Boolean DEFAULT_IS_FLAT_DB_HEALING_ENABLED = Boolean.TRUE; public static SnapSyncConfiguration getDefault() { return ImmutableSnapSyncConfiguration.builder().build();
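// Illustrative sketch, not part of this change: DEFAULT_IS_FLAT_DB_HEALING_ENABLED now defaults to
// Boolean.TRUE, which is why the --Xsnapsync-synchronizer-flat-db-healing-enabled option above is
// described as deprecated and always enabled, and why the flat-healing count flags no longer require
// that flag to be set. A minimal check of the new default (the isFlatDbHealingEnabled accessor name
// is assumed here; it does not appear in this diff):
//   SnapSyncConfiguration config = SnapSyncConfiguration.getDefault();
//   assert SnapSyncConfiguration.DEFAULT_IS_FLAT_DB_HEALING_ENABLED; // flipped from FALSE to TRUE
//   assert config.isFlatDbHealingEnabled();                          // assumed getter on the default configuration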