diff --git a/.clang-format b/.clang-format index 828ec0ad86d2dc..4aad29c328abd4 100644 --- a/.clang-format +++ b/.clang-format @@ -9,11 +9,11 @@ AlignEscapedNewlines: Right AlignOperands: true AlignTrailingComments: true AllowAllParametersOfDeclarationOnNextLine: true -AllowShortBlocksOnASingleLine: Never +AllowShortBlocksOnASingleLine: false AllowShortCaseLabelsOnASingleLine: false AllowShortFunctionsOnASingleLine: Inline AllowShortIfStatementsOnASingleLine: true -AllowShortLoopsOnASingleLine: false +AllowShortLoopsOnASingleLine: true AlwaysBreakAfterDefinitionReturnType: None AlwaysBreakAfterReturnType: None AlwaysBreakBeforeMultilineStrings: false diff --git a/.configurations/configuration.dsc.yaml b/.configurations/configuration.dsc.yaml new file mode 100644 index 00000000000000..f428817fea4ddb --- /dev/null +++ b/.configurations/configuration.dsc.yaml @@ -0,0 +1,54 @@ +# yaml-language-server: $schema=https://aka.ms/configuration-dsc-schema/0.2 +# Reference: https://github.com/nodejs/node/blob/main/BUILDING.md#windows +properties: + resources: + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: pythonPackage + directives: + description: Install Python 3.12 + module: Microsoft.WinGet.DSC + allowPrerelease: true + settings: + id: Python.Python.3.12 + source: winget + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: vsPackage + directives: + description: Install Visual Studio 2022 Community + allowPrerelease: true + settings: + id: Microsoft.VisualStudio.2022.Community + source: winget + useLatest: true + - resource: Microsoft.VisualStudio.DSC/VSComponents + id: vsComponents + dependsOn: + - vsPackage + directives: + description: Install required VS workloads and components + allowPrerelease: true + settings: + productId: Microsoft.VisualStudio.Product.Community + channelId: VisualStudio.17.Release + includeRecommended: true + components: + - Microsoft.VisualStudio.Workload.NativeDesktop + - Microsoft.VisualStudio.Component.VC.Llvm.Clang + - Microsoft.VisualStudio.Component.VC.Llvm.ClangToolset + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: gitPackage + directives: + description: Install Git + allowPrerelease: true + settings: + id: Git.Git + source: winget + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: nasmPackage + directives: + description: Install NetWide Assembler (NASM) + allowPrerelease: true + settings: + id: Nasm.Nasm + source: winget + configurationVersion: 0.1.0 diff --git a/.configurations/configuration.vsEnterprise.dsc.yaml b/.configurations/configuration.vsEnterprise.dsc.yaml new file mode 100644 index 00000000000000..48ba6bb0354669 --- /dev/null +++ b/.configurations/configuration.vsEnterprise.dsc.yaml @@ -0,0 +1,54 @@ +# yaml-language-server: $schema=https://aka.ms/configuration-dsc-schema/0.2 +# Reference: https://github.com/nodejs/node/blob/main/BUILDING.md#windows +properties: + resources: + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: pythonPackage + directives: + description: Install Python 3.12 + module: Microsoft.WinGet.DSC + allowPrerelease: true + settings: + id: Python.Python.3.12 + source: winget + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: vsPackage + directives: + description: Install Visual Studio 2022 Enterprise + allowPrerelease: true + settings: + id: Microsoft.VisualStudio.2022.Enterprise + source: winget + useLatest: true + - resource: Microsoft.VisualStudio.DSC/VSComponents + id: vsComponents + dependsOn: + - vsPackage + directives: + description: Install required VS workloads and components + allowPrerelease: 
true + settings: + productId: Microsoft.VisualStudio.Product.Enterprise + channelId: VisualStudio.17.Release + includeRecommended: true + components: + - Microsoft.VisualStudio.Workload.NativeDesktop + - Microsoft.VisualStudio.Component.VC.Llvm.Clang + - Microsoft.VisualStudio.Component.VC.Llvm.ClangToolset + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: gitPackage + directives: + description: Install Git + allowPrerelease: true + settings: + id: Git.Git + source: winget + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: nasmPackage + directives: + description: Install NetWide Assembler (NASM) + allowPrerelease: true + settings: + id: Nasm.Nasm + source: winget + configurationVersion: 0.1.0 diff --git a/.configurations/configuration.vsProfessional.dsc.yaml b/.configurations/configuration.vsProfessional.dsc.yaml new file mode 100644 index 00000000000000..7c2c58bc0155e2 --- /dev/null +++ b/.configurations/configuration.vsProfessional.dsc.yaml @@ -0,0 +1,54 @@ +# yaml-language-server: $schema=https://aka.ms/configuration-dsc-schema/0.2 +# Reference: https://github.com/nodejs/node/blob/main/BUILDING.md#windows +properties: + resources: + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: pythonPackage + directives: + description: Install Python 3.12 + module: Microsoft.WinGet.DSC + allowPrerelease: true + settings: + id: Python.Python.3.12 + source: winget + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: vsPackage + directives: + description: Install Visual Studio 2022 Professional + allowPrerelease: true + settings: + id: Microsoft.VisualStudio.2022.Professional + source: winget + useLatest: true + - resource: Microsoft.VisualStudio.DSC/VSComponents + id: vsComponents + dependsOn: + - vsPackage + directives: + description: Install required VS workloads and components + allowPrerelease: true + settings: + productId: Microsoft.VisualStudio.Product.Professional + channelId: VisualStudio.17.Release + includeRecommended: true + components: + - Microsoft.VisualStudio.Workload.NativeDesktop + - Microsoft.VisualStudio.Component.VC.Llvm.Clang + - Microsoft.VisualStudio.Component.VC.Llvm.ClangToolset + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: gitPackage + directives: + description: Install Git + allowPrerelease: true + settings: + id: Git.Git + source: winget + - resource: Microsoft.WinGet.DSC/WinGetPackage + id: nasmPackage + directives: + description: Install NetWide Assembler (NASM) + allowPrerelease: true + settings: + id: Nasm.Nasm + source: winget + configurationVersion: 0.1.0 diff --git a/.cpplint b/.cpplint index f6e10f03963405..5ea1e99e4be8a7 100644 --- a/.cpplint +++ b/.cpplint @@ -1,3 +1,3 @@ set noparent -filter=-build/include_alpha,-build/include_subdir,-build/include_what_you_use,-legal/copyright,-readability/nolint +filter=-build/include_alpha,-build/include_subdir,-build/include_what_you_use,-legal/copyright,-readability/nolint,-readability/braces linelength=80 diff --git a/.github/ISSUE_TEMPLATE/config.yml b/.github/ISSUE_TEMPLATE/config.yml index df5f12bb934f75..0ec17d486e7712 100644 --- a/.github/ISSUE_TEMPLATE/config.yml +++ b/.github/ISSUE_TEMPLATE/config.yml @@ -1,8 +1,14 @@ blank_issues_enabled: true contact_links: - - name: ⁉️ Need help with Node.js? + - name: ⁉️ Need general help with Node.js? url: https://github.com/nodejs/help about: Please file an issue in our help repo. + - name: 📦 Have an issue with npm? + url: https://github.com/npm/cli/issues + about: npm has a separate issue tracker. + - name: 📡 Have an issue with undici? 
(`WebSocket`, `fetch`, etc.) + url: https://github.com/nodejs/undici/issues + about: Undici has a separate issue tracker. - name: 🌐 Found a problem with nodejs.org beyond the API reference docs? url: https://github.com/nodejs/nodejs.org/issues/new/choose about: Please file an issue in the Node.js website repo. diff --git a/.github/workflows/auto-start-ci.yml b/.github/workflows/auto-start-ci.yml index 86a360d8c48476..f22a13b1abf188 100644 --- a/.github/workflows/auto-start-ci.yml +++ b/.github/workflows/auto-start-ci.yml @@ -45,12 +45,12 @@ jobs: if: needs.get-prs-for-ci.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Install Node.js - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} diff --git a/.github/workflows/build-tarball.yml b/.github/workflows/build-tarball.yml index 0e4e54e2500f5e..3487b62d4c105a 100644 --- a/.github/workflows/build-tarball.yml +++ b/.github/workflows/build-tarball.yml @@ -42,7 +42,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -50,7 +50,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information @@ -72,7 +72,7 @@ jobs: needs: build-tarball runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -80,7 +80,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information diff --git a/.github/workflows/build-windows.yml b/.github/workflows/build-windows.yml deleted file mode 100644 index f8bc1d979a647f..00000000000000 --- a/.github/workflows/build-windows.yml +++ /dev/null @@ -1,53 +0,0 @@ -name: Build Windows - -on: - pull_request: - paths-ignore: - - README.md - - .github/** - - '!.github/workflows/build-windows.yml' - types: [opened, synchronize, reopened, ready_for_review] - push: - branches: - - main - - canary - - v[0-9]+.x-staging - - v[0-9]+.x - paths-ignore: - - README.md - - .github/** - - '!.github/workflows/build-windows.yml' - -concurrency: - group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} - cancel-in-progress: true - -env: - PYTHON_VERSION: '3.12' - FLAKY_TESTS: keep_retrying - -permissions: - contents: read - -jobs: - build-windows: - if: github.event.pull_request.draft == false - strategy: - matrix: - windows: [windows-2022] - fail-fast: false - runs-on: ${{ matrix.windows }} - steps: - - uses: 
actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 - with: - persist-credentials: false - - name: Set up Python ${{ env.PYTHON_VERSION }} - uses: actions/setup-python@f677139bbe7f9c59b41e40162b753c062f5d49a3 # v5.2.0 - with: - python-version: ${{ env.PYTHON_VERSION }} - - name: Install deps - run: choco install nasm - - name: Environment Information - run: npx envinfo - - name: Build - run: ./vcbuild.bat diff --git a/.github/workflows/commit-lint.yml b/.github/workflows/commit-lint.yml index 23a2d1392b3b16..0b39ccc287340f 100644 --- a/.github/workflows/commit-lint.yml +++ b/.github/workflows/commit-lint.yml @@ -17,13 +17,13 @@ jobs: run: | echo "plusOne=$((${{ github.event.pull_request.commits }} + 1))" >> $GITHUB_OUTPUT echo "minusOne=$((${{ github.event.pull_request.commits }} - 1))" >> $GITHUB_OUTPUT - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: ${{ steps.nb-of-commits.outputs.plusOne }} persist-credentials: false - run: git reset HEAD^2 - name: Install Node.js - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Validate commit message diff --git a/.github/workflows/commit-queue.yml b/.github/workflows/commit-queue.yml index b30633ce0be854..8fc0a4a057fd7c 100644 --- a/.github/workflows/commit-queue.yml +++ b/.github/workflows/commit-queue.yml @@ -58,7 +58,7 @@ jobs: if: needs.get_mergeable_prs.outputs.numbers != '' runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: # Needs the whole git history for ncu to work # See https://github.com/nodejs/node-core-utils/pull/486 @@ -71,7 +71,7 @@ jobs: # Install dependencies - name: Install Node.js - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Install @node-core/utils diff --git a/.github/workflows/coverage-linux-without-intl.yml b/.github/workflows/coverage-linux-without-intl.yml index 49ba0662d62b6f..b2354a0b5d07a2 100644 --- a/.github/workflows/coverage-linux-without-intl.yml +++ b/.github/workflows/coverage-linux-without-intl.yml @@ -13,6 +13,7 @@ on: - tools/test.py - .github/workflows/coverage-linux-without-intl.yml - codecov.yml + - .nycrc push: branches: - main @@ -26,6 +27,7 @@ on: - tools/test.py - .github/workflows/coverage-linux-without-intl.yml - codecov.yml + - .nycrc concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -46,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -54,7 +56,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information diff --git a/.github/workflows/coverage-linux.yml 
b/.github/workflows/coverage-linux.yml index 1a8e926dd82129..f9afc41e478fca 100644 --- a/.github/workflows/coverage-linux.yml +++ b/.github/workflows/coverage-linux.yml @@ -13,6 +13,7 @@ on: - tools/test.py - .github/workflows/coverage-linux.yml - codecov.yml + - .nycrc push: branches: - main @@ -26,6 +27,7 @@ on: - tools/test.py - .github/workflows/coverage-linux.yml - codecov.yml + - .nycrc concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -46,7 +48,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -54,7 +56,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information diff --git a/.github/workflows/coverage-windows.yml b/.github/workflows/coverage-windows.yml index b234e8554ee7c4..642cd18e10e8e6 100644 --- a/.github/workflows/coverage-windows.yml +++ b/.github/workflows/coverage-windows.yml @@ -13,6 +13,7 @@ on: - tools/test.py - .github/workflows/coverage-windows.yml - codecov.yml + - .nycrc push: branches: - main @@ -26,6 +27,7 @@ on: - tools/test.py - .github/workflows/coverage-windows.yml - codecov.yml + - .nycrc concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} @@ -43,7 +45,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: windows-2022 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/daily-wpt-fyi.yml b/.github/workflows/daily-wpt-fyi.yml index d68b27aee57307..0cea4b01619b30 100644 --- a/.github/workflows/daily-wpt-fyi.yml +++ b/.github/workflows/daily-wpt-fyi.yml @@ -45,7 +45,7 @@ jobs: run: echo "NIGHTLY=$(curl -s https://nodejs.org/download/nightly/index.json | jq -r '[.[] | select(.files[] | contains("linux-x64"))][0].version')" >> $GITHUB_ENV - name: Install Node.js id: setup-node - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NIGHTLY || matrix.node-version }} check-latest: true @@ -57,7 +57,7 @@ jobs: SHORT_SHA=$(node -p 'process.version.split(/-nightly\d{8}/)[1]') echo "NIGHTLY_REF=$(gh api /repos/nodejs/node/commits/$SHORT_SHA --jq '.sha')" >> $GITHUB_ENV - name: Checkout ${{ steps.setup-node.outputs.node-version }} - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false ref: ${{ env.NIGHTLY_REF || steps.setup-node.outputs.node-version }} @@ -73,7 +73,7 @@ jobs: run: rm -rf wpt working-directory: test/fixtures - name: Checkout epochs/daily WPT - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: repository: web-platform-tests/wpt persist-credentials: false @@ -98,7 +98,7 @@ jobs: run: rm -rf 
deps/undici - name: Checkout undici if: ${{ env.WPT_REPORT != '' }} - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: repository: nodejs/undici persist-credentials: false diff --git a/.github/workflows/daily.yml b/.github/workflows/daily.yml index d020a1b98d0994..a94a3a52677165 100644 --- a/.github/workflows/daily.yml +++ b/.github/workflows/daily.yml @@ -15,11 +15,11 @@ jobs: build-lto: runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information diff --git a/.github/workflows/doc.yml b/.github/workflows/doc.yml index 16844a46def95f..b14a608163b10e 100644 --- a/.github/workflows/doc.yml +++ b/.github/workflows/doc.yml @@ -24,11 +24,11 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information diff --git a/.github/workflows/find-inactive-collaborators.yml b/.github/workflows/find-inactive-collaborators.yml index a5992fc8ad4f98..813b6c5ff3dcbd 100644 --- a/.github/workflows/find-inactive-collaborators.yml +++ b/.github/workflows/find-inactive-collaborators.yml @@ -19,13 +19,13 @@ jobs: runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 0 persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} diff --git a/.github/workflows/find-inactive-tsc.yml b/.github/workflows/find-inactive-tsc.yml index 4fe3f5d4cb17ac..ea35029a74c553 100644 --- a/.github/workflows/find-inactive-tsc.yml +++ b/.github/workflows/find-inactive-tsc.yml @@ -20,13 +20,13 @@ jobs: steps: - name: Checkout the repo - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 0 persist-credentials: false - name: Clone nodejs/TSC repository - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 0 path: .tmp @@ -34,7 +34,7 @@ jobs: repository: nodejs/TSC - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} diff --git a/.github/workflows/label-flaky-test-issue.yml 
b/.github/workflows/label-flaky-test-issue.yml index 82e2a10ab2b25c..3bca41c5e4d526 100644 --- a/.github/workflows/label-flaky-test-issue.yml +++ b/.github/workflows/label-flaky-test-issue.yml @@ -2,7 +2,7 @@ name: Label Flaky Test Issues on: issues: - types: [opened, labeled] + types: [labeled] permissions: contents: read diff --git a/.github/workflows/license-builder.yml b/.github/workflows/license-builder.yml index c1f46ed01ac8ac..6068e41b80e2f4 100644 --- a/.github/workflows/license-builder.yml +++ b/.github/workflows/license-builder.yml @@ -17,7 +17,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - run: ./tools/license-builder.sh # Run the license builder tool diff --git a/.github/workflows/linters.yml b/.github/workflows/linters.yml index 3d31ea9a17712f..09a6b779d27dbe 100644 --- a/.github/workflows/linters.yml +++ b/.github/workflows/linters.yml @@ -25,11 +25,11 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information @@ -40,7 +40,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -55,12 +55,12 @@ jobs: if: ${{ github.event.pull_request && github.event.pull_request.draft == false && github.base_ref == github.event.repository.default_branch }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 0 persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -93,11 +93,11 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Use Node.js ${{ env.NODE_VERSION }} - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Environment Information @@ -118,7 +118,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ 
env.PYTHON_VERSION }} @@ -135,7 +135,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Use Python ${{ env.PYTHON_VERSION }} @@ -153,7 +153,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - run: shellcheck -V @@ -163,7 +163,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - uses: mszostok/codeowners-validator@7f3f5e28c6d7b8dfae5731e54ce2272ca384592f @@ -173,7 +173,7 @@ jobs: if: ${{ github.event.pull_request }} runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: fetch-depth: 2 persist-credentials: false @@ -182,10 +182,11 @@ jobs: lint-readme: runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Get team members if possible + if: ${{ (github.event.pull_request && github.event.pull_request.base.ref == github.event.pull_request.base.repo.default_branch) || github.event.ref == github.event.repository.default_branch }} id: team_members run: | get_list_members() { diff --git a/.github/workflows/notify-on-push.yml b/.github/workflows/notify-on-push.yml index 725c1c2dc5a639..1e3d618f2dc6df 100644 --- a/.github/workflows/notify-on-push.yml +++ b/.github/workflows/notify-on-push.yml @@ -34,7 +34,7 @@ jobs: permissions: pull-requests: write steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Check commit message diff --git a/.github/workflows/notify-on-review-wanted.yml b/.github/workflows/notify-on-review-wanted.yml new file mode 100644 index 00000000000000..5ecb01733a964a --- /dev/null +++ b/.github/workflows/notify-on-review-wanted.yml @@ -0,0 +1,40 @@ +name: Notify on Review Wanted +on: + issues: + types: [labeled] + pull_request_target: + types: [labeled] + +permissions: + contents: read + +jobs: + notifyOnReviewWanted: + name: Notify on Review Wanted + if: github.repository == 'nodejs/node' && github.event.label.name == 'review wanted' + runs-on: ubuntu-latest + steps: + - name: Determine PR or Issue + id: define-message + run: | + if [[ -n "${{ github.event.pull_request.number }}" ]]; then + number="${{ github.event.pull_request.number }}" + link="https://github.com/${{ github.repository }}/pull/$number" + echo "message=The PR (#$number) requires review from Node.js maintainers. 
See: $link" >> "$GITHUB_OUTPUT" + echo "title=${{ github.actor }} asks for attention on pull request #$number" >> "$GITHUB_OUTPUT" + else + number="${{ github.event.issue.number }}" + link="https://github.com/${{ github.repository }}/issues/$number" + echo "message=The issue (#$number) requires review from Node.js maintainers. See: $link" >> "$GITHUB_OUTPUT" + echo "title=${{ github.actor }} asks for attention on issue #$number" >> "$GITHUB_OUTPUT" + fi + + - name: Slack Notification + uses: rtCamp/action-slack-notify@4e5fb42d249be6a45a298f3c9543b111b02f7907 # 2.3.0 + env: + SLACK_COLOR: '#DE512A' + SLACK_ICON: https://github.com/nodejs.png?size=48 + SLACK_TITLE: ${{ steps.define-message.outputs.title }} + SLACK_MESSAGE: ${{ steps.define-message.outputs.message }} + SLACK_USERNAME: nodejs-bot + SLACK_WEBHOOK: ${{ secrets.SLACK_WEBHOOK }} diff --git a/.github/workflows/scorecard.yml b/.github/workflows/scorecard.yml index 866b3e36caccc9..5bda200cc79aa7 100644 --- a/.github/workflows/scorecard.yml +++ b/.github/workflows/scorecard.yml @@ -38,7 +38,7 @@ jobs: egress-policy: audit # TODO: change to 'egress-policy: block' after couple of runs - name: Checkout code - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false diff --git a/.github/workflows/test-asan.yml b/.github/workflows/test-asan.yml index 64317d335c81cf..2965008cca1e27 100644 --- a/.github/workflows/test-asan.yml +++ b/.github/workflows/test-asan.yml @@ -47,7 +47,7 @@ jobs: CONFIG_FLAGS: --enable-asan SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -55,7 +55,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information diff --git a/.github/workflows/test-internet.yml b/.github/workflows/test-internet.yml index 9d8458cbc3bc70..b7eba9d5015814 100644 --- a/.github/workflows/test-internet.yml +++ b/.github/workflows/test-internet.yml @@ -44,7 +44,7 @@ jobs: if: github.repository == 'nodejs/node' || github.event_name != 'schedule' runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} diff --git a/.github/workflows/test-linux.yml b/.github/workflows/test-linux.yml index 9d1cb796007710..bc63a2e1edd588 100644 --- a/.github/workflows/test-linux.yml +++ b/.github/workflows/test-linux.yml @@ -37,7 +37,7 @@ jobs: if: github.event.pull_request.draft == false runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -45,7 +45,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: 
mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information diff --git a/.github/workflows/test-macos.yml b/.github/workflows/test-macos.yml index ecbd3184a1c145..74e9f45a0fe51c 100644 --- a/.github/workflows/test-macos.yml +++ b/.github/workflows/test-macos.yml @@ -44,7 +44,7 @@ jobs: CXX: sccache g++ SCCACHE_GHA_ENABLED: 'true' steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Set up Python ${{ env.PYTHON_VERSION }} @@ -52,7 +52,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information @@ -73,7 +73,7 @@ jobs: echo "::endgroup::" echo "::group::Cleaned Files" - sudo rm -rfv /Users/runner/Library/Android/sdk + sudo rm -rf /Users/runner/Library/Android/sdk echo "::endgroup::" echo "::group::Free space after cleanup" @@ -81,5 +81,7 @@ jobs: echo "::endgroup::" - name: Build run: make build-ci -j$(getconf _NPROCESSORS_ONLN) V=1 CONFIG_FLAGS="--error-on-warn" + - name: Free Space After Build + run: df -h - name: Test run: make run-ci -j$(getconf _NPROCESSORS_ONLN) V=1 TEST_CI_ARGS="-p actions --node-args='--test-reporter=spec' --node-args='--test-reporter-destination=stdout' --measure-flakiness 9" diff --git a/.github/workflows/test-ubsan.yml b/.github/workflows/test-ubsan.yml index f62f0a39072b95..9ee7a53229fd8d 100644 --- a/.github/workflows/test-ubsan.yml +++ b/.github/workflows/test-ubsan.yml @@ -45,7 +45,7 @@ jobs: LINK: sccache g++ CONFIG_FLAGS: --enable-ubsan steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Store suppressions path @@ -56,7 +56,7 @@ jobs: with: python-version: ${{ env.PYTHON_VERSION }} - name: Set up sccache - uses: mozilla-actions/sccache-action@89e9040de88b577a072e3760aaf59f585da083af # v0.0.5 + uses: mozilla-actions/sccache-action@9e326ebed976843c9932b3aa0e021c6f50310eb4 # v0.0.6 with: version: v0.8.1 - name: Environment Information diff --git a/.github/workflows/timezone-update.yml b/.github/workflows/timezone-update.yml index deeec7e1022e42..cf2b53fe62c264 100644 --- a/.github/workflows/timezone-update.yml +++ b/.github/workflows/timezone-update.yml @@ -20,12 +20,12 @@ jobs: steps: - name: Checkout nodejs/node - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Checkout unicode-org/icu-data - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: path: icu-data persist-credentials: false diff --git a/.github/workflows/tools.yml b/.github/workflows/tools.yml index 19d27090748d67..d6e5a7a264020d 100644 --- a/.github/workflows/tools.yml +++ b/.github/workflows/tools.yml @@ -315,7 +315,7 @@ jobs: tail -n1 temp-output | grep "NEW_VERSION=" >> "$GITHUB_ENV" || true rm temp-output steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: 
actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 if: github.event_name == 'schedule' || inputs.id == 'all' || inputs.id == matrix.id with: persist-credentials: false diff --git a/.github/workflows/update-openssl.yml b/.github/workflows/update-openssl.yml index 84f71c2fc94fe6..aaa554e1bc06b0 100644 --- a/.github/workflows/update-openssl.yml +++ b/.github/workflows/update-openssl.yml @@ -14,7 +14,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Check and download new OpenSSL version diff --git a/.github/workflows/update-v8.yml b/.github/workflows/update-v8.yml index 8169aa78b85ce8..b23bdae0de578f 100644 --- a/.github/workflows/update-v8.yml +++ b/.github/workflows/update-v8.yml @@ -16,7 +16,7 @@ jobs: if: github.repository == 'nodejs/node' runs-on: ubuntu-latest steps: - - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7 + - uses: actions/checkout@d632683dd7b4114ad314bca15554477dd762a938 # v4.2.0 with: persist-credentials: false - name: Cache node modules and update-v8 @@ -30,7 +30,7 @@ jobs: ~/.npm key: ${{ runner.os }}-build-${{ env.cache-name }} - name: Install Node.js - uses: actions/setup-node@1e60f620b9541d16bece96c5465dc8ee9832be0b # v4.0.3 + uses: actions/setup-node@0a44ba7841725637a19e28fa30b79a866c81b0a6 # v4.0.4 with: node-version: ${{ env.NODE_VERSION }} - name: Install @node-core/utils @@ -45,7 +45,7 @@ jobs: cat temp-output tail -n1 temp-output | grep "NEW_VERSION=" >> "$GITHUB_ENV" || true rm temp-output - - uses: peter-evans/create-pull-request@8867c4aba1b742c39f8d0ba35429c2dfa4b6cb20 # v7.0.1 + - uses: peter-evans/create-pull-request@5e914681df9dc83aa4e4905692ca88beb2f9e91f # v7.0.5 # Creates a PR or update the Action's existing PR, or # no-op if the base branch is already up-to-date. with: diff --git a/.gitignore b/.gitignore index 08a9e6e24e7e85..748e1904bc6c8c 100644 --- a/.gitignore +++ b/.gitignore @@ -21,6 +21,7 @@ !.mailmap !.nycrc !.yamllint.yaml +!.configurations/ # === Rules for root dir === /core diff --git a/.mailmap b/.mailmap index 3ad74a9de33aa2..c197dd83e271a0 100644 --- a/.mailmap +++ b/.mailmap @@ -2,6 +2,7 @@ Aaron Bieber Aaron Heckmann Aayush Ahuja +Abdirahim Musse <33973272+abmusse@users.noreply.github.com> Abe Fettig Abhimanyu Vashisht Adam Langley diff --git a/CHANGELOG.md b/CHANGELOG.md index ffeefb370dedfc..0452e6288ebe8d 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -38,7 +38,8 @@ release. -22.9.0
+22.10.0
+22.9.0
22.8.0
22.7.0
22.6.0
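The Makefile hunks that follow mostly attach `## ` descriptions to targets. Double-hash comments like these are the convention consumed by a self-documenting `help` rule, which scans the Makefile and prints each target beside its description. A minimal sketch of that pattern, assuming GNU Make plus POSIX grep and awk (illustrative only, not necessarily the exact rule used in this Makefile; recipe lines must start with a tab):

```make
.PHONY: help
help: ## Print every target that carries a '##' description.
	@grep -E '^[a-zA-Z0-9._/-]+:.*## ' Makefile \
	  | sort \
	  | awk 'BEGIN { FS = ":.*## " } { printf "%-32s %s\n", $$1, $$2 }'
```

With descriptions in place, `make help` would list, for example, `install` next to "Install node into $PREFIX (default=/usr/local).", which is also why the hunks below normalize each description into a single imperative sentence.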
diff --git a/Makefile b/Makefile index ad33c402abf774..7d4574f68e39e2 100644 --- a/Makefile +++ b/Makefile @@ -109,7 +109,7 @@ available-node = \ # BUILDTYPE=Debug builds both release and debug builds. If you want to compile # just the debug build, run `make -C out BUILDTYPE=Debug` instead. ifeq ($(BUILDTYPE),Release) -all: $(NODE_EXE) ## Default target, builds node in out/Release/node. +all: $(NODE_EXE) ## Build node in out/Release/node (Default). else all: $(NODE_EXE) $(NODE_G_EXE) endif @@ -191,11 +191,11 @@ config.gypi: configure configure.py src/node_version.h fi .PHONY: install -install: all ## Installs node into $PREFIX (default=/usr/local). +install: all ## Install node into $PREFIX (default=/usr/local). $(PYTHON) tools/install.py $@ --dest-dir '$(DESTDIR)' --prefix '$(PREFIX)' .PHONY: uninstall -uninstall: ## Uninstalls node from $PREFIX (default=/usr/local). +uninstall: ## Uninstall node from $PREFIX (default=/usr/local). $(PYTHON) tools/install.py $@ --dest-dir '$(DESTDIR)' --prefix '$(PREFIX)' .PHONY: clean @@ -213,14 +213,14 @@ clean: ## Remove build artifacts. .PHONY: testclean .NOTPARALLEL: testclean -testclean: +testclean: ## Remove test artifacts. # Next one is legacy remove this at some point $(RM) -r test/tmp* $(RM) -r test/.tmp* .PHONY: distclean .NOTPARALLEL: distclean -distclean: +distclean: ## Remove all build and test artifacts. $(RM) -r out $(RM) config.gypi icu_config.gypi $(RM) config.mk @@ -237,7 +237,7 @@ check: test .NOTPARALLEL: coverage-clean # Remove files generated by running coverage, put the non-instrumented lib back # in place -coverage-clean: +coverage-clean: ## Remove coverage artifacts. $(RM) -r node_modules $(RM) -r gcovr $(RM) -r coverage/tmp @@ -254,20 +254,20 @@ coverage-clean: coverage: coverage-test ## Run the tests and generate a coverage report. .PHONY: coverage-build -coverage-build: all +coverage-build: all ## Build coverage files. -$(MAKE) coverage-build-js if [ ! -d gcovr ]; then $(PYTHON) -m pip install -t gcovr gcovr==7.2; fi $(MAKE) .PHONY: coverage-build-js -coverage-build-js: +coverage-build-js: ## Build JavaScript coverage files. mkdir -p node_modules if [ ! -d node_modules/c8 ]; then \ $(NODE) ./deps/npm install c8 --no-save --no-package-lock;\ fi .PHONY: coverage-test -coverage-test: coverage-build +coverage-test: coverage-build ## Run the tests and generate a coverage report. @if [ -d "out/Release/obj.target" ]; then \ $(FIND) out/$(BUILDTYPE)/obj.target -name "*.gcda" -type f -exec $(RM) {}; \ fi @@ -288,18 +288,18 @@ coverage-test: coverage-build | sed 's/<[^>]*>//g'| sed 's/ //g' .PHONY: coverage-report-js -coverage-report-js: +coverage-report-js: ## Report JavaScript coverage results. -$(MAKE) coverage-build-js $(NODE) ./node_modules/.bin/c8 report .PHONY: cctest -# Runs the C++ tests using the built `cctest` executable. -cctest: all + +cctest: all ## Run the C++ tests using the built `cctest` executable. @out/$(BUILDTYPE)/$@ --gtest_filter=$(GTEST_FILTER) $(NODE) ./test/embedding/test-embedding.js .PHONY: list-gtests -list-gtests: +list-gtests: ## List all available C++ gtests. ifeq (,$(wildcard out/$(BUILDTYPE)/cctest)) $(error Please run 'make cctest' first) endif @@ -309,12 +309,12 @@ endif # Related CI job: node-test-commit-v8-linux # Rebuilds deps/v8 as a git tree, pulls its third-party dependencies, and # builds it. -v8: +v8: ## Build deps/v8. 
export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ tools/make-v8.sh $(V8_ARCH).$(BUILDTYPE_LOWER) $(V8_BUILD_OPTIONS) .PHONY: jstest -jstest: build-addons build-js-native-api-tests build-node-api-tests ## Runs addon tests and JS tests +jstest: build-addons build-js-native-api-tests build-node-api-tests ## Run addon tests and JS tests. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) \ $(TEST_CI_ARGS) \ --skip-tests=$(CI_SKIP_TESTS) \ @@ -322,11 +322,11 @@ jstest: build-addons build-js-native-api-tests build-node-api-tests ## Runs addo $(NATIVE_SUITES) .PHONY: tooltest -tooltest: +tooltest: ## Test the various tools. @$(PYTHON) -m unittest discover -s ./test/tools .PHONY: coverage-run-js -coverage-run-js: +coverage-run-js: ## Run JavaScript tests with coverage. $(RM) -r coverage/tmp -NODE_V8_COVERAGE=coverage/tmp CI_SKIP_TESTS=$(COV_SKIP_TESTS) \ TEST_CI_ARGS="$(TEST_CI_ARGS) --type=coverage" $(MAKE) jstest @@ -334,7 +334,7 @@ coverage-run-js: .PHONY: test # This does not run tests of third-party libraries inside deps. -test: all ## Runs default tests, linters, and builds docs. +test: all ## Run default tests, linters, and build docs. $(MAKE) -s tooltest $(MAKE) -s test-doc $(MAKE) -s build-addons @@ -344,7 +344,7 @@ test: all ## Runs default tests, linters, and builds docs. $(MAKE) -s jstest .PHONY: test-only -test-only: all ## For a quick test, does not run linter or build docs. +test-only: all ## Run default tests, without linters or building the docs. $(MAKE) build-addons $(MAKE) build-js-native-api-tests $(MAKE) build-node-api-tests @@ -354,7 +354,7 @@ test-only: all ## For a quick test, does not run linter or build docs. # Used by `make coverage-test` .PHONY: test-cov -test-cov: all +test-cov: all ## Run coverage tests. $(MAKE) build-addons $(MAKE) build-js-native-api-tests $(MAKE) build-node-api-tests @@ -362,7 +362,7 @@ test-cov: all CI_SKIP_TESTS=$(COV_SKIP_TESTS) $(MAKE) jstest .PHONY: test-valgrind -test-valgrind: all +test-valgrind: all ## Run tests using valgrind. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) --valgrind sequential parallel message .PHONY: test-check-deopts @@ -442,7 +442,7 @@ test/addons/.buildstamp: $(ADDONS_PREREQS) \ # .buildstamp is out of date and need a rebuild. # Just goes to show that recursive make really is harmful... # TODO(bnoordhuis) Force rebuild after gyp update. -build-addons: | $(NODE_EXE) test/addons/.buildstamp +build-addons: | $(NODE_EXE) test/addons/.buildstamp ## Build addons for Node.js. JS_NATIVE_API_BINDING_GYPS := \ $(filter-out test/js-native-api/??_*/binding.gyp, \ @@ -466,7 +466,7 @@ test/js-native-api/.buildstamp: $(ADDONS_PREREQS) \ # .buildstamp is out of date and need a rebuild. # Just goes to show that recursive make really is harmful... # TODO(bnoordhuis) Force rebuild after gyp or node-gyp update. -build-js-native-api-tests: | $(NODE_EXE) test/js-native-api/.buildstamp +build-js-native-api-tests: | $(NODE_EXE) test/js-native-api/.buildstamp ## Build JS Native-API tests. NODE_API_BINDING_GYPS := \ $(filter-out test/node-api/??_*/binding.gyp, \ @@ -490,7 +490,7 @@ test/node-api/.buildstamp: $(ADDONS_PREREQS) \ # .buildstamp is out of date and need a rebuild. # Just goes to show that recursive make really is harmful... # TODO(bnoordhuis) Force rebuild after gyp or node-gyp update. -build-node-api-tests: | $(NODE_EXE) test/node-api/.buildstamp +build-node-api-tests: | $(NODE_EXE) test/node-api/.buildstamp ## Build Node-API tests. 
BENCHMARK_NAPI_BINDING_GYPS := $(wildcard benchmark/napi/*/binding.gyp) @@ -504,7 +504,7 @@ benchmark/napi/.buildstamp: $(ADDONS_PREREQS) \ @$(call run_build_addons,"$$PWD/benchmark/napi",$@) .PHONY: clear-stalled -clear-stalled: +clear-stalled: ## Clear any stalled processes. $(info Clean up any leftover processes but don't error if found.) ps awwx | grep Release/node | grep -v grep | cat @PS_OUT=`ps awwx | grep Release/node | grep -v grep | awk '{print $$1}'`; \ @@ -513,20 +513,20 @@ clear-stalled: fi .PHONY: test-build -test-build: | all build-addons build-js-native-api-tests build-node-api-tests +test-build: | all build-addons build-js-native-api-tests build-node-api-tests ## Build all tests. .PHONY: test-build-js-native-api -test-build-js-native-api: all build-js-native-api-tests +test-build-js-native-api: all build-js-native-api-tests ## Build JS Native-API tests. .PHONY: test-build-node-api -test-build-node-api: all build-node-api-tests +test-build-node-api: all build-node-api-tests ## Build Node-API tests. .PHONY: test-all test-all: test-build ## Run default tests with both Debug and Release builds. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=debug,release .PHONY: test-all-valgrind -test-all-valgrind: test-build +test-all-valgrind: test-build ## Run all tests using valgrind. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=debug,release --valgrind .PHONY: test-all-suites @@ -545,9 +545,9 @@ else endif .PHONY: test-ci-native -# Build and test addons without building anything else + # Related CI job: node-test-commit-arm-fanned -test-ci-native: LOGLEVEL := info +test-ci-native: LOGLEVEL := info ## Build and test addons without building anything else. test-ci-native: | benchmark/napi/.buildstamp test/addons/.buildstamp test/js-native-api/.buildstamp test/node-api/.buildstamp $(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \ --mode=$(BUILDTYPE_LOWER) --flaky-tests=$(FLAKY_TESTS) \ @@ -556,7 +556,7 @@ test-ci-native: | benchmark/napi/.buildstamp test/addons/.buildstamp test/js-nat .PHONY: test-ci-js # This target should not use a native compiler at all # Related CI job: node-test-commit-arm-fanned -test-ci-js: | clear-stalled +test-ci-js: | clear-stalled ## Build and test JavaScript without building anything else. $(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \ --mode=$(BUILDTYPE_LOWER) --flaky-tests=$(FLAKY_TESTS) \ --skip-tests=$(CI_SKIP_TESTS) \ @@ -570,7 +570,7 @@ test-ci-js: | clear-stalled .PHONY: test-ci # Related CI jobs: most CI tests, excluding node-test-commit-arm-fanned -test-ci: LOGLEVEL := info +test-ci: LOGLEVEL := info ## Build and test everything (CI). test-ci: | clear-stalled bench-addons-build build-addons build-js-native-api-tests build-node-api-tests doc-only out/Release/cctest --gtest_output=xml:out/junit/cctest.xml $(PYTHON) tools/test.py $(PARALLEL_ARGS) -p tap --logfile test.tap \ @@ -587,7 +587,7 @@ test-ci: | clear-stalled bench-addons-build build-addons build-js-native-api-tes .PHONY: build-ci # Prepare the build for running the tests. # Related CI jobs: most CI tests, excluding node-test-commit-arm-fanned -build-ci: +build-ci: ## Build everything (CI). $(PYTHON) ./configure --verbose $(CONFIG_FLAGS) $(MAKE) @@ -601,41 +601,40 @@ build-ci: # # Using -j1 as the sub target in `test-ci` already have internal parallelism. # Refs: https://github.com/nodejs/node/pull/23733 -run-ci: build-ci +run-ci: build-ci ## Build and run all tests (CI). 
$(MAKE) test-ci -j1 .PHONY: test-release .PHONY: test-debug -test-debug: BUILDTYPE_LOWER=debug -test-release test-debug: test-build +test-debug: BUILDTYPE_LOWER=debug ## Run tests on a debug build. +test-release test-debug: test-build ## Run tests on a release or debug build. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) .PHONY: test-wpt -test-wpt: all +test-wpt: all ## Run the Web Platform Tests. $(PYTHON) tools/test.py $(PARALLEL_ARGS) wpt .PHONY: test-wpt-report -test-wpt-report: +test-wpt-report: ## Run the Web Platform Tests and generate a report. $(RM) -r out/wpt mkdir -p out/wpt -WPT_REPORT=1 $(PYTHON) tools/test.py --shell $(NODE) $(PARALLEL_ARGS) wpt $(NODE) "$$PWD/tools/merge-wpt-reports.mjs" .PHONY: test-internet -test-internet: all +test-internet: all ## Run internet tests. $(PYTHON) tools/test.py $(PARALLEL_ARGS) internet .PHONY: test-tick-processor -test-tick-processor: all +test-tick-processor: all ## Run tick processor tests. $(PYTHON) tools/test.py $(PARALLEL_ARGS) tick-processor .PHONY: test-hash-seed -# Verifies the hash seed used by V8 for hashing is random. -test-hash-seed: all +test-hash-seed: all ## Verify that the hash seed used by V8 for hashing is random. $(NODE) test/pummel/test-hash-seed.js .PHONY: test-doc -test-doc: doc-only lint-md ## Builds, lints, and verifies the docs. +test-doc: doc-only lint-md ## Build, lint, and verify the docs. @if [ "$(shell $(node_use_openssl))" != "true" ]; then \ echo "Skipping test-doc (no crypto)"; \ else \ @@ -643,11 +642,11 @@ test-doc: doc-only lint-md ## Builds, lints, and verifies the docs. fi .PHONY: test-doc-ci -test-doc-ci: doc-only +test-doc-ci: doc-only ## Build, lint, and verify the docs (CI). $(PYTHON) tools/test.py --shell $(NODE) $(TEST_CI_ARGS) $(PARALLEL_ARGS) doctool .PHONY: test-known-issues -test-known-issues: all +test-known-issues: all ## Run tests for known issues. $(PYTHON) tools/test.py $(PARALLEL_ARGS) known_issues # Related CI job: node-test-npm @@ -656,36 +655,36 @@ test-npm: $(NODE_EXE) ## Run the npm test suite on deps/npm. $(NODE) tools/test-npm-package --install --logfile=test-npm.tap deps/npm test .PHONY: test-npm-publish -test-npm-publish: $(NODE_EXE) +test-npm-publish: $(NODE_EXE) ## Test the `npm publish` command. npm_package_config_publishtest=true $(NODE) deps/npm/test/run.js .PHONY: test-js-native-api -test-js-native-api: test-build-js-native-api +test-js-native-api: test-build-js-native-api ## Run JS Native-API tests. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) js-native-api .PHONY: test-js-native-api-clean .NOTPARALLEL: test-js-native-api-clean -test-js-native-api-clean: +test-js-native-api-clean: ## Remove JS Native-API testing artifacts. $(RM) -r test/js-native-api/*/build $(RM) test/js-native-api/.buildstamp .PHONY: test-node-api -test-node-api: test-build-node-api +test-node-api: test-build-node-api ## Run Node-API tests. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) node-api .PHONY: test-node-api-clean .NOTPARALLEL: test-node-api-clean -test-node-api-clean: +test-node-api-clean: ## Remove Node-API testing artifacts. $(RM) -r test/node-api/*/build $(RM) test/node-api/.buildstamp .PHONY: test-addons -test-addons: test-build test-js-native-api test-node-api +test-addons: test-build test-js-native-api test-node-api ## Run addon tests. 
$(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) addons .PHONY: test-addons-clean .NOTPARALLEL: test-addons-clean -test-addons-clean: +test-addons-clean: ## Remove addon testing artifacts. $(RM) -r "$(ADDONS_HEADERS_DIR)" $(RM) -r test/addons/??_*/ $(RM) -r test/addons/*/build @@ -694,7 +693,7 @@ test-addons-clean: $(MAKE) test-node-api-clean .PHONY: test-with-async-hooks -test-with-async-hooks: +test-with-async-hooks: ## Run tests with async hooks. $(MAKE) build-addons $(MAKE) build-js-native-api-tests $(MAKE) build-node-api-tests @@ -711,7 +710,7 @@ test-with-async-hooks: .PHONY: test-v8-updates ifneq ("","$(wildcard deps/v8/tools/run-tests.py)") # Related CI job: node-test-commit-v8-linux -test-v8: v8 ## Runs the V8 test suite on deps/v8. +test-v8: v8 ## Run the V8 test suite on deps/v8. export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ $(PYTHON) deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) $(V8_TEST_OPTIONS) \ mjsunit cctest debugger inspector message preparser \ @@ -720,24 +719,24 @@ test-v8: v8 ## Runs the V8 test suite on deps/v8. $(info Testing hash seed) $(MAKE) test-hash-seed -test-v8-intl: v8 +test-v8-intl: v8 ## Run the v8 test suite, intl tests. export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ $(PYTHON) deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \ intl \ $(TAP_V8_INTL) $(call convert_to_junit,$(TAP_V8_INTL_JSON)) -test-v8-benchmarks: v8 +test-v8-benchmarks: v8 ## Run the v8 test suite, benchmarks. export PATH="$(NO_BIN_OVERRIDE_PATH)" && \ $(PYTHON) deps/v8/tools/run-tests.py --gn --arch=$(V8_ARCH) \ benchmarks \ $(TAP_V8_BENCHMARKS) $(call convert_to_junit,$(TAP_V8_BENCHMARKS_JSON)) -test-v8-updates: +test-v8-updates: ## Run the v8 test suite, updates. $(PYTHON) tools/test.py $(PARALLEL_ARGS) --mode=$(BUILDTYPE_LOWER) v8-updates -test-v8-all: test-v8 test-v8-intl test-v8-benchmarks test-v8-updates +test-v8-all: test-v8 test-v8-intl test-v8-benchmarks test-v8-updates ## Run the entire V8 test suite, including intl, benchmarks, and updates. # runs all v8 tests else test-v8 test-v8-intl test-v8-benchmarks test-v8-all: @@ -761,7 +760,7 @@ tools/doc/node_modules: tools/doc/package.json .PHONY: doc-only doc-only: tools/doc/node_modules \ - $(apidoc_dirs) $(apiassets) ## Builds the docs with the local or the global Node.js binary. + $(apidoc_dirs) $(apiassets) ## Build the docs with the local or the global Node.js binary. @if [ "$(shell $(node_use_openssl))" != "true" ]; then \ echo "Skipping doc-only (no crypto)"; \ else \ @@ -769,7 +768,7 @@ doc-only: tools/doc/node_modules \ fi .PHONY: doc -doc: $(NODE_EXE) doc-only +doc: $(NODE_EXE) doc-only ## Build Node.js, and then build the documentation with the new binary. out/doc: mkdir -p $@ @@ -841,16 +840,16 @@ out/doc/api/stability: out/doc/api/all.json tools/doc/stability.mjs | out/doc/ap fi .PHONY: docopen -docopen: out/doc/api/all.html +docopen: out/doc/api/all.html ## Open the documentation in a web browser. @$(PYTHON) -mwebbrowser file://$(abspath $<) .PHONY: docserve -docserve: $(apidocs_html) $(apiassets) +docserve: $(apidocs_html) $(apiassets) ## Serve the documentation on localhost:8000. @$(PYTHON) -m http.server 8000 --bind 127.0.0.1 --directory out/doc/api .PHONY: docclean .NOTPARALLEL: docclean -docclean: +docclean: ## Remove the generated documentation. $(RM) -r out/doc $(RM) "$(VERSIONS_DATA)" @@ -1032,7 +1031,7 @@ endif endif .PHONY: release-only -release-only: check-xz +release-only: check-xz ## Prepare Node.js for release. 
@if [ "$(DISTTYPE)" = "release" ] && `grep -q REPLACEME doc/api/*.md`; then \ echo 'Please update REPLACEME tags in the following doc/api/*.md files (See doc/contributing/releases.md):\n' ; \ REPLACEMES="$(shell grep -l REPLACEME doc/api/*.md)" ; \ @@ -1151,11 +1150,10 @@ endif sh tools/osx-notarize.sh $(FULLVERSION) .PHONY: pkg -# Builds the macOS installer for releases. -pkg: $(PKG) +pkg: $(PKG) ## Build the macOS installer for releases. .PHONY: corepack-update -corepack-update: +corepack-update: ## Update Corepack to the latest version. mkdir -p /tmp/node-corepack curl -qLo /tmp/node-corepack/package.tgz "$$($(call available-node,$(NPM) view corepack dist.tarball))" @@ -1328,9 +1326,8 @@ endif bench bench-all: bench-addons-build $(warning Please use benchmark/run.js or benchmark/compare.js to run the benchmarks.) -# Build required addons for benchmark before running it. .PHONY: bench-addons-build -bench-addons-build: | $(NODE_EXE) benchmark/napi/.buildstamp +bench-addons-build: | $(NODE_EXE) benchmark/napi/.buildstamp ## Build required addons for benchmark before running it. .PHONY: bench-addons-clean .NOTPARALLEL: bench-addons-clean @@ -1371,13 +1368,11 @@ tools/.mdlintstamp: $(LINT_MD_FILES) @touch $@ .PHONY: lint-md -# Lints the markdown documents maintained by us in the codebase. -lint-md: lint-js-doc | tools/.mdlintstamp +lint-md: lint-js-doc | tools/.mdlintstamp ## Lint the markdown documents maintained by us in the codebase. run-format-md = tools/lint-md/lint-md.mjs --format $(LINT_MD_FILES) .PHONY: format-md -# Formats the markdown documents maintained by us in the codebase. -format-md: +format-md: ## Format the markdown documents maintained by us in the codebase. @$(call available-node,$(run-format-md)) @@ -1388,19 +1383,18 @@ run-lint-js = tools/eslint/node_modules/eslint/bin/eslint.js --cache \ --max-warnings=0 --report-unused-disable-directives $(LINT_JS_TARGETS) run-lint-js-fix = $(run-lint-js) --fix -tools/eslint/node_modules: tools/eslint/package-lock.json +tools/eslint/node_modules/eslint/bin/eslint.js: tools/eslint/package-lock.json -cd tools/eslint && $(call available-node,$(run-npm-ci)) .PHONY: lint-js-fix -lint-js-fix: tools/eslint/node_modules +lint-js-fix: tools/eslint/node_modules/eslint/bin/eslint.js ## Lint and fix the JavaScript code with eslint. @$(call available-node,$(run-lint-js-fix)) .PHONY: lint-js .PHONY: lint-js-doc # Note that on the CI `lint-js-ci` is run instead. -# Lints the JavaScript code with eslint. lint-js-doc: LINT_JS_TARGETS=doc -lint-js lint-js-doc: tools/eslint/node_modules +lint-js lint-js-doc: tools/eslint/node_modules/eslint/bin/eslint.js ## Lint the JavaScript code with eslint. @if [ "$(shell $(node_use_openssl))" != "true" ]; then \ echo "Skipping $@ (no crypto)"; \ else \ @@ -1417,7 +1411,7 @@ run-lint-js-ci = tools/eslint/node_modules/eslint/bin/eslint.js \ .PHONY: lint-js-ci # On the CI the output is emitted in the TAP format. -lint-js-ci: tools/eslint/node_modules +lint-js-ci: tools/eslint/node_modules/eslint/bin/eslint.js $(info Running JS linter...) @$(call available-node,$(run-lint-js-ci)) @@ -1475,12 +1469,12 @@ FORMAT_CPP_FILES += $(wildcard \ ADDON_DOC_LINT_FLAGS=-whitespace/ending_newline,-build/header_guard .PHONY: format-cpp-build -format-cpp-build: +format-cpp-build: ## Build 'clang-format' tooling for C++ code formatting. 
cd tools/clang-format && $(call available-node,$(run-npm-ci)) .PHONY: format-cpp-clean .NOTPARALLEL: format-cpp-clean -format-cpp-clean: +format-cpp-clean: ## Remove 'clang-format' tooling for C++ code formatting. $(RM) -r tools/clang-format/node_modules CLANG_FORMAT_START ?= HEAD @@ -1491,7 +1485,7 @@ CLANG_FORMAT_START ?= HEAD # $ CLANG_FORMAT_START=`git rev-parse HEAD~1` make format-cpp # To format diff between main and current branch head (main...HEAD): # $ CLANG_FORMAT_START=main make format-cpp -format-cpp: ## Format C++ diff from $CLANG_FORMAT_START to current changes +format-cpp: ## Format C++ diff from $CLANG_FORMAT_START to current changes. ifneq ("","$(wildcard tools/clang-format/node_modules/)") $(info Formatting C++ diff from $(CLANG_FORMAT_START)..) @$(PYTHON) tools/clang-format/node_modules/.bin/git-clang-format \ @@ -1510,8 +1504,7 @@ else CPPLINT_QUIET = --quiet endif .PHONY: lint-cpp -# Lints the C++ code with cpplint.py and checkimports.py. -lint-cpp: tools/.cpplintstamp +lint-cpp: tools/.cpplintstamp ## Lint the C++ code with cpplint.py and checkimports.py. tools/.cpplintstamp: $(LINT_CPP_FILES) $(info Running C++ linter...) @@ -1520,7 +1513,7 @@ tools/.cpplintstamp: $(LINT_CPP_FILES) @touch $@ .PHONY: lint-addon-docs -lint-addon-docs: tools/.doclintstamp +lint-addon-docs: tools/.doclintstamp ## Lint the addon documentation. tools/.doclintstamp: test/addons/.docbuildstamp $(info Running C++ linter on addon docs...) @@ -1534,19 +1527,24 @@ cpplint: lint-cpp .PHONY: lint-py-build # python -m pip install ruff # Try with '--system' if it fails without; the system may have set '--user' -lint-py-build: +lint-py-build: ## Build resources needed to lint python files. $(info Pip installing ruff on $(shell $(PYTHON) --version)...) - $(PYTHON) -m pip install --upgrade --target tools/pip/site-packages ruff==0.5.2 || \ - $(PYTHON) -m pip install --upgrade --system --target tools/pip/site-packages ruff==0.5.2 + $(PYTHON) -m pip install --upgrade --target tools/pip/site-packages ruff==0.6.5 || \ + $(PYTHON) -m pip install --upgrade --system --target tools/pip/site-packages ruff==0.6.5 -.PHONY: lint-py +.PHONY: lint-py lint-py-fix lint-py-fix-unsafe ifneq ("","$(wildcard tools/pip/site-packages/ruff)") # Lint the Python code with ruff. lint-py: - tools/pip/site-packages/bin/ruff --version + $(info Running Python linter...) tools/pip/site-packages/bin/ruff check . +lint-py-fix: + tools/pip/site-packages/bin/ruff check . --fix + +lint-py-fix-unsafe: + tools/pip/site-packages/bin/ruff check . --fix --unsafe-fixes else -lint-py: +lint-py lint-py-fix lint-py-fix-unsafe: $(warning Python linting with ruff is not available) $(warning Run 'make lint-py-build') endif @@ -1554,15 +1552,15 @@ endif .PHONY: lint-yaml-build # python -m pip install yamllint # Try with '--system' if it fails without; the system may have set '--user' -lint-yaml-build: +lint-yaml-build: ## Build resources needed to lint YAML files. $(info Pip installing yamllint on $(shell $(PYTHON) --version)...) $(PYTHON) -m pip install --upgrade -t tools/pip/site-packages yamllint || \ $(PYTHON) -m pip install --upgrade --system -t tools/pip/site-packages yamllint .PHONY: lint-yaml -# Lints the YAML files with yamllint. -lint-yaml: +lint-yaml: ## Lint the YAML files with yamllint. @if [ -d "tools/pip/site-packages/yamllint" ]; then \ + $(info Running YAML linter...) 
\ PYTHONPATH=tools/pip $(PYTHON) -m yamllint .; \ else \ echo 'YAML linting with yamllint is not available'; \ @@ -1599,10 +1597,11 @@ lint lint-ci: endif .PHONY: lint-clean -lint-clean: +lint-clean: ## Remove linting artifacts. $(RM) tools/.*lintstamp $(RM) .eslintcache $(RM) -r tools/eslint/node_modules + $(RM) tools/pip/site_packages HAS_DOCKER ?= $(shell command -v docker > /dev/null 2>&1; [ $$? -eq 0 ] && echo 1 || echo 0) @@ -1612,7 +1611,7 @@ DOCKER_COMMAND ?= docker run -it -v $(PWD):/node IS_IN_WORKTREE = $(shell grep '^gitdir: ' $(PWD)/.git 2>/dev/null) GIT_WORKTREE_COMMON = $(shell git rev-parse --git-common-dir) DOCKER_COMMAND += $(if $(IS_IN_WORKTREE), -v $(GIT_WORKTREE_COMMON):$(GIT_WORKTREE_COMMON)) -gen-openssl: ## Generate platform dependent openssl files (requires docker) +gen-openssl: ## Generate platform dependent openssl files (requires docker). docker build -t node-openssl-builder deps/openssl/config/ $(DOCKER_COMMAND) node-openssl-builder make -C deps/openssl/config else diff --git a/README.md b/README.md index 0a1900e997f870..db5b5c221c83a5 100644 --- a/README.md +++ b/README.md @@ -81,8 +81,10 @@ directory contains the latest Hydrogen (Node.js 18) release. -Each directory name and filename contains a date (in UTC) and the commit -SHA at the HEAD of the release. +Each directory and filename includes the version (e.g., `v22.0.0`), +followed by the UTC date (e.g., `20240424` for April 24, 2024), +and the short commit SHA of the HEAD of the release (e.g., `ddd0a9e494`). +For instance, a full directory name might look like `v22.0.0-nightly20240424ddd0a9e494`. #### API documentation @@ -282,6 +284,8 @@ For information about the governance of the Node.js project, see ### Collaborators +* [abmusse](https://github.com/abmusse) - + **Abdirahim Musse** <> * [addaleax](https://github.com/addaleax) - **Anna Henningsen** <> (she/her) * [aduh95](https://github.com/aduh95) - @@ -358,8 +362,6 @@ For information about the governance of the Node.js project, see **James M Snell** <> (he/him) * [jkrems](https://github.com/jkrems) - **Jan Krems** <> (he/him) -* [joesepi](https://github.com/joesepi) - - **Joe Sepi** <> (he/him) * [joyeecheung](https://github.com/joyeecheung) - **Joyee Cheung** <> (she/her) * [juanarbol](https://github.com/juanarbol) - @@ -408,6 +410,8 @@ For information about the governance of the Node.js project, see **Filip Skokan** <> (he/him) * [pimterry](https://github.com/pimterry) - **Tim Perry** <> (he/him) +* [pmarchini](https://github.com/pmarchini) + **Pietro Marchini** <> (he/him) * [Qard](https://github.com/Qard) - **Stephen Belanger** <> (he/him) * [RafaelGSS](https://github.com/RafaelGSS) - @@ -562,6 +566,8 @@ For information about the governance of the Node.js project, see **Yuval Brik** <> * [joaocgreis](https://github.com/joaocgreis) - **João Reis** <> +* [joesepi](https://github.com/joesepi) - + **Joe Sepi** <> (he/him) * [joshgav](https://github.com/joshgav) - **Josh Gavant** <> * [julianduque](https://github.com/julianduque) - @@ -762,6 +768,8 @@ responding to new issues. 
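The `##` annotations added to the Makefile targets above follow the self-documenting Makefile convention: a help target can scrape them and print a one-line summary per annotated rule. A minimal sketch of such a scraper, assuming GNU grep and awk (the exact `help` recipe is not part of this diff):

```bash
# Print "target  description" for every rule annotated with "## ...".
grep -E '^[a-zA-Z0-9._ -]+:.*## ' Makefile | sort | \
  awk 'BEGIN { FS = ":.*## " } { printf "%-28s %s\n", $1, $2 }'
```

Annotations that end up on a continuation line, as in the `doc-only` rule above, are not picked up by this simple line-oriented pattern, which is one reason to keep the `##` comment on the target line itself.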
Primary GPG keys for Node.js Releasers (some Releasers sign with subkeys): +* **Antoine du Hamel** <> + `C0D6248439F1D5604AAFFB4021D900FFDB233756` * **Beth Griggs** <> `4ED778F539E3634C779C87C6D7062848A1AB005C` * **Bryan English** <> @@ -787,6 +795,7 @@ To import the full set of trusted release keys (including subkeys possibly used to sign releases): ```bash +gpg --keyserver hkps://keys.openpgp.org --recv-keys C0D6248439F1D5604AAFFB4021D900FFDB233756 # Antoine du Hamel gpg --keyserver hkps://keys.openpgp.org --recv-keys 4ED778F539E3634C779C87C6D7062848A1AB005C # Beth Griggs gpg --keyserver hkps://keys.openpgp.org --recv-keys 141F07595B7B3FFE74309A937405533BE57C7D57 # Bryan English gpg --keyserver hkps://keys.openpgp.org --recv-keys 74F12602B6F1C4E913FAA37AD3A89613643B6201 # Danielle Adams diff --git a/android-configure b/android-configure index 49ee97e2596554..fb237241d7413d 100755 --- a/android-configure +++ b/android-configure @@ -4,6 +4,8 @@ # Note that the mix of single and double quotes is intentional, # as is the fact that the ] goes on a new line. _=[ 'exec' '/bin/sh' '-c' ''' +command -v python3.13 >/dev/null && exec python3.13 "$0" "$@" +command -v python3.12 >/dev/null && exec python3.12 "$0" "$@" command -v python3.11 >/dev/null && exec python3.11 "$0" "$@" command -v python3.10 >/dev/null && exec python3.10 "$0" "$@" command -v python3.9 >/dev/null && exec python3.9 "$0" "$@" @@ -21,7 +23,7 @@ except ImportError: from distutils.spawn import find_executable as which print('Node.js android configure: Found Python {}.{}.{}...'.format(*sys.version_info)) -acceptable_pythons = ((3, 11), (3, 10), (3, 9), (3, 8)) +acceptable_pythons = ((3, 13), (3, 12), (3, 11), (3, 10), (3, 9), (3, 8)) if sys.version_info[:2] in acceptable_pythons: import android_configure else: diff --git a/benchmark/assert/deepequal-object.js b/benchmark/assert/deepequal-object.js index 4238129b292b45..c480faf10cbae8 100644 --- a/benchmark/assert/deepequal-object.js +++ b/benchmark/assert/deepequal-object.js @@ -4,10 +4,9 @@ const common = require('../common.js'); const assert = require('assert'); const bench = common.createBenchmark(main, { - n: [25, 2e2, 2e3], - size: [1e2, 1e3, 1e4], - strict: [1], - method: ['deepEqual', 'notDeepEqual'], + n: [25, 2e2], + size: [1e2, 1e4], + method: ['deepEqual', 'notDeepEqual', 'deepStrictEqual', 'notDeepStrictEqual'], }, { combinationFilter: (p) => { return p.size === 1e4 && p.n === 25 || @@ -30,10 +29,7 @@ function createObj(size, add = '') { })); } -function main({ size, n, method, strict }) { - if (strict) { - method = method.replace('eep', 'eepStrict'); - } +function main({ size, n, method }) { const fn = assert[method]; const actual = createObj(size); diff --git a/benchmark/esm/detect-esm-syntax.js b/benchmark/esm/detect-esm-syntax.js index dfc347225f32d6..f12cb46ed77908 100644 --- a/benchmark/esm/detect-esm-syntax.js +++ b/benchmark/esm/detect-esm-syntax.js @@ -4,34 +4,33 @@ // We use the TypeScript fixture because it's a very large CommonJS file with no ESM syntax: the worst case. 
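+// The rewritten benchmark below generates many small ES modules plus a
+// loader module that imports them all, and measures import cost with and
+// without an enclosing package.json declaring "type": "module".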
const common = require('../common.js'); const tmpdir = require('../../test/common/tmpdir.js'); -const fixtures = require('../../test/common/fixtures.js'); -const scriptPath = fixtures.path('snapshot', 'typescript.js'); const fs = require('node:fs'); const bench = common.createBenchmark(main, { - type: ['with-module-syntax-detection', 'without-module-syntax-detection'], + type: ['with-package-json', 'without-package-json'], n: [1e4], -}, { - flags: ['--experimental-detect-module'], }); -const benchmarkDirectory = tmpdir.fileURL('benchmark-detect-esm-syntax'); -const ambiguousURL = new URL('./typescript.js', benchmarkDirectory); -const explicitURL = new URL('./typescript.cjs', benchmarkDirectory); - async function main({ n, type }) { tmpdir.refresh(); + fs.mkdirSync(tmpdir.resolve('bench')); - fs.mkdirSync(benchmarkDirectory, { recursive: true }); - fs.cpSync(scriptPath, ambiguousURL); - fs.cpSync(scriptPath, explicitURL); - - bench.start(); - + let loader = ''; + const modules = []; for (let i = 0; i < n; i++) { - const url = type === 'with-module-syntax-detection' ? ambiguousURL : explicitURL; - await import(url); + const url = tmpdir.fileURL('bench', `mod${i}.js`); + fs.writeFileSync(url, `const foo${i} = ${i};\nexport { foo${i} };\n`); + loader += `import { foo${i} } from './mod${i}.js';\n`; + modules.push(url); } + const loaderURL = tmpdir.fileURL('bench', 'load.js'); + fs.writeFileSync(loaderURL, loader); + if (type === 'with-package-json') { + fs.writeFileSync(tmpdir.resolve('bench', 'package.json'), '{ "type": "module" }'); + } + + bench.start(); + await import(loaderURL); bench.end(n); } diff --git a/benchmark/fixtures/strip-types-benchmark.js b/benchmark/fixtures/strip-types-benchmark.js new file mode 100644 index 00000000000000..3244114dea3cf7 --- /dev/null +++ b/benchmark/fixtures/strip-types-benchmark.js @@ -0,0 +1,21 @@ +function processData(input) { + return { + ...input, + b: input.b + 1 + }; +} + +const data = { + a: "test", + b: 42, + c: true, + d: { + e: ["hello", "world"], + f: { + g: 100, + h: ["str", 123, false] + } + } +}; + +export const result = processData(data); diff --git a/benchmark/fixtures/strip-types-benchmark.ts b/benchmark/fixtures/strip-types-benchmark.ts new file mode 100644 index 00000000000000..0f0185c2d7dc52 --- /dev/null +++ b/benchmark/fixtures/strip-types-benchmark.ts @@ -0,0 +1,34 @@ +type ComplexType = { + a: string; + b: number; + c: boolean; + d: { + e: string[]; + f: { + g: number; + h: [string, number, boolean]; + }; + }; +}; + +function processData(input: ComplexType): ComplexType { + return { + ...input, + b: input.b + 1 + }; +} + +const data: ComplexType = { + a: "test", + b: 42, + c: true, + d: { + e: ["hello", "world"], + f: { + g: 100, + h: ["str", 123, false] + } + } +}; + +export const result = processData(data); diff --git a/benchmark/fixtures/transform-types-benchmark.js b/benchmark/fixtures/transform-types-benchmark.js new file mode 100644 index 00000000000000..c0946942179fe3 --- /dev/null +++ b/benchmark/fixtures/transform-types-benchmark.js @@ -0,0 +1,28 @@ +var Color; +(function (Color) { + Color[Color["Red"] = 0] = "Red"; + Color[Color["Green"] = 1] = "Green"; + Color[Color["Blue"] = 2] = "Blue"; +})(Color || (Color = {})); +var Geometry; +(function (Geometry) { + class Circle { + constructor(center, radius) { + this.center = center; + this.radius = radius; + } + area() { + return Math.PI * Math.pow(this.radius, 2); + } + } + Geometry.Circle = Circle; +})(Geometry || (Geometry = {})); +function processShape(color, shape) { + 
const colorName = Color[color]; + const area = shape.area().toFixed(2); + return `A ${colorName} circle with area ${area}`; +} + +const point = { x: 0, y: 0 }; +const circle = new Geometry.Circle(point, 5); +export const result = processShape(Color.Blue, circle); diff --git a/benchmark/fixtures/transform-types-benchmark.ts b/benchmark/fixtures/transform-types-benchmark.ts new file mode 100644 index 00000000000000..69c13af4ee4d7c --- /dev/null +++ b/benchmark/fixtures/transform-types-benchmark.ts @@ -0,0 +1,30 @@ +enum Color { + Red, + Green, + Blue +} + +namespace Geometry { + export interface Point { + x: number; + y: number; + } + + export class Circle { + constructor(public center: Point, public radius: number) { } + + area(): number { + return Math.PI * this.radius ** 2; + } + } +} + +function processShape(color: Color, shape: Geometry.Circle): string { + const colorName = Color[color]; + const area = shape.area().toFixed(2); + return `A ${colorName} circle with area ${area}`; +} + +const point: Geometry.Point = { x: 0, y: 0 }; +const circle = new Geometry.Circle(point, 5); +export const result = processShape(Color.Blue, circle); diff --git a/benchmark/fixtures/valid.env b/benchmark/fixtures/valid.env new file mode 100644 index 00000000000000..120488d57917e0 --- /dev/null +++ b/benchmark/fixtures/valid.env @@ -0,0 +1,67 @@ +BASIC=basic + +# COMMENTS=work +#BASIC=basic2 +#BASIC=basic3 + +# previous line intentionally left blank +AFTER_LINE=after_line +EMPTY= +EMPTY_SINGLE_QUOTES='' +EMPTY_DOUBLE_QUOTES="" +EMPTY_BACKTICKS=`` +SINGLE_QUOTES='single_quotes' +SINGLE_QUOTES_SPACED=' single quotes ' +DOUBLE_QUOTES="double_quotes" +DOUBLE_QUOTES_SPACED=" double quotes " +DOUBLE_QUOTES_INSIDE_SINGLE='double "quotes" work inside single quotes' +DOUBLE_QUOTES_WITH_NO_SPACE_BRACKET="{ port: $MONGOLAB_PORT}" +SINGLE_QUOTES_INSIDE_DOUBLE="single 'quotes' work inside double quotes" +BACKTICKS_INSIDE_SINGLE='`backticks` work inside single quotes' +BACKTICKS_INSIDE_DOUBLE="`backticks` work inside double quotes" +BACKTICKS=`backticks` +BACKTICKS_SPACED=` backticks ` +DOUBLE_QUOTES_INSIDE_BACKTICKS=`double "quotes" work inside backticks` +SINGLE_QUOTES_INSIDE_BACKTICKS=`single 'quotes' work inside backticks` +DOUBLE_AND_SINGLE_QUOTES_INSIDE_BACKTICKS=`double "quotes" and single 'quotes' work inside backticks` +EXPAND_NEWLINES="expand\nnew\nlines" +DONT_EXPAND_UNQUOTED=dontexpand\nnewlines +DONT_EXPAND_SQUOTED='dontexpand\nnewlines' +# COMMENTS=work +INLINE_COMMENTS=inline comments # work #very #well +INLINE_COMMENTS_SINGLE_QUOTES='inline comments outside of #singlequotes' # work +INLINE_COMMENTS_DOUBLE_QUOTES="inline comments outside of #doublequotes" # work +INLINE_COMMENTS_BACKTICKS=`inline comments outside of #backticks` # work +INLINE_COMMENTS_SPACE=inline comments start with a#number sign. no space required. 
+EQUAL_SIGNS=equals== +RETAIN_INNER_QUOTES={"foo": "bar"} +RETAIN_INNER_QUOTES_AS_STRING='{"foo": "bar"}' +RETAIN_INNER_QUOTES_AS_BACKTICKS=`{"foo": "bar's"}` +TRIM_SPACE_FROM_UNQUOTED= some spaced out string +SPACE_BEFORE_DOUBLE_QUOTES= "space before double quotes" +EMAIL=therealnerdybeast@example.tld + SPACED_KEY = parsed +EDGE_CASE_INLINE_COMMENTS="VALUE1" # or "VALUE2" or "VALUE3" + +MULTI_DOUBLE_QUOTED="THIS +IS +A +MULTILINE +STRING" + +MULTI_SINGLE_QUOTED='THIS +IS +A +MULTILINE +STRING' + +MULTI_BACKTICKED=`THIS +IS +A +"MULTILINE'S" +STRING` +export EXPORT_EXAMPLE = ignore export + +MULTI_NOT_VALID_QUOTE=" +MULTI_NOT_VALID=THIS +IS NOT MULTILINE diff --git a/benchmark/fs/readFileSync.js b/benchmark/fs/readFileSync.js index 800ab31450f43a..bfa00070b15772 100644 --- a/benchmark/fs/readFileSync.js +++ b/benchmark/fs/readFileSync.js @@ -4,7 +4,7 @@ const common = require('../common.js'); const fs = require('fs'); const bench = common.createBenchmark(main, { - encoding: ['undefined', 'utf8'], + encoding: ['undefined', 'utf8', 'ascii'], path: ['existing', 'non-existing'], hasFileDescriptor: ['true', 'false'], n: [1e4], diff --git a/benchmark/fs/readfile.js b/benchmark/fs/readfile.js index 9f74508042205f..58ea110159c1d7 100644 --- a/benchmark/fs/readfile.js +++ b/benchmark/fs/readfile.js @@ -12,7 +12,7 @@ const filename = tmpdir.resolve(`.removeme-benchmark-garbage-${process.pid}`); const bench = common.createBenchmark(main, { duration: [5], - encoding: ['', 'utf-8'], + encoding: ['', 'utf-8', 'ascii'], len: [1024, 16 * 1024 * 1024], concurrent: [1, 10], }); diff --git a/benchmark/permission/permission-processhas-fs-read.js b/benchmark/permission/permission-processhas-fs-read.js index e0b2efdb3b2f74..c2c90636aa1f62 100644 --- a/benchmark/permission/permission-processhas-fs-read.js +++ b/benchmark/permission/permission-processhas-fs-read.js @@ -14,6 +14,7 @@ const options = { '--experimental-permission', `--allow-fs-read=${rootPath}`, '--allow-child-process', + '--no-warnings', ], }; diff --git a/benchmark/ts/strip-typescript.js b/benchmark/ts/strip-typescript.js new file mode 100644 index 00000000000000..7a7155c568b613 --- /dev/null +++ b/benchmark/ts/strip-typescript.js @@ -0,0 +1,27 @@ +'use strict'; + +const common = require('../common'); +const path = require('path'); +const assert = require('node:assert'); + + +const js = path.resolve(__dirname, '../fixtures/strip-types-benchmark.js'); +const ts = path.resolve(__dirname, '../fixtures/strip-types-benchmark.ts'); + +const bench = common.createBenchmark(main, { + filepath: [ts, js], + n: [1e4], +}, { + flags: ['--experimental-strip-types', '--disable-warning=ExperimentalWarning'], +}); + +async function main({ n, filepath }) { + let output; + bench.start(); + for (let i = 0; i < n; i++) { + const { result } = await import(`${filepath}?${i}`); + output = result; + } + bench.end(n); + assert.ok(output); +} diff --git a/benchmark/ts/transform-typescript.js b/benchmark/ts/transform-typescript.js new file mode 100644 index 00000000000000..59a78eccae3ff1 --- /dev/null +++ b/benchmark/ts/transform-typescript.js @@ -0,0 +1,26 @@ +'use strict'; + +const common = require('../common'); +const path = require('path'); +const assert = require('node:assert'); + +const js = path.resolve(__dirname, '../fixtures/transform-types-benchmark.js'); +const ts = path.resolve(__dirname, '../fixtures/transform-types-benchmark.ts'); + +const bench = common.createBenchmark(main, { + filepath: [js, ts], + n: [1e4], +}, { + flags: ['--experimental-transform-types', 
'--disable-warning=ExperimentalWarning'], +}); + +async function main({ n, filepath }) { + let output; + bench.start(); + for (let i = 0; i < n; i++) { + const { result } = await import(`${filepath}?${i}`); + output = result; + } + bench.end(n); + assert.ok(output); +} diff --git a/benchmark/util/parse-env.js b/benchmark/util/parse-env.js new file mode 100644 index 00000000000000..1bcd08ba213b93 --- /dev/null +++ b/benchmark/util/parse-env.js @@ -0,0 +1,23 @@ +'use strict'; + +const { createBenchmark } = require('../common.js'); +const path = require('node:path'); +const fs = require('node:fs'); +const util = require('node:util'); +const assert = require('node:assert'); + +const bench = createBenchmark(main, { + n: 3e4, +}); + +const env = fs.readFileSync(path.resolve(__dirname, '../fixtures/valid.env'), 'utf-8'); + +function main({ n }) { + let noDead; + bench.start(); + for (let i = 0; i < n; i++) { + noDead = util.parseEnv(env); + } + bench.end(n); + assert(noDead !== undefined); +} diff --git a/benchmark/webstorage/getItem.js b/benchmark/webstorage/getItem.js new file mode 100644 index 00000000000000..c383f91ee0d75b --- /dev/null +++ b/benchmark/webstorage/getItem.js @@ -0,0 +1,75 @@ +'use strict'; +const { join } = require('node:path'); +const common = require('../common.js'); +const tmpdir = require('../../test/common/tmpdir'); + +let cnt = 0; + +tmpdir.refresh(); + +function nextLocalStorage() { + return join(tmpdir.path, `${++cnt}.localstorage`); +} + +const options = { + flags: ['--experimental-webstorage', `--localstorage-file=${nextLocalStorage()}`], +}; + +const bench = common.createBenchmark(main, { + type: ['localStorage-getItem', + 'localStorage-getter', + 'sessionStorage-getItem', + 'sessionStorage-getter'], + // Note: web storage has only 10mb quota + n: [1e5], +}, options); + +function fillStorage(storage, n) { + for (let i = 0; i < n; i++) { + storage.setItem(i, i); + } +} + +function main({ n, type }) { + const localStorage = globalThis.localStorage; + const sessionStorage = globalThis.sessionStorage; + + switch (type) { + case 'localStorage-getItem': + fillStorage(localStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + localStorage.getItem(i); + } + bench.end(n); + break; + case 'localStorage-getter': + fillStorage(localStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + // eslint-disable-next-line no-unused-expressions + localStorage[i]; + } + bench.end(n); + break; + case 'sessionStorage-getItem': + fillStorage(sessionStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + sessionStorage.getItem(i); + } + bench.end(n); + break; + case 'sessionStorage-getter': + fillStorage(sessionStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + // eslint-disable-next-line no-unused-expressions + sessionStorage[i]; + } + bench.end(n); + break; + default: + new Error('Invalid type'); + } +} diff --git a/benchmark/webstorage/removeItem.js b/benchmark/webstorage/removeItem.js new file mode 100644 index 00000000000000..330f17bdf35293 --- /dev/null +++ b/benchmark/webstorage/removeItem.js @@ -0,0 +1,75 @@ +'use strict'; +const { join } = require('node:path'); +const common = require('../common.js'); +const tmpdir = require('../../test/common/tmpdir'); + +let cnt = 0; + +tmpdir.refresh(); + +function nextLocalStorage() { + return join(tmpdir.path, `${++cnt}.localstorage`); +} + +const options = { + flags: ['--experimental-webstorage', `--localstorage-file=${nextLocalStorage()}`], +}; + +const bench = common.createBenchmark(main, { + type: 
['localStorage-removeItem', + 'localStorage-delete', + 'sessionStorage-removeItem', + 'sessionStorage-delete', + ], + // Note: web storage has only 10mb quota + n: [1e5], +}, options); + +function fillStorage(storage, n) { + for (let i = 0; i < n; i++) { + storage.setItem(i, i); + } +} + +function main({ n, type }) { + const localStorage = globalThis.localStorage; + const sessionStorage = globalThis.sessionStorage; + + switch (type) { + case 'localStorage-removeItem': + fillStorage(localStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + localStorage.removeItem(i); + } + bench.end(n); + break; + case 'localStorage-delete': + fillStorage(localStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + delete localStorage[i]; + } + bench.end(n); + break; + case 'sessionStorage-removeItem': + fillStorage(sessionStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + sessionStorage.removeItem(i); + } + bench.end(n); + break; + + case 'sessionStorage-delete': + fillStorage(localStorage, n); + bench.start(); + for (let i = 0; i < n; i++) { + delete sessionStorage[i]; + } + bench.end(n); + break; + default: + new Error('Invalid type'); + } +} diff --git a/benchmark/webstorage/setItem.js b/benchmark/webstorage/setItem.js new file mode 100644 index 00000000000000..e6ef2f866de05a --- /dev/null +++ b/benchmark/webstorage/setItem.js @@ -0,0 +1,63 @@ +'use strict'; +const { join } = require('node:path'); +const common = require('../common.js'); +const tmpdir = require('../../test/common/tmpdir'); + +let cnt = 0; + +tmpdir.refresh(); + +function nextLocalStorage() { + return join(tmpdir.path, `${++cnt}.localstorage`); +} + +const options = { + flags: ['--experimental-webstorage', `--localstorage-file=${nextLocalStorage()}`], +}; + +const bench = common.createBenchmark(main, { + type: ['localStorage-setItem', + 'localStorage-setter', + 'sessionStorage-setItem', + 'sessionStorage-setter'], + // Note: web storage has only 10mb quota + n: [1e5], +}, options); + +function main({ n, type }) { + const localStorage = globalThis.localStorage; + const sessionStorage = globalThis.sessionStorage; + + switch (type) { + case 'localStorage-setItem': + bench.start(); + for (let i = 0; i < n; i++) { + localStorage.setItem(i, i); + } + bench.end(n); + break; + case 'localStorage-setter': + bench.start(); + for (let i = 0; i < n; i++) { + localStorage[i] = i; + } + bench.end(n); + break; + case 'sessionStorage-setItem': + bench.start(); + for (let i = 0; i < n; i++) { + sessionStorage.setItem(i, i); + } + bench.end(n); + break; + case 'sessionStorage-setter': + bench.start(); + for (let i = 0; i < n; i++) { + sessionStorage[i] = i; + } + bench.end(n); + break; + default: + new Error('Invalid type'); + } +} diff --git a/common.gypi b/common.gypi index a97e77860e151f..de83a566724a36 100644 --- a/common.gypi +++ b/common.gypi @@ -36,7 +36,7 @@ # Reset this number to 0 on major V8 upgrades. # Increment by one for each non-official patch applied to deps/v8. 
- 'v8_embedder_string': '-node.19', + 'v8_embedder_string': '-node.21', ##### V8 defaults for Node.js ##### diff --git a/configure.py b/configure.py index d03db1970fd7a1..c4653f37f5a69b 100755 --- a/configure.py +++ b/configure.py @@ -2307,8 +2307,9 @@ def make_bin_override(): if options.compile_commands_json: gyp_args += ['-f', 'compile_commands_json'] - os.path.islink('./compile_commands.json') and os.unlink('./compile_commands.json') - os.symlink('./out/' + config['BUILDTYPE'] + '/compile_commands.json', './compile_commands.json') + if sys.platform != 'win32': + os.path.lexists('./compile_commands.json') and os.unlink('./compile_commands.json') + os.symlink('./out/' + config['BUILDTYPE'] + '/compile_commands.json', './compile_commands.json') # pass the leftover non-whitespace positional arguments to GYP gyp_args += [arg for arg in args if not str.isspace(arg)] @@ -2318,4 +2319,7 @@ def make_bin_override(): print_verbose("running: \n " + " ".join(['python', 'tools/gyp_node.py'] + gyp_args)) run_gyp(gyp_args) +if options.compile_commands_json and sys.platform == 'win32': + os.path.isfile('./compile_commands.json') and os.unlink('./compile_commands.json') + shutil.copy2('./out/' + config['BUILDTYPE'] + '/compile_commands.json', './compile_commands.json') info('configure completed successfully') diff --git a/deps/acorn/acorn-walk/CHANGELOG.md b/deps/acorn/acorn-walk/CHANGELOG.md index 909a546e0410ba..7aeae8fd5c8622 100644 --- a/deps/acorn/acorn-walk/CHANGELOG.md +++ b/deps/acorn/acorn-walk/CHANGELOG.md @@ -1,3 +1,9 @@ +## 8.3.4 (2024-09-09) + +### Bug fixes + +Walk SwitchCase nodes as separate nodes. + ## 8.3.3 (2024-01-11) ### Bug fixes diff --git a/deps/acorn/acorn-walk/dist/walk.js b/deps/acorn/acorn-walk/dist/walk.js index 580df6413725f8..40b7aa1b078a18 100644 --- a/deps/acorn/acorn-walk/dist/walk.js +++ b/deps/acorn/acorn-walk/dist/walk.js @@ -215,16 +215,10 @@ }; base.SwitchStatement = function (node, st, c) { c(node.discriminant, st, "Expression"); - for (var i$1 = 0, list$1 = node.cases; i$1 < list$1.length; i$1 += 1) { - var cs = list$1[i$1]; + for (var i = 0, list = node.cases; i < list.length; i += 1) { + var cs = list[i]; - if (cs.test) { c(cs.test, st, "Expression"); } - for (var i = 0, list = cs.consequent; i < list.length; i += 1) - { - var cons = list[i]; - - c(cons, st, "Statement"); - } + c(cs, st); } }; base.SwitchCase = function (node, st, c) { diff --git a/deps/acorn/acorn-walk/dist/walk.mjs b/deps/acorn/acorn-walk/dist/walk.mjs index 19eebc0e70f598..c475ababc7ac30 100644 --- a/deps/acorn/acorn-walk/dist/walk.mjs +++ b/deps/acorn/acorn-walk/dist/walk.mjs @@ -209,16 +209,10 @@ base.WithStatement = function (node, st, c) { }; base.SwitchStatement = function (node, st, c) { c(node.discriminant, st, "Expression"); - for (var i$1 = 0, list$1 = node.cases; i$1 < list$1.length; i$1 += 1) { - var cs = list$1[i$1]; + for (var i = 0, list = node.cases; i < list.length; i += 1) { + var cs = list[i]; - if (cs.test) { c(cs.test, st, "Expression"); } - for (var i = 0, list = cs.consequent; i < list.length; i += 1) - { - var cons = list[i]; - - c(cons, st, "Statement"); - } + c(cs, st); } }; base.SwitchCase = function (node, st, c) { diff --git a/deps/acorn/acorn-walk/package.json b/deps/acorn/acorn-walk/package.json index 9d96d36e06fc22..133059576956d8 100644 --- a/deps/acorn/acorn-walk/package.json +++ b/deps/acorn/acorn-walk/package.json @@ -16,7 +16,7 @@ ], "./package.json": "./package.json" }, - "version": "8.3.3", + "version": "8.3.4", "engines": { "node": ">=0.4.0" }, diff 
--git a/deps/corepack/CHANGELOG.md b/deps/corepack/CHANGELOG.md index a7b01aa8fb10bf..7de934c0d2c0db 100644 --- a/deps/corepack/CHANGELOG.md +++ b/deps/corepack/CHANGELOG.md @@ -1,5 +1,12 @@ # Changelog +## [0.29.4](https://github.com/nodejs/corepack/compare/v0.29.3...v0.29.4) (2024-09-07) + + +### Features + +* update package manager versions ([#543](https://github.com/nodejs/corepack/issues/543)) ([b819e40](https://github.com/nodejs/corepack/commit/b819e404dbb23c4ae3d5dbe55e21de74d714ee9c)) + ## [0.29.3](https://github.com/nodejs/corepack/compare/v0.29.2...v0.29.3) (2024-07-21) diff --git a/deps/corepack/dist/lib/corepack.cjs b/deps/corepack/dist/lib/corepack.cjs index 12df55126256e5..2978fc336232e0 100644 --- a/deps/corepack/dist/lib/corepack.cjs +++ b/deps/corepack/dist/lib/corepack.cjs @@ -21260,7 +21260,7 @@ function String2(descriptor, ...args) { } // package.json -var version = "0.29.3"; +var version = "0.29.4"; // sources/Engine.ts var import_fs9 = __toESM(require("fs")); @@ -21274,7 +21274,7 @@ var import_valid3 = __toESM(require_valid2()); var config_default = { definitions: { npm: { - default: "10.8.2+sha1.3c123c7f14409dc0395478e7269fdbc32ae179d8", + default: "10.8.3+sha1.e6085b2864fcfd9b1aad7b602601b5a2fc116699", fetchLatestFrom: { type: "npm", package: "npm" @@ -21311,7 +21311,7 @@ var config_default = { } }, pnpm: { - default: "9.5.0+sha1.8c155dc114e1689d18937974f6571e0ceee66f1d", + default: "9.9.0+sha1.3edbe440f4e570aa8f049adbd06b9483d55cc2d2", fetchLatestFrom: { type: "npm", package: "pnpm" @@ -21375,7 +21375,7 @@ var config_default = { package: "yarn" }, transparent: { - default: "4.3.1+sha224.934d21773e22af4b69a7032a2d3b4cb38c1f7c019624777cc9916b23", + default: "4.4.1+sha224.fd21d9eb5fba020083811af1d4953acc21eeb9f6ff97efd1b3f9d4de", commands: [ [ "yarn", diff --git a/deps/corepack/package.json b/deps/corepack/package.json index 04a12cdc80d95d..571c359407e07a 100644 --- a/deps/corepack/package.json +++ b/deps/corepack/package.json @@ -1,6 +1,6 @@ { "name": "corepack", - "version": "0.29.3", + "version": "0.29.4", "homepage": "https://github.com/nodejs/corepack#readme", "bugs": { "url": "https://github.com/nodejs/corepack/issues" diff --git a/deps/icu-small/source/data/in/icudt75l.dat.bz2 b/deps/icu-small/source/data/in/icudt75l.dat.bz2 index c257cf22f696fe..8f0a27826abb53 100644 Binary files a/deps/icu-small/source/data/in/icudt75l.dat.bz2 and b/deps/icu-small/source/data/in/icudt75l.dat.bz2 differ diff --git a/deps/ncrypto/ncrypto.cc b/deps/ncrypto/ncrypto.cc index eb3533bb4623b1..457bd2f6c5b189 100644 --- a/deps/ncrypto/ncrypto.cc +++ b/deps/ncrypto/ncrypto.cc @@ -33,7 +33,7 @@ ClearErrorOnReturn::~ClearErrorOnReturn() { ERR_clear_error(); } -int ClearErrorOnReturn::peeKError() { return ERR_peek_error(); } +int ClearErrorOnReturn::peekError() { return ERR_peek_error(); } MarkPopErrorOnReturn::MarkPopErrorOnReturn(CryptoErrorList* errors) : errors_(errors) { ERR_set_mark(); @@ -356,6 +356,35 @@ int PasswordCallback(char* buf, int size, int rwflag, void* u) { return -1; } +// Algorithm: http://howardhinnant.github.io/date_algorithms.html +constexpr int days_from_epoch(int y, unsigned m, unsigned d) +{ + y -= m <= 2; + const int era = (y >= 0 ? y : y - 399) / 400; + const unsigned yoe = static_cast(y - era * 400); // [0, 399] + const unsigned doy = (153 * (m + (m > 2 ? 
-3 : 9)) + 2) / 5 + d - 1; // [0, 365] + const unsigned doe = yoe * 365 + yoe / 4 - yoe / 100 + doy; // [0, 146096] + return era * 146097 + static_cast(doe) - 719468; +} + +// tm must be in UTC +// using time_t causes problems on 32-bit systems and windows x64. +int64_t PortableTimeGM(struct tm* t) { + int year = t->tm_year + 1900; + int month = t->tm_mon; + if (month > 11) { + year += month / 12; + month %= 12; + } else if (month < 0) { + int years_diff = (11 - month) / 12; + year -= years_diff; + month += 12 * years_diff; + } + int days_since_epoch = days_from_epoch(year, month + 1, t->tm_mday); + + return 60 * (60 * (24LL * static_cast(days_since_epoch) + t->tm_hour) + t->tm_min) + t->tm_sec; +} + // ============================================================================ // SPKAC @@ -826,6 +855,18 @@ BIOPointer X509View::getValidTo() const { return bio; } +int64_t X509View::getValidToTime() const { + struct tm tp; + ASN1_TIME_to_tm(X509_get0_notAfter(cert_), &tp); + return PortableTimeGM(&tp); +} + +int64_t X509View::getValidFromTime() const { + struct tm tp; + ASN1_TIME_to_tm(X509_get0_notBefore(cert_), &tp); + return PortableTimeGM(&tp); +} + DataPointer X509View::getSerialNumber() const { ClearErrorOnReturn clearErrorOnReturn; if (cert_ == nullptr) return {}; @@ -1384,4 +1425,333 @@ DataPointer pbkdf2(const EVP_MD* md, return {}; } +// ============================================================================ + +EVPKeyPointer EVPKeyPointer::New() { + return EVPKeyPointer(EVP_PKEY_new()); +} + +EVPKeyPointer EVPKeyPointer::NewRawPublic(int id, const Buffer& data) { + if (id == 0) return {}; + return EVPKeyPointer(EVP_PKEY_new_raw_public_key(id, nullptr, data.data, data.len)); +} + +EVPKeyPointer EVPKeyPointer::NewRawPrivate(int id, const Buffer& data) { + if (id == 0) return {}; + return EVPKeyPointer(EVP_PKEY_new_raw_private_key(id, nullptr, data.data, data.len)); +} + +EVPKeyPointer::EVPKeyPointer(EVP_PKEY* pkey) : pkey_(pkey) {} + +EVPKeyPointer::EVPKeyPointer(EVPKeyPointer&& other) noexcept + : pkey_(other.release()) {} + +EVPKeyPointer& EVPKeyPointer::operator=(EVPKeyPointer&& other) noexcept { + if (this == &other) return *this; + this->~EVPKeyPointer(); + return *new (this) EVPKeyPointer(std::move(other)); +} + +EVPKeyPointer::~EVPKeyPointer() { reset(); } + +void EVPKeyPointer::reset(EVP_PKEY* pkey) { + pkey_.reset(pkey); +} + +EVP_PKEY* EVPKeyPointer::release() { + return pkey_.release(); +} + +int EVPKeyPointer::id(const EVP_PKEY* key) { + if (key == nullptr) return 0; + return EVP_PKEY_id(key); +} + +int EVPKeyPointer::base_id(const EVP_PKEY* key) { + if (key == nullptr) return 0; + return EVP_PKEY_base_id(key); +} + +int EVPKeyPointer::id() const { + return id(get()); +} + +int EVPKeyPointer::base_id() const { + return base_id(get()); +} + +int EVPKeyPointer::bits() const { + if (get() == nullptr) return 0; + return EVP_PKEY_bits(get()); +} + +size_t EVPKeyPointer::size() const { + if (get() == nullptr) return 0; + return EVP_PKEY_size(get()); +} + +EVPKeyCtxPointer EVPKeyPointer::newCtx() const { + if (!pkey_) return {}; + return EVPKeyCtxPointer(EVP_PKEY_CTX_new(get(), nullptr)); +} + +size_t EVPKeyPointer::rawPublicKeySize() const { + if (!pkey_) return 0; + size_t len = 0; + if (EVP_PKEY_get_raw_public_key(get(), nullptr, &len) == 1) return len; + return 0; +} + +size_t EVPKeyPointer::rawPrivateKeySize() const { + if (!pkey_) return 0; + size_t len = 0; + if (EVP_PKEY_get_raw_private_key(get(), nullptr, &len) == 1) return len; + return 0; +} + +DataPointer 
EVPKeyPointer::rawPublicKey() const { + if (!pkey_) return {}; + if (auto data = DataPointer::Alloc(rawPublicKeySize())) { + const Buffer buf = data; + size_t len = data.size(); + if (EVP_PKEY_get_raw_public_key(get(), + buf.data, + &len) != 1) return {}; + return data; + } + return {}; +} + +DataPointer EVPKeyPointer::rawPrivateKey() const { + if (!pkey_) return {}; + if (auto data = DataPointer::Alloc(rawPrivateKeySize())) { + const Buffer buf = data; + size_t len = data.size(); + if (EVP_PKEY_get_raw_private_key(get(), + buf.data, + &len) != 1) return {}; + return data; + } + return {}; +} + +BIOPointer EVPKeyPointer::derPublicKey() const { + if (!pkey_) return {}; + auto bio = BIOPointer::NewMem(); + if (!bio) return {}; + if (!i2d_PUBKEY_bio(bio.get(), get())) return {}; + return bio; +} + +namespace { +EVPKeyPointer::ParseKeyResult TryParsePublicKeyInner( + const BIOPointer& bp, + const char* name, + auto&& parse) { + if (!bp.resetBio()) { + return EVPKeyPointer::ParseKeyResult(EVPKeyPointer::PKParseError::FAILED); + } + unsigned char* der_data; + long der_len; + + // This skips surrounding data and decodes PEM to DER. + { + MarkPopErrorOnReturn mark_pop_error_on_return; + if (PEM_bytes_read_bio(&der_data, &der_len, nullptr, name, + bp.get(), nullptr, nullptr) != 1) + return EVPKeyPointer::ParseKeyResult(EVPKeyPointer::PKParseError::NOT_RECOGNIZED); + } + DataPointer data(der_data, der_len); + + // OpenSSL might modify the pointer, so we need to make a copy before parsing. + const unsigned char* p = der_data; + EVPKeyPointer pkey(parse(&p, der_len)); + if (!pkey) return EVPKeyPointer::ParseKeyResult(EVPKeyPointer::PKParseError::FAILED); + return EVPKeyPointer::ParseKeyResult(std::move(pkey)); +} + +constexpr bool IsASN1Sequence(const unsigned char* data, size_t size, + size_t* data_offset, size_t* data_size) { + if (size < 2 || data[0] != 0x30) + return false; + + if (data[1] & 0x80) { + // Long form. + size_t n_bytes = data[1] & ~0x80; + if (n_bytes + 2 > size || n_bytes > sizeof(size_t)) + return false; + size_t length = 0; + for (size_t i = 0; i < n_bytes; i++) + length = (length << 8) | data[i + 2]; + *data_offset = 2 + n_bytes; + *data_size = std::min(size - 2 - n_bytes, length); + } else { + // Short form. + *data_offset = 2; + *data_size = std::min(size - 2, data[1]); + } + + return true; +} + +constexpr bool IsEncryptedPrivateKeyInfo(const Buffer& buffer) { + // Both PrivateKeyInfo and EncryptedPrivateKeyInfo start with a SEQUENCE. + if (buffer.len == 0 || buffer.data == nullptr) return false; + size_t offset, len; + if (!IsASN1Sequence(buffer.data, buffer.len, &offset, &len)) + return false; + + // A PrivateKeyInfo sequence always starts with an integer whereas an + // EncryptedPrivateKeyInfo starts with an AlgorithmIdentifier. + return len >= 1 && buffer.data[offset] != 2; +} + +} // namespace + +bool EVPKeyPointer::IsRSAPrivateKey(const Buffer& buffer) { + // Both RSAPrivateKey and RSAPublicKey structures start with a SEQUENCE. + size_t offset, len; + if (!IsASN1Sequence(buffer.data, buffer.len, &offset, &len)) + return false; + + // An RSAPrivateKey sequence always starts with a single-byte integer whose + // value is either 0 or 1, whereas an RSAPublicKey starts with the modulus + // (which is the product of two primes and therefore at least 4), so we can + // decide the type of the structure based on the first three bytes of the + // sequence. 
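+  // For example, a DER-encoded RSAPrivateKey begins with
+  //   30 82 xx xx 02 01 00  (SEQUENCE, then version INTEGER 0 or 1),
+  // while an RSAPublicKey begins with SEQUENCE followed by the modulus
+  // INTEGER, whose value is at least 4, so the public-key encoding can
+  // never match the single-byte 0-or-1 version pattern checked below.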
+ return len >= 3 && + buffer.data[offset] == 2 && + buffer.data[offset + 1] == 1 && + !(buffer.data[offset + 2] & 0xfe); +} + +EVPKeyPointer::ParseKeyResult EVPKeyPointer::TryParsePublicKeyPEM( + const Buffer& buffer) { + auto bp = BIOPointer::New(buffer.data, buffer.len); + if (!bp) + return ParseKeyResult(PKParseError::FAILED); + + // Try parsing as SubjectPublicKeyInfo (SPKI) first. + if (auto ret = TryParsePublicKeyInner(bp, "PUBLIC KEY", + [](const unsigned char** p, long l) { // NOLINT(runtime/int) + return d2i_PUBKEY(nullptr, p, l); + })) { + return ret; + } + + // Maybe it is PKCS#1. + if (auto ret = TryParsePublicKeyInner(bp, "RSA PUBLIC KEY", + [](const unsigned char** p, long l) { // NOLINT(runtime/int) + return d2i_PublicKey(EVP_PKEY_RSA, nullptr, p, l); + })) { + return ret; + } + + // X.509 fallback. + if (auto ret = TryParsePublicKeyInner(bp, "CERTIFICATE", + [](const unsigned char** p, long l) { // NOLINT(runtime/int) + X509Pointer x509(d2i_X509(nullptr, p, l)); + return x509 ? X509_get_pubkey(x509.get()) : nullptr; + })) { + return ret; + }; + + return ParseKeyResult(PKParseError::NOT_RECOGNIZED); +} + +EVPKeyPointer::ParseKeyResult EVPKeyPointer::TryParsePublicKey( + PKFormatType format, + PKEncodingType encoding, + const Buffer& buffer) { + if (format == PKFormatType::PEM) { + return TryParsePublicKeyPEM(buffer); + } + + if (format != PKFormatType::DER) { + return ParseKeyResult(PKParseError::FAILED); + } + + const unsigned char* start = buffer.data; + + EVP_PKEY* key = nullptr; + + if (encoding == PKEncodingType::PKCS1 && + (key = d2i_PublicKey(EVP_PKEY_RSA, nullptr, &start, buffer.len))) { + return EVPKeyPointer::ParseKeyResult(EVPKeyPointer(key)); + } + + if (encoding == PKEncodingType::SPKI && + (key = d2i_PUBKEY(nullptr, &start, buffer.len))) { + return EVPKeyPointer::ParseKeyResult(EVPKeyPointer(key)); + } + + return ParseKeyResult(PKParseError::FAILED); +} + +EVPKeyPointer::ParseKeyResult EVPKeyPointer::TryParsePrivateKey( + PKFormatType format, + PKEncodingType encoding, + std::optional> maybe_passphrase, + const Buffer& buffer) { + + static auto keyOrError = [&](EVPKeyPointer pkey, bool had_passphrase = false) { + if (int err = ERR_peek_error()) { + if (ERR_GET_LIB(err) == ERR_LIB_PEM && + ERR_GET_REASON(err) == PEM_R_BAD_PASSWORD_READ && + !had_passphrase) { + return ParseKeyResult(PKParseError::NEED_PASSPHRASE); + } + return ParseKeyResult(PKParseError::FAILED, err); + } + if (!pkey) return ParseKeyResult(PKParseError::FAILED); + return ParseKeyResult(std::move(pkey)); + }; + + Buffer* passphrase = nullptr; + if (maybe_passphrase.has_value()) { + passphrase = &maybe_passphrase.value(); + } + + auto bio = BIOPointer::New(buffer); + if (!bio) return ParseKeyResult(PKParseError::FAILED); + + if (format == PKFormatType::PEM) { + auto key = PEM_read_bio_PrivateKey(bio.get(), nullptr, PasswordCallback, passphrase); + return keyOrError(EVPKeyPointer(key), maybe_passphrase.has_value()); + } + + if (format != PKFormatType::DER) { + return ParseKeyResult(PKParseError::FAILED); + } + + switch (encoding) { + case PKEncodingType::PKCS1: { + auto key = d2i_PrivateKey_bio(bio.get(), nullptr); + return keyOrError(EVPKeyPointer(key)); + } + case PKEncodingType::PKCS8: { + if (IsEncryptedPrivateKeyInfo(buffer)) { + auto key = d2i_PKCS8PrivateKey_bio(bio.get(), + nullptr, + PasswordCallback, + passphrase); + return keyOrError(EVPKeyPointer(key), maybe_passphrase.has_value()); + } + + PKCS8Pointer p8inf(d2i_PKCS8_PRIV_KEY_INFO_bio(bio.get(), nullptr)); + if (!p8inf) { + return 
ParseKeyResult(PKParseError::FAILED, ERR_peek_error()); + } + return keyOrError(EVPKeyPointer(EVP_PKCS82PKEY(p8inf.get()))); + } + case PKEncodingType::SEC1: { + auto key = d2i_PrivateKey_bio(bio.get(), nullptr); + return keyOrError(EVPKeyPointer(key)); + } + default: { + return ParseKeyResult(PKParseError::FAILED, ERR_peek_error()); + } + }; +} + } // namespace ncrypto diff --git a/deps/ncrypto/ncrypto.h b/deps/ncrypto/ncrypto.h index 60bfce3ea8999e..20b69dc67b13fd 100644 --- a/deps/ncrypto/ncrypto.h +++ b/deps/ncrypto/ncrypto.h @@ -148,7 +148,7 @@ class ClearErrorOnReturn final { NCRYPTO_DISALLOW_COPY_AND_MOVE(ClearErrorOnReturn) NCRYPTO_DISALLOW_NEW_DELETE() - int peeKError(); + int peekError(); private: CryptoErrorList* errors_; @@ -172,12 +172,20 @@ class MarkPopErrorOnReturn final { CryptoErrorList* errors_; }; +// TODO(@jasnell): Eventually replace with std::expected when we are able to +// bump up to c++23. template struct Result final { + const bool has_value; T value; - std::optional error; - Result(T&& value) : value(std::move(value)) {} - Result(E&& error) : error(std::move(error)) {} + std::optional error = std::nullopt; + std::optional openssl_error = std::nullopt; + Result(T&& value) : has_value(true), value(std::move(value)) {} + Result(E&& error, std::optional openssl_error = std::nullopt) + : has_value(false), + error(std::move(error)), + openssl_error(std::move(openssl_error)) {} + inline operator bool() const { return has_value; } }; // ============================================================================ @@ -202,7 +210,6 @@ using ECGroupPointer = DeleteFnPtr; using ECKeyPointer = DeleteFnPtr; using ECPointPointer = DeleteFnPtr; using EVPKeyCtxPointer = DeleteFnPtr; -using EVPKeyPointer = DeleteFnPtr; using EVPMDCtxPointer = DeleteFnPtr; using HMACCtxPointer = DeleteFnPtr; using NetscapeSPKIPointer = DeleteFnPtr; @@ -252,9 +259,10 @@ class DataPointer final { Buffer release(); // Returns a Buffer struct that is a view of the underlying data. - inline operator const Buffer() const { + template + inline operator const Buffer() const { return { - .data = data_, + .data = static_cast(data_), .len = len_, }; } @@ -273,6 +281,11 @@ class BIOPointer final { static BIOPointer NewFile(std::string_view filename, std::string_view mode); static BIOPointer NewFp(FILE* fd, int flags); + template + static BIOPointer New(const Buffer& buf) { + return New(buf.data, buf.len); + } + BIOPointer() = default; BIOPointer(std::nullptr_t) : bio_(nullptr) {} explicit BIOPointer(BIO* bio); @@ -359,6 +372,86 @@ class BignumPointer final { DeleteFnPtr bn_; }; +class EVPKeyPointer final { +public: + static EVPKeyPointer New(); + static EVPKeyPointer NewRawPublic(int id, const Buffer& data); + static EVPKeyPointer NewRawPrivate(int id, const Buffer& data); + + enum class PKEncodingType { + // RSAPublicKey / RSAPrivateKey according to PKCS#1. + PKCS1, + // PrivateKeyInfo or EncryptedPrivateKeyInfo according to PKCS#8. + PKCS8, + // SubjectPublicKeyInfo according to X.509. + SPKI, + // ECPrivateKey according to SEC1. 
+ SEC1 + }; + + enum class PKFormatType { + DER, + PEM, + JWK + }; + + enum class PKParseError { + NOT_RECOGNIZED, + NEED_PASSPHRASE, + FAILED + }; + using ParseKeyResult = Result; + + static ParseKeyResult TryParsePublicKey( + PKFormatType format, + PKEncodingType encoding, + const Buffer& buffer); + + static ParseKeyResult TryParsePublicKeyPEM( + const Buffer& buffer); + + static ParseKeyResult TryParsePrivateKey( + PKFormatType format, + PKEncodingType encoding, + std::optional> passphrase, + const Buffer& buffer); + + EVPKeyPointer() = default; + explicit EVPKeyPointer(EVP_PKEY* pkey); + EVPKeyPointer(EVPKeyPointer&& other) noexcept; + EVPKeyPointer& operator=(EVPKeyPointer&& other) noexcept; + NCRYPTO_DISALLOW_COPY(EVPKeyPointer) + ~EVPKeyPointer(); + + inline bool operator==(std::nullptr_t) const noexcept { return pkey_ == nullptr; } + inline operator bool() const { return pkey_ != nullptr; } + inline EVP_PKEY* get() const { return pkey_.get(); } + void reset(EVP_PKEY* pkey = nullptr); + EVP_PKEY* release(); + + static int id(const EVP_PKEY* key); + static int base_id(const EVP_PKEY* key); + + int id() const; + int base_id() const; + int bits() const; + size_t size() const; + + size_t rawPublicKeySize() const; + size_t rawPrivateKeySize() const; + DataPointer rawPublicKey() const; + DataPointer rawPrivateKey() const; + + BIOPointer derPublicKey() const; + + EVPKeyCtxPointer newCtx() const; + + static bool IsRSAPrivateKey(const Buffer& buffer); + +private: + DeleteFnPtr pkey_; +}; + class DHPointer final { public: @@ -462,6 +555,8 @@ class X509View final { BIOPointer getInfoAccess() const; BIOPointer getValidFrom() const; BIOPointer getValidTo() const; + int64_t getValidFromTime() const; + int64_t getValidToTime() const; DataPointer getSerialNumber() const; Result getPublicKey() const; StackOfASN1 getKeyUsage() const; diff --git a/deps/ngtcp2/nghttp3/lib/nghttp3_ringbuf.c b/deps/ngtcp2/nghttp3/lib/nghttp3_ringbuf.c index 61a7d06cad306f..38b5460837190b 100644 --- a/deps/ngtcp2/nghttp3/lib/nghttp3_ringbuf.c +++ b/deps/ngtcp2/nghttp3/lib/nghttp3_ringbuf.c @@ -33,7 +33,7 @@ #include "nghttp3_macro.h" -#if defined(_MSC_VER) && !defined(__clang__) && \ +#if defined(_MSC_VER) && _MSC_VER < 1941 && !defined(__clang__) && \ (defined(_M_ARM) || defined(_M_ARM64)) unsigned int __popcnt(unsigned int x) { unsigned int c = 0; diff --git a/deps/ngtcp2/ngtcp2/lib/ngtcp2_ringbuf.c b/deps/ngtcp2/ngtcp2/lib/ngtcp2_ringbuf.c index c381c231276d34..ecfdeb63b3485b 100644 --- a/deps/ngtcp2/ngtcp2/lib/ngtcp2_ringbuf.c +++ b/deps/ngtcp2/ngtcp2/lib/ngtcp2_ringbuf.c @@ -31,7 +31,7 @@ #include "ngtcp2_macro.h" -#if defined(_MSC_VER) && !defined(__clang__) && \ +#if defined(_MSC_VER) && _MSC_VER < 1941 && !defined(__clang__) && \ (defined(_M_ARM) || defined(_M_ARM64)) static unsigned int __popcnt(unsigned int x) { unsigned int c = 0; diff --git a/deps/npm/docs/content/commands/npm-init.md b/deps/npm/docs/content/commands/npm-init.md index c4b39e6397019c..870f846761e4e3 100644 --- a/deps/npm/docs/content/commands/npm-init.md +++ b/deps/npm/docs/content/commands/npm-init.md @@ -7,7 +7,7 @@ description: Create a package.json file ### Synopsis ```bash -npm init (same as `npx `) +npm init (same as `npx create-`) npm init <@scope> (same as `npx <@scope>/create`) aliases: create, innit diff --git a/deps/npm/docs/content/commands/npm-install.md b/deps/npm/docs/content/commands/npm-install.md index 3ec3c0f3c12896..ec933394b1a726 100644 --- a/deps/npm/docs/content/commands/npm-install.md +++ 
b/deps/npm/docs/content/commands/npm-install.md @@ -178,10 +178,6 @@ into a tarball (b). npm install ansi-regex --save-bundle ``` - **Note**: If there is a file or folder named `` in the current - working directory, then it will try to install that, and only try to - fetch the package by name if it is not valid. - * `npm install @npm:`: Install a package under a custom alias. Allows multiple versions of diff --git a/deps/npm/docs/content/commands/npm-ls.md b/deps/npm/docs/content/commands/npm-ls.md index 59e5b281ef1155..3abedf9cd8e4e6 100644 --- a/deps/npm/docs/content/commands/npm-ls.md +++ b/deps/npm/docs/content/commands/npm-ls.md @@ -27,7 +27,7 @@ packages will *also* show the paths to the specified packages. For example, running `npm ls promzard` in npm's source tree will show: ```bash -npm@10.8.3 /path/to/npm +npm@10.9.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 ``` diff --git a/deps/npm/docs/content/commands/npm.md b/deps/npm/docs/content/commands/npm.md index 52ad44d60a6db9..d92892ec544eb9 100644 --- a/deps/npm/docs/content/commands/npm.md +++ b/deps/npm/docs/content/commands/npm.md @@ -14,7 +14,7 @@ Note: This command is unaware of workspaces. ### Version -10.8.3 +10.9.0 ### Description diff --git a/deps/npm/docs/content/configuring-npm/package-json.md b/deps/npm/docs/content/configuring-npm/package-json.md index a843b974b0ed55..7e9daf1317717f 100644 --- a/deps/npm/docs/content/configuring-npm/package-json.md +++ b/deps/npm/docs/content/configuring-npm/package-json.md @@ -337,6 +337,10 @@ the `files` globs. Exceptions to this are: These can not be included. +### exports + +The "exports" provides a modern alternative to "main" allowing multiple entry points to be defined, conditional entry resolution support between environments, and preventing any other entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details see the [node.js documentation on package entry points](https://nodejs.org/api/packages.html#package-entry-points) + ### main The main field is a module ID that is the primary entry point to your @@ -1125,6 +1129,32 @@ Like the `os` option, you can also block architectures: The host architecture is determined by `process.arch` +### devEngines + +The `devEngines` field aids engineers working on a codebase to all be using the same tooling. + +You can specify a `devEngines` property in your `package.json` which will run before `install`, `ci`, and `run` commands. + +> Note: `engines` and `devEngines` differ in object shape. They also function very differently. `engines` is designed to alert the user when a dependency uses a differening npm or node version that the project it's being used in, whereas `devEngines` is used to alert people interacting with the source code of a project. + +The supported keys under the `devEngines` property are `cpu`, `os`, `libc`, `runtime`, and `packageManager`. Each property can be an object or an array of objects. Objects must contain `name`, and optionally can specify `version`, and `onFail`. `onFail` can be `warn`, `error`, or `ignore`, and if left undefined is of the same value as `error`. `npm` will assume that you're running with `node`. +Here's an example of a project that will fail if the environment is not `node` and `npm`. If you set `runtime.name` or `packageManager.name` to any other string, it will fail within the npm CLI. 
+ +```json +{ + "devEngines": { + "runtime": { + "name": "node", + "onFail": "error" + }, + "packageManager": { + "name": "npm", + "onFail": "error" + } + } +} +``` + ### private If you set `"private": true` in your package.json, then npm will refuse to diff --git a/deps/npm/docs/output/commands/npm-access.html b/deps/npm/docs/output/commands/npm-access.html index 76bf07b8473ee4..0a839663fcb016 100644 --- a/deps/npm/docs/output/commands/npm-access.html +++ b/deps/npm/docs/output/commands/npm-access.html @@ -141,9 +141,9 @@
-

+

npm-access - @10.8.3 + @10.9.0

Set access level on published packages
diff --git a/deps/npm/docs/output/commands/npm-adduser.html b/deps/npm/docs/output/commands/npm-adduser.html index 8cf2e1abf4bf55..2ac615ad13978d 100644 --- a/deps/npm/docs/output/commands/npm-adduser.html +++ b/deps/npm/docs/output/commands/npm-adduser.html @@ -141,9 +141,9 @@
-

+

npm-adduser - @10.8.3 + @10.9.0

Add a registry user account
diff --git a/deps/npm/docs/output/commands/npm-audit.html b/deps/npm/docs/output/commands/npm-audit.html index 7d53101a2f37b8..d7374ccfa66089 100644 --- a/deps/npm/docs/output/commands/npm-audit.html +++ b/deps/npm/docs/output/commands/npm-audit.html @@ -141,9 +141,9 @@
-

+

npm-audit - @10.8.3 + @10.9.0

Run a security audit
diff --git a/deps/npm/docs/output/commands/npm-bugs.html b/deps/npm/docs/output/commands/npm-bugs.html index d9e5cf60b6776d..9186bc100ae8f6 100644 --- a/deps/npm/docs/output/commands/npm-bugs.html +++ b/deps/npm/docs/output/commands/npm-bugs.html @@ -141,9 +141,9 @@
-

+

npm-bugs - @10.8.3 + @10.9.0

Report bugs for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-cache.html b/deps/npm/docs/output/commands/npm-cache.html index 21f2cc68bcc6ce..55cec217dbdfb7 100644 --- a/deps/npm/docs/output/commands/npm-cache.html +++ b/deps/npm/docs/output/commands/npm-cache.html @@ -141,9 +141,9 @@
-

+

npm-cache - @10.8.3 + @10.9.0

Manipulates packages cache
diff --git a/deps/npm/docs/output/commands/npm-ci.html b/deps/npm/docs/output/commands/npm-ci.html index 9f645ecfb59c44..efe858777565a9 100644 --- a/deps/npm/docs/output/commands/npm-ci.html +++ b/deps/npm/docs/output/commands/npm-ci.html @@ -141,9 +141,9 @@
-

+

npm-ci - @10.8.3 + @10.9.0

Clean install a project
diff --git a/deps/npm/docs/output/commands/npm-completion.html b/deps/npm/docs/output/commands/npm-completion.html index d73c648bfa391f..4a91a94498f33b 100644 --- a/deps/npm/docs/output/commands/npm-completion.html +++ b/deps/npm/docs/output/commands/npm-completion.html @@ -141,9 +141,9 @@
-

+

npm-completion - @10.8.3 + @10.9.0

Tab Completion for npm
diff --git a/deps/npm/docs/output/commands/npm-config.html b/deps/npm/docs/output/commands/npm-config.html index 4e5c70886388ec..d18998fea8471d 100644 --- a/deps/npm/docs/output/commands/npm-config.html +++ b/deps/npm/docs/output/commands/npm-config.html @@ -141,9 +141,9 @@
-

+

npm-config - @10.8.3 + @10.9.0

Manage the npm configuration files
diff --git a/deps/npm/docs/output/commands/npm-dedupe.html b/deps/npm/docs/output/commands/npm-dedupe.html index f4c92e79f8d59c..194ea085383df3 100644 --- a/deps/npm/docs/output/commands/npm-dedupe.html +++ b/deps/npm/docs/output/commands/npm-dedupe.html @@ -141,9 +141,9 @@
-

+

npm-dedupe - @10.8.3 + @10.9.0

Reduce duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-deprecate.html b/deps/npm/docs/output/commands/npm-deprecate.html index c644650427aaff..ae40adfbbab051 100644 --- a/deps/npm/docs/output/commands/npm-deprecate.html +++ b/deps/npm/docs/output/commands/npm-deprecate.html @@ -141,9 +141,9 @@
-npm-deprecate @10.8.3
+npm-deprecate @10.9.0

Deprecate a version of a package
diff --git a/deps/npm/docs/output/commands/npm-diff.html b/deps/npm/docs/output/commands/npm-diff.html index 6e23a06faa413c..257b1c21572083 100644 --- a/deps/npm/docs/output/commands/npm-diff.html +++ b/deps/npm/docs/output/commands/npm-diff.html @@ -141,9 +141,9 @@
-npm-diff @10.8.3
+npm-diff @10.9.0

The registry diff command
diff --git a/deps/npm/docs/output/commands/npm-dist-tag.html b/deps/npm/docs/output/commands/npm-dist-tag.html index 2e0ea2b927c3d9..08e7770a0c1745 100644 --- a/deps/npm/docs/output/commands/npm-dist-tag.html +++ b/deps/npm/docs/output/commands/npm-dist-tag.html @@ -141,9 +141,9 @@
-npm-dist-tag @10.8.3
+npm-dist-tag @10.9.0

Modify package distribution tags
diff --git a/deps/npm/docs/output/commands/npm-docs.html b/deps/npm/docs/output/commands/npm-docs.html index e5108efb32b0e3..8b647251f36740 100644 --- a/deps/npm/docs/output/commands/npm-docs.html +++ b/deps/npm/docs/output/commands/npm-docs.html @@ -141,9 +141,9 @@
-npm-docs @10.8.3
+npm-docs @10.9.0

Open documentation for a package in a web browser
diff --git a/deps/npm/docs/output/commands/npm-doctor.html b/deps/npm/docs/output/commands/npm-doctor.html index 6ec69164f8d3e4..110ec2f9b20bf0 100644 --- a/deps/npm/docs/output/commands/npm-doctor.html +++ b/deps/npm/docs/output/commands/npm-doctor.html @@ -141,9 +141,9 @@
-npm-doctor @10.8.3
+npm-doctor @10.9.0

Check the health of your npm environment
diff --git a/deps/npm/docs/output/commands/npm-edit.html b/deps/npm/docs/output/commands/npm-edit.html index 3b0e72471738dc..9e4f7b361a5cb6 100644 --- a/deps/npm/docs/output/commands/npm-edit.html +++ b/deps/npm/docs/output/commands/npm-edit.html @@ -141,9 +141,9 @@
-npm-edit @10.8.3
+npm-edit @10.9.0

Edit an installed package
diff --git a/deps/npm/docs/output/commands/npm-exec.html b/deps/npm/docs/output/commands/npm-exec.html index 71087fefd84709..695fa35ab825c2 100644 --- a/deps/npm/docs/output/commands/npm-exec.html +++ b/deps/npm/docs/output/commands/npm-exec.html @@ -141,9 +141,9 @@
-npm-exec @10.8.3
+npm-exec @10.9.0

Run a command from a local or remote npm package
diff --git a/deps/npm/docs/output/commands/npm-explain.html b/deps/npm/docs/output/commands/npm-explain.html index dcc6a94838aaa5..e79255ff5fa6c6 100644 --- a/deps/npm/docs/output/commands/npm-explain.html +++ b/deps/npm/docs/output/commands/npm-explain.html @@ -141,9 +141,9 @@
-npm-explain @10.8.3
+npm-explain @10.9.0

Explain installed packages
diff --git a/deps/npm/docs/output/commands/npm-explore.html b/deps/npm/docs/output/commands/npm-explore.html index 783de27e7dd87c..e296f4146aff98 100644 --- a/deps/npm/docs/output/commands/npm-explore.html +++ b/deps/npm/docs/output/commands/npm-explore.html @@ -141,9 +141,9 @@
-npm-explore @10.8.3
+npm-explore @10.9.0

Browse an installed package
diff --git a/deps/npm/docs/output/commands/npm-find-dupes.html b/deps/npm/docs/output/commands/npm-find-dupes.html index fb1badb1476c27..a8c914a0dd3d44 100644 --- a/deps/npm/docs/output/commands/npm-find-dupes.html +++ b/deps/npm/docs/output/commands/npm-find-dupes.html @@ -141,9 +141,9 @@
-npm-find-dupes @10.8.3
+npm-find-dupes @10.9.0

Find duplication in the package tree
diff --git a/deps/npm/docs/output/commands/npm-fund.html b/deps/npm/docs/output/commands/npm-fund.html index 9e597d085c0c34..36a63253439b60 100644 --- a/deps/npm/docs/output/commands/npm-fund.html +++ b/deps/npm/docs/output/commands/npm-fund.html @@ -141,9 +141,9 @@
-npm-fund @10.8.3
+npm-fund @10.9.0

Retrieve funding information
diff --git a/deps/npm/docs/output/commands/npm-help-search.html b/deps/npm/docs/output/commands/npm-help-search.html index 0b521d6de2ab1f..76dea45d852e75 100644 --- a/deps/npm/docs/output/commands/npm-help-search.html +++ b/deps/npm/docs/output/commands/npm-help-search.html @@ -141,9 +141,9 @@
-npm-help-search @10.8.3
+npm-help-search @10.9.0

Search npm help documentation
diff --git a/deps/npm/docs/output/commands/npm-help.html b/deps/npm/docs/output/commands/npm-help.html index dafcf20f7bc08b..e6b14af2f9ec6c 100644 --- a/deps/npm/docs/output/commands/npm-help.html +++ b/deps/npm/docs/output/commands/npm-help.html @@ -141,9 +141,9 @@
-npm-help @10.8.3
+npm-help @10.9.0

Get help on npm
diff --git a/deps/npm/docs/output/commands/npm-hook.html b/deps/npm/docs/output/commands/npm-hook.html index 953ce47aca7c80..393700a9a7165a 100644 --- a/deps/npm/docs/output/commands/npm-hook.html +++ b/deps/npm/docs/output/commands/npm-hook.html @@ -141,9 +141,9 @@
-npm-hook @10.8.3
+npm-hook @10.9.0

Manage registry hooks
diff --git a/deps/npm/docs/output/commands/npm-init.html b/deps/npm/docs/output/commands/npm-init.html index 305e34838072dd..8ff01b2f7a76d0 100644 --- a/deps/npm/docs/output/commands/npm-init.html +++ b/deps/npm/docs/output/commands/npm-init.html @@ -141,9 +141,9 @@
-npm-init @10.8.3
+npm-init @10.9.0

Create a package.json file
@@ -154,7 +154,7 @@

Table of contents

Synopsis

-npm init <package-spec> (same as `npx <package-spec>`)
+npm init <package-spec> (same as `npx create-<package-spec>`)
 npm init <@scope> (same as `npx <@scope>/create`)
 
 aliases: create, innit
diff --git a/deps/npm/docs/output/commands/npm-install-ci-test.html b/deps/npm/docs/output/commands/npm-install-ci-test.html
index a25a150d655e86..b0f9d237ed8e98 100644
--- a/deps/npm/docs/output/commands/npm-install-ci-test.html
+++ b/deps/npm/docs/output/commands/npm-install-ci-test.html
@@ -141,9 +141,9 @@
 
 
-npm-install-ci-test @10.8.3
+npm-install-ci-test @10.9.0

Install a project with a clean slate and run tests
diff --git a/deps/npm/docs/output/commands/npm-install-test.html b/deps/npm/docs/output/commands/npm-install-test.html index 1a36de01dcff6f..b0fbd63887fff5 100644 --- a/deps/npm/docs/output/commands/npm-install-test.html +++ b/deps/npm/docs/output/commands/npm-install-test.html @@ -141,9 +141,9 @@
-npm-install-test @10.8.3
+npm-install-test @10.9.0

Install package(s) and run tests
diff --git a/deps/npm/docs/output/commands/npm-install.html b/deps/npm/docs/output/commands/npm-install.html index 858c325cd833b7..fa57e02eaf9ad5 100644 --- a/deps/npm/docs/output/commands/npm-install.html +++ b/deps/npm/docs/output/commands/npm-install.html @@ -141,9 +141,9 @@
-npm-install @10.8.3
+npm-install @10.9.0

Install a package
@@ -309,9 +309,6 @@

Description

npm install readable-stream --save-exact
npm install ansi-regex --save-bundle
-Note: If there is a file or folder named <name> in the current
-working directory, then it will try to install that, and only try to
-fetch the package by name if it is not valid.

  • npm install <alias>@npm:<name>:

    diff --git a/deps/npm/docs/output/commands/npm-link.html b/deps/npm/docs/output/commands/npm-link.html index c6738b85254188..4e461ebefafd42 100644 --- a/deps/npm/docs/output/commands/npm-link.html +++ b/deps/npm/docs/output/commands/npm-link.html @@ -141,9 +141,9 @@
-npm-link @10.8.3
+npm-link @10.9.0

    Symlink a package folder
    diff --git a/deps/npm/docs/output/commands/npm-login.html b/deps/npm/docs/output/commands/npm-login.html index bc8e4684ea5363..9c1584ca36bc41 100644 --- a/deps/npm/docs/output/commands/npm-login.html +++ b/deps/npm/docs/output/commands/npm-login.html @@ -141,9 +141,9 @@
-npm-login @10.8.3
+npm-login @10.9.0

    Login to a registry user account
    diff --git a/deps/npm/docs/output/commands/npm-logout.html b/deps/npm/docs/output/commands/npm-logout.html index 5b48e25a8245b0..8908b329395254 100644 --- a/deps/npm/docs/output/commands/npm-logout.html +++ b/deps/npm/docs/output/commands/npm-logout.html @@ -141,9 +141,9 @@
-npm-logout @10.8.3
+npm-logout @10.9.0

    Log out of the registry
    diff --git a/deps/npm/docs/output/commands/npm-ls.html b/deps/npm/docs/output/commands/npm-ls.html index c1c5cd0c9e779f..2615a492a75e38 100644 --- a/deps/npm/docs/output/commands/npm-ls.html +++ b/deps/npm/docs/output/commands/npm-ls.html @@ -141,9 +141,9 @@
-npm-ls @10.8.3
+npm-ls @10.9.0

    List installed packages
    @@ -168,7 +168,7 @@

    Description

    the results to only the paths to the packages named. Note that nested packages will also show the paths to the specified packages. For example, running npm ls promzard in npm's source tree will show:

-npm@10.8.3 /path/to/npm
+npm@10.9.0 /path/to/npm
     └─┬ init-package-json@0.0.4
       └── promzard@0.1.5
     
    diff --git a/deps/npm/docs/output/commands/npm-org.html b/deps/npm/docs/output/commands/npm-org.html index 9f97a53bf5cb34..66d823f7f3c2de 100644 --- a/deps/npm/docs/output/commands/npm-org.html +++ b/deps/npm/docs/output/commands/npm-org.html @@ -141,9 +141,9 @@
-npm-org @10.8.3
+npm-org @10.9.0

    Manage orgs
    diff --git a/deps/npm/docs/output/commands/npm-outdated.html b/deps/npm/docs/output/commands/npm-outdated.html index db933f402e923f..6002ff1818da2c 100644 --- a/deps/npm/docs/output/commands/npm-outdated.html +++ b/deps/npm/docs/output/commands/npm-outdated.html @@ -141,9 +141,9 @@
-npm-outdated @10.8.3
+npm-outdated @10.9.0

    Check for outdated packages
    diff --git a/deps/npm/docs/output/commands/npm-owner.html b/deps/npm/docs/output/commands/npm-owner.html index 7b3d111efad61a..e8b7a05274aa91 100644 --- a/deps/npm/docs/output/commands/npm-owner.html +++ b/deps/npm/docs/output/commands/npm-owner.html @@ -141,9 +141,9 @@
-npm-owner @10.8.3
+npm-owner @10.9.0

    Manage package owners
    diff --git a/deps/npm/docs/output/commands/npm-pack.html b/deps/npm/docs/output/commands/npm-pack.html index a39574805ca1a1..596ba9a35e3ac5 100644 --- a/deps/npm/docs/output/commands/npm-pack.html +++ b/deps/npm/docs/output/commands/npm-pack.html @@ -141,9 +141,9 @@
-npm-pack @10.8.3
+npm-pack @10.9.0

    Create a tarball from a package
    diff --git a/deps/npm/docs/output/commands/npm-ping.html b/deps/npm/docs/output/commands/npm-ping.html index bb052270cf3f88..de393dbd0b5d71 100644 --- a/deps/npm/docs/output/commands/npm-ping.html +++ b/deps/npm/docs/output/commands/npm-ping.html @@ -141,9 +141,9 @@
-npm-ping @10.8.3
+npm-ping @10.9.0

    Ping npm registry
    diff --git a/deps/npm/docs/output/commands/npm-pkg.html b/deps/npm/docs/output/commands/npm-pkg.html index 86c5ad73917db5..ffb153cc440ce4 100644 --- a/deps/npm/docs/output/commands/npm-pkg.html +++ b/deps/npm/docs/output/commands/npm-pkg.html @@ -141,9 +141,9 @@
-npm-pkg @10.8.3
+npm-pkg @10.9.0

    Manages your package.json
    diff --git a/deps/npm/docs/output/commands/npm-prefix.html b/deps/npm/docs/output/commands/npm-prefix.html index d16200830c3a3a..da63644f2df42a 100644 --- a/deps/npm/docs/output/commands/npm-prefix.html +++ b/deps/npm/docs/output/commands/npm-prefix.html @@ -141,9 +141,9 @@
-npm-prefix @10.8.3
+npm-prefix @10.9.0

    Display prefix
    diff --git a/deps/npm/docs/output/commands/npm-profile.html b/deps/npm/docs/output/commands/npm-profile.html index ac0c7206cbaa46..c3679196a85bc4 100644 --- a/deps/npm/docs/output/commands/npm-profile.html +++ b/deps/npm/docs/output/commands/npm-profile.html @@ -141,9 +141,9 @@
-npm-profile @10.8.3
+npm-profile @10.9.0

    Change settings on your registry profile
    diff --git a/deps/npm/docs/output/commands/npm-prune.html b/deps/npm/docs/output/commands/npm-prune.html index ea1976ed852ca7..4e844ab9f02cbb 100644 --- a/deps/npm/docs/output/commands/npm-prune.html +++ b/deps/npm/docs/output/commands/npm-prune.html @@ -141,9 +141,9 @@
-npm-prune @10.8.3
+npm-prune @10.9.0

    Remove extraneous packages
    diff --git a/deps/npm/docs/output/commands/npm-publish.html b/deps/npm/docs/output/commands/npm-publish.html index 89d5ac62ec1fac..b808cc29a15744 100644 --- a/deps/npm/docs/output/commands/npm-publish.html +++ b/deps/npm/docs/output/commands/npm-publish.html @@ -141,9 +141,9 @@
-npm-publish @10.8.3
+npm-publish @10.9.0

    Publish a package
    diff --git a/deps/npm/docs/output/commands/npm-query.html b/deps/npm/docs/output/commands/npm-query.html index 90c75bccd61189..d85a2f6c27ff79 100644 --- a/deps/npm/docs/output/commands/npm-query.html +++ b/deps/npm/docs/output/commands/npm-query.html @@ -141,9 +141,9 @@
-npm-query @10.8.3
+npm-query @10.9.0

    Dependency selector query
    diff --git a/deps/npm/docs/output/commands/npm-rebuild.html b/deps/npm/docs/output/commands/npm-rebuild.html index 6dbb7f9c12e42d..ff28b35b86bced 100644 --- a/deps/npm/docs/output/commands/npm-rebuild.html +++ b/deps/npm/docs/output/commands/npm-rebuild.html @@ -141,9 +141,9 @@
-npm-rebuild @10.8.3
+npm-rebuild @10.9.0

    Rebuild a package
    diff --git a/deps/npm/docs/output/commands/npm-repo.html b/deps/npm/docs/output/commands/npm-repo.html index c31847aeca0622..8ee7e9d6d2d074 100644 --- a/deps/npm/docs/output/commands/npm-repo.html +++ b/deps/npm/docs/output/commands/npm-repo.html @@ -141,9 +141,9 @@
-npm-repo @10.8.3
+npm-repo @10.9.0

    Open package repository page in the browser
    diff --git a/deps/npm/docs/output/commands/npm-restart.html b/deps/npm/docs/output/commands/npm-restart.html index 6d0b29454a77be..e3171bf280910b 100644 --- a/deps/npm/docs/output/commands/npm-restart.html +++ b/deps/npm/docs/output/commands/npm-restart.html @@ -141,9 +141,9 @@
-npm-restart @10.8.3
+npm-restart @10.9.0

    Restart a package
    diff --git a/deps/npm/docs/output/commands/npm-root.html b/deps/npm/docs/output/commands/npm-root.html index 670cf25a731134..4e2e82e5bb259c 100644 --- a/deps/npm/docs/output/commands/npm-root.html +++ b/deps/npm/docs/output/commands/npm-root.html @@ -141,9 +141,9 @@
-npm-root @10.8.3
+npm-root @10.9.0

    Display npm root
    diff --git a/deps/npm/docs/output/commands/npm-run-script.html b/deps/npm/docs/output/commands/npm-run-script.html index bcabcc0bfec01f..4673e733f49fcd 100644 --- a/deps/npm/docs/output/commands/npm-run-script.html +++ b/deps/npm/docs/output/commands/npm-run-script.html @@ -141,9 +141,9 @@
-npm-run-script @10.8.3
+npm-run-script @10.9.0

    Run arbitrary package scripts
    diff --git a/deps/npm/docs/output/commands/npm-sbom.html b/deps/npm/docs/output/commands/npm-sbom.html index f95f0992fb5cd1..00508eca6cd91a 100644 --- a/deps/npm/docs/output/commands/npm-sbom.html +++ b/deps/npm/docs/output/commands/npm-sbom.html @@ -141,9 +141,9 @@
-npm-sbom @10.8.3
+npm-sbom @10.9.0

    Generate a Software Bill of Materials (SBOM)
    diff --git a/deps/npm/docs/output/commands/npm-search.html b/deps/npm/docs/output/commands/npm-search.html index ba4b81381b0b38..edf4e437e65400 100644 --- a/deps/npm/docs/output/commands/npm-search.html +++ b/deps/npm/docs/output/commands/npm-search.html @@ -141,9 +141,9 @@
-npm-search @10.8.3
+npm-search @10.9.0

    Search for packages
    diff --git a/deps/npm/docs/output/commands/npm-shrinkwrap.html b/deps/npm/docs/output/commands/npm-shrinkwrap.html index c143822f846db7..f225abaed9218f 100644 --- a/deps/npm/docs/output/commands/npm-shrinkwrap.html +++ b/deps/npm/docs/output/commands/npm-shrinkwrap.html @@ -141,9 +141,9 @@
-npm-shrinkwrap @10.8.3
+npm-shrinkwrap @10.9.0

    Lock down dependency versions for publication
    diff --git a/deps/npm/docs/output/commands/npm-star.html b/deps/npm/docs/output/commands/npm-star.html index e634b8686c2e7a..04b36628ad2176 100644 --- a/deps/npm/docs/output/commands/npm-star.html +++ b/deps/npm/docs/output/commands/npm-star.html @@ -141,9 +141,9 @@
-npm-star @10.8.3
+npm-star @10.9.0

    Mark your favorite packages
    diff --git a/deps/npm/docs/output/commands/npm-stars.html b/deps/npm/docs/output/commands/npm-stars.html index 0986e3a60f6acd..3183aa047f1f89 100644 --- a/deps/npm/docs/output/commands/npm-stars.html +++ b/deps/npm/docs/output/commands/npm-stars.html @@ -141,9 +141,9 @@
-npm-stars @10.8.3
+npm-stars @10.9.0

    View packages marked as favorites
    diff --git a/deps/npm/docs/output/commands/npm-start.html b/deps/npm/docs/output/commands/npm-start.html index 0879146131ed6e..b81caefe0a4431 100644 --- a/deps/npm/docs/output/commands/npm-start.html +++ b/deps/npm/docs/output/commands/npm-start.html @@ -141,9 +141,9 @@
-npm-start @10.8.3
+npm-start @10.9.0

    Start a package
    diff --git a/deps/npm/docs/output/commands/npm-stop.html b/deps/npm/docs/output/commands/npm-stop.html index ea3001a24bdcab..85d4c782a736e7 100644 --- a/deps/npm/docs/output/commands/npm-stop.html +++ b/deps/npm/docs/output/commands/npm-stop.html @@ -141,9 +141,9 @@
-npm-stop @10.8.3
+npm-stop @10.9.0

    Stop a package
    diff --git a/deps/npm/docs/output/commands/npm-team.html b/deps/npm/docs/output/commands/npm-team.html index 2cad22ffbd4fe2..6ad869fa3bed9e 100644 --- a/deps/npm/docs/output/commands/npm-team.html +++ b/deps/npm/docs/output/commands/npm-team.html @@ -141,9 +141,9 @@
-npm-team @10.8.3
+npm-team @10.9.0

    Manage organization teams and team memberships
    diff --git a/deps/npm/docs/output/commands/npm-test.html b/deps/npm/docs/output/commands/npm-test.html index ab7431a1b0a8b0..bce16c92e4f087 100644 --- a/deps/npm/docs/output/commands/npm-test.html +++ b/deps/npm/docs/output/commands/npm-test.html @@ -141,9 +141,9 @@
-npm-test @10.8.3
+npm-test @10.9.0

    Test a package
    diff --git a/deps/npm/docs/output/commands/npm-token.html b/deps/npm/docs/output/commands/npm-token.html index ca7df2869ec714..99fcafd9b8f53b 100644 --- a/deps/npm/docs/output/commands/npm-token.html +++ b/deps/npm/docs/output/commands/npm-token.html @@ -141,9 +141,9 @@
-npm-token @10.8.3
+npm-token @10.9.0

    Manage your authentication tokens
    diff --git a/deps/npm/docs/output/commands/npm-uninstall.html b/deps/npm/docs/output/commands/npm-uninstall.html index ea964c96e07073..dd8e7a2234604e 100644 --- a/deps/npm/docs/output/commands/npm-uninstall.html +++ b/deps/npm/docs/output/commands/npm-uninstall.html @@ -141,9 +141,9 @@
-npm-uninstall @10.8.3
+npm-uninstall @10.9.0

    Remove a package
    diff --git a/deps/npm/docs/output/commands/npm-unpublish.html b/deps/npm/docs/output/commands/npm-unpublish.html index 607049cf467cd5..56e2c59f62addb 100644 --- a/deps/npm/docs/output/commands/npm-unpublish.html +++ b/deps/npm/docs/output/commands/npm-unpublish.html @@ -141,9 +141,9 @@
-npm-unpublish @10.8.3
+npm-unpublish @10.9.0

    Remove a package from the registry
    diff --git a/deps/npm/docs/output/commands/npm-unstar.html b/deps/npm/docs/output/commands/npm-unstar.html index 15a72f6013482d..710d8a947d665f 100644 --- a/deps/npm/docs/output/commands/npm-unstar.html +++ b/deps/npm/docs/output/commands/npm-unstar.html @@ -141,9 +141,9 @@
-npm-unstar @10.8.3
+npm-unstar @10.9.0

    Remove an item from your favorite packages
    diff --git a/deps/npm/docs/output/commands/npm-update.html b/deps/npm/docs/output/commands/npm-update.html index e38eb06ba4c8fc..e587ec92f8614a 100644 --- a/deps/npm/docs/output/commands/npm-update.html +++ b/deps/npm/docs/output/commands/npm-update.html @@ -141,9 +141,9 @@
-npm-update @10.8.3
+npm-update @10.9.0

    Update packages
    diff --git a/deps/npm/docs/output/commands/npm-version.html b/deps/npm/docs/output/commands/npm-version.html index dfb31dd2c9821d..196a0236093673 100644 --- a/deps/npm/docs/output/commands/npm-version.html +++ b/deps/npm/docs/output/commands/npm-version.html @@ -141,9 +141,9 @@
-npm-version @10.8.3
+npm-version @10.9.0

    Bump a package version
    diff --git a/deps/npm/docs/output/commands/npm-view.html b/deps/npm/docs/output/commands/npm-view.html index 8837e47e8f6867..0eebee3037748d 100644 --- a/deps/npm/docs/output/commands/npm-view.html +++ b/deps/npm/docs/output/commands/npm-view.html @@ -141,9 +141,9 @@
-npm-view @10.8.3
+npm-view @10.9.0

    View registry info
    diff --git a/deps/npm/docs/output/commands/npm-whoami.html b/deps/npm/docs/output/commands/npm-whoami.html index f6dbcc3f3ae190..0d1fa5ea9b87d7 100644 --- a/deps/npm/docs/output/commands/npm-whoami.html +++ b/deps/npm/docs/output/commands/npm-whoami.html @@ -141,9 +141,9 @@
-npm-whoami @10.8.3
+npm-whoami @10.9.0

    Display npm username
    diff --git a/deps/npm/docs/output/commands/npm.html b/deps/npm/docs/output/commands/npm.html index e9f96129c1571d..005009c1dba277 100644 --- a/deps/npm/docs/output/commands/npm.html +++ b/deps/npm/docs/output/commands/npm.html @@ -141,9 +141,9 @@
-npm @10.8.3
+npm @10.9.0

    javascript package manager
    @@ -158,7 +158,7 @@

    Table of contents

    Note: This command is unaware of workspaces.

    Version

-10.8.3
+10.9.0

    Description

npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency
diff --git a/deps/npm/docs/output/commands/npx.html b/deps/npm/docs/output/commands/npx.html index 418fc957a78f24..d3239ed320bb70 100644 --- a/deps/npm/docs/output/commands/npx.html +++ b/deps/npm/docs/output/commands/npx.html @@ -141,9 +141,9 @@

-npx @10.8.3
+npx @10.9.0

    Run a command from a local or remote npm package
    diff --git a/deps/npm/docs/output/configuring-npm/folders.html b/deps/npm/docs/output/configuring-npm/folders.html index 9bcbc0e5a36fc1..f23aef6354f73d 100644 --- a/deps/npm/docs/output/configuring-npm/folders.html +++ b/deps/npm/docs/output/configuring-npm/folders.html @@ -141,9 +141,9 @@
-folders @10.8.3
+folders @10.9.0

    Folder Structures Used by npm
    diff --git a/deps/npm/docs/output/configuring-npm/install.html b/deps/npm/docs/output/configuring-npm/install.html index d0d46ffc693878..cddbcaae2de8be 100644 --- a/deps/npm/docs/output/configuring-npm/install.html +++ b/deps/npm/docs/output/configuring-npm/install.html @@ -141,9 +141,9 @@
-install @10.8.3
+install @10.9.0

    Download and install node and npm
    diff --git a/deps/npm/docs/output/configuring-npm/npm-global.html b/deps/npm/docs/output/configuring-npm/npm-global.html index 9bcbc0e5a36fc1..f23aef6354f73d 100644 --- a/deps/npm/docs/output/configuring-npm/npm-global.html +++ b/deps/npm/docs/output/configuring-npm/npm-global.html @@ -141,9 +141,9 @@
-folders @10.8.3
+folders @10.9.0

    Folder Structures Used by npm
    diff --git a/deps/npm/docs/output/configuring-npm/npm-json.html b/deps/npm/docs/output/configuring-npm/npm-json.html index dfdc917fc21244..0cd27f8d82fdd8 100644 --- a/deps/npm/docs/output/configuring-npm/npm-json.html +++ b/deps/npm/docs/output/configuring-npm/npm-json.html @@ -141,16 +141,16 @@
-package.json @10.8.3
+package.json @10.9.0

    Specifics of npm's package.json handling

    Table of contents


    Description

    @@ -411,6 +411,8 @@

    files

  • yarn.lock
  • These can not be included.

+exports

+The "exports" field provides a modern alternative to "main", allowing multiple entry points to be defined, supporting conditional entry resolution between environments, and preventing any entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details, see the Node.js documentation on package entry points.
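As an illustrative sketch only (the package name and file paths here are assumptions, not part of this change), a subpath exports map might look like:

```json
{
  "name": "foo",
  "exports": {
    ".": "./index.js",
    "./utils": "./src/utils.js"
  }
}
```

With a map like this, foo/utils resolves to ./src/utils.js, and paths not listed under "exports" cannot be imported by consumers.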

    main

The main field is a module ID that is the primary entry point to your program. That is, if your package is named foo, and a user installs it,
@@ -1005,6 +1007,27 @@

    cpu

    }
  • The host architecture is determined by process.arch

+devEngines

+The devEngines field helps ensure that everyone working on a codebase uses the same tooling.

+You can specify a devEngines property in your package.json; it will be checked before the install, ci, and run commands.

+Note: engines and devEngines differ in object shape, and they also function very differently. engines is designed to alert the user when a dependency uses a different npm or node version than the project it's being used in, whereas devEngines is used to alert people interacting with the source code of a project.

+The supported keys under the devEngines property are cpu, os, libc, runtime, and packageManager. Each property can be an object or an array of objects. Objects must contain name, and can optionally specify version and onFail. onFail can be warn, error, or ignore; if left undefined, it behaves the same as error. npm will assume that you're running with node.

+Here's an example of a project that will fail if the environment is not node and npm. If you set runtime.name or packageManager.name to any other string, it will fail within the npm CLI.
+
    {
    +  "devEngines": {
    +    "runtime": {
    +      "name": "node",
    +      "onFail": "error"
    +    },
    +    "packageManager": {
    +      "name": "npm",
    +      "onFail": "error"
    +    }
    +  }
    +}
    +
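The example above fails hard on a mismatch; as a further sketch (the version range is an arbitrary illustration), the optional version and onFail fields can downgrade a check to a warning:

```json
{
  "devEngines": {
    "runtime": {
      "name": "node",
      "version": ">=20.0.0",
      "onFail": "warn"
    }
  }
}
```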

    private

    If you set "private": true in your package.json, then npm will refuse to publish it.

    diff --git a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html index 567f47ad15dba3..5f20e99541a7a7 100644 --- a/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html +++ b/deps/npm/docs/output/configuring-npm/npm-shrinkwrap-json.html @@ -141,9 +141,9 @@
-npm-shrinkwrap.json @10.8.3
+npm-shrinkwrap.json @10.9.0

    A publishable lockfile
    diff --git a/deps/npm/docs/output/configuring-npm/npmrc.html b/deps/npm/docs/output/configuring-npm/npmrc.html index 19950a61fb82be..a20c3d807d3d83 100644 --- a/deps/npm/docs/output/configuring-npm/npmrc.html +++ b/deps/npm/docs/output/configuring-npm/npmrc.html @@ -141,9 +141,9 @@
-npmrc @10.8.3
+npmrc @10.9.0

    The npm config files
    diff --git a/deps/npm/docs/output/configuring-npm/package-json.html b/deps/npm/docs/output/configuring-npm/package-json.html index dfdc917fc21244..0cd27f8d82fdd8 100644 --- a/deps/npm/docs/output/configuring-npm/package-json.html +++ b/deps/npm/docs/output/configuring-npm/package-json.html @@ -141,16 +141,16 @@
-package.json @10.8.3
+package.json @10.9.0

    Specifics of npm's package.json handling

    Table of contents


    Description

    @@ -411,6 +411,8 @@

    files

  • yarn.lock
  • These can not be included.

+exports

+The "exports" field provides a modern alternative to "main", allowing multiple entry points to be defined, supporting conditional entry resolution between environments, and preventing any entry points besides those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details, see the Node.js documentation on package entry points.
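A brief sketch of the conditional form (the file names are assumptions): the same specifier can resolve differently for require and import consumers:

```json
{
  "exports": {
    ".": {
      "import": "./index.mjs",
      "require": "./index.cjs"
    }
  }
}
```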

    main

The main field is a module ID that is the primary entry point to your program. That is, if your package is named foo, and a user installs it,
@@ -1005,6 +1007,27 @@

    cpu

    }

    The host architecture is determined by process.arch

+devEngines

+The devEngines field helps ensure that everyone working on a codebase uses the same tooling.

+You can specify a devEngines property in your package.json; it will be checked before the install, ci, and run commands.

+Note: engines and devEngines differ in object shape, and they also function very differently. engines is designed to alert the user when a dependency uses a different npm or node version than the project it's being used in, whereas devEngines is used to alert people interacting with the source code of a project.

+The supported keys under the devEngines property are cpu, os, libc, runtime, and packageManager. Each property can be an object or an array of objects. Objects must contain name, and can optionally specify version and onFail. onFail can be warn, error, or ignore; if left undefined, it behaves the same as error. npm will assume that you're running with node.

+Here's an example of a project that will fail if the environment is not node and npm. If you set runtime.name or packageManager.name to any other string, it will fail within the npm CLI.
+
    {
    +  "devEngines": {
    +    "runtime": {
    +      "name": "node",
    +      "onFail": "error"
    +    },
    +    "packageManager": {
    +      "name": "npm",
    +      "onFail": "error"
    +    }
    +  }
    +}
    +
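Since each property may also be an array of objects, here is a sketch combining the os and cpu keys (the values are assumptions, matching the strings reported by process.platform and process.arch):

```json
{
  "devEngines": {
    "os": {
      "name": "linux",
      "onFail": "warn"
    },
    "cpu": [
      { "name": "x64" },
      { "name": "arm64" }
    ]
  }
}
```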

    private

    If you set "private": true in your package.json, then npm will refuse to publish it.

    diff --git a/deps/npm/docs/output/configuring-npm/package-lock-json.html b/deps/npm/docs/output/configuring-npm/package-lock-json.html index 7ece2f78141166..58598df7ae4fbc 100644 --- a/deps/npm/docs/output/configuring-npm/package-lock-json.html +++ b/deps/npm/docs/output/configuring-npm/package-lock-json.html @@ -141,9 +141,9 @@
-package-lock.json @10.8.3
+package-lock.json @10.9.0

    A manifestation of the manifest
    diff --git a/deps/npm/docs/output/using-npm/config.html b/deps/npm/docs/output/using-npm/config.html index 0bc94abe755923..5d70bb7e8e803e 100644 --- a/deps/npm/docs/output/using-npm/config.html +++ b/deps/npm/docs/output/using-npm/config.html @@ -141,9 +141,9 @@
-config @10.8.3
+config @10.9.0

    More than you probably want to know about npm configuration
    diff --git a/deps/npm/docs/output/using-npm/dependency-selectors.html b/deps/npm/docs/output/using-npm/dependency-selectors.html index 38c14547a1d051..008377b3ab55ff 100644 --- a/deps/npm/docs/output/using-npm/dependency-selectors.html +++ b/deps/npm/docs/output/using-npm/dependency-selectors.html @@ -141,9 +141,9 @@
-Dependency Selector Syntax & Querying @10.8.3
+Dependency Selector Syntax & Querying @10.9.0

    Dependency Selector Syntax & Querying
    diff --git a/deps/npm/docs/output/using-npm/developers.html b/deps/npm/docs/output/using-npm/developers.html index fbb68f149c8f56..e75223cd623a54 100644 --- a/deps/npm/docs/output/using-npm/developers.html +++ b/deps/npm/docs/output/using-npm/developers.html @@ -141,9 +141,9 @@
-developers @10.8.3
+developers @10.9.0

    Developer Guide
    diff --git a/deps/npm/docs/output/using-npm/logging.html b/deps/npm/docs/output/using-npm/logging.html index 9943e1a6e81b07..5499bd53e285f2 100644 --- a/deps/npm/docs/output/using-npm/logging.html +++ b/deps/npm/docs/output/using-npm/logging.html @@ -141,9 +141,9 @@
-Logging @10.8.3
+Logging @10.9.0

    Why, What & How We Log
    diff --git a/deps/npm/docs/output/using-npm/orgs.html b/deps/npm/docs/output/using-npm/orgs.html index 487f16e322e353..c9ed80f6421c5e 100644 --- a/deps/npm/docs/output/using-npm/orgs.html +++ b/deps/npm/docs/output/using-npm/orgs.html @@ -141,9 +141,9 @@
-orgs @10.8.3
+orgs @10.9.0

    Working with Teams & Orgs
    diff --git a/deps/npm/docs/output/using-npm/package-spec.html b/deps/npm/docs/output/using-npm/package-spec.html index c485b897fb2a18..a370699bb555bd 100644 --- a/deps/npm/docs/output/using-npm/package-spec.html +++ b/deps/npm/docs/output/using-npm/package-spec.html @@ -141,9 +141,9 @@
-package-spec @10.8.3
+package-spec @10.9.0

    Package name specifier
    diff --git a/deps/npm/docs/output/using-npm/registry.html b/deps/npm/docs/output/using-npm/registry.html index 285769976fb4b3..9d9f8a539333c9 100644 --- a/deps/npm/docs/output/using-npm/registry.html +++ b/deps/npm/docs/output/using-npm/registry.html @@ -141,9 +141,9 @@
-registry @10.8.3
+registry @10.9.0

    The JavaScript Package Registry
    diff --git a/deps/npm/docs/output/using-npm/removal.html b/deps/npm/docs/output/using-npm/removal.html index 207835c29cad23..2668dd9dd8a440 100644 --- a/deps/npm/docs/output/using-npm/removal.html +++ b/deps/npm/docs/output/using-npm/removal.html @@ -141,9 +141,9 @@
-removal @10.8.3
+removal @10.9.0

    Cleaning the Slate
    diff --git a/deps/npm/docs/output/using-npm/scope.html b/deps/npm/docs/output/using-npm/scope.html index a536b06726e5e4..0441f8ef703e95 100644 --- a/deps/npm/docs/output/using-npm/scope.html +++ b/deps/npm/docs/output/using-npm/scope.html @@ -141,9 +141,9 @@
-scope @10.8.3
+scope @10.9.0

    Scoped packages
    diff --git a/deps/npm/docs/output/using-npm/scripts.html b/deps/npm/docs/output/using-npm/scripts.html index a22d6c9d8ec27f..ff37c2ede18062 100644 --- a/deps/npm/docs/output/using-npm/scripts.html +++ b/deps/npm/docs/output/using-npm/scripts.html @@ -141,9 +141,9 @@
-scripts @10.8.3
+scripts @10.9.0

    How npm handles the "scripts" field
    diff --git a/deps/npm/docs/output/using-npm/workspaces.html b/deps/npm/docs/output/using-npm/workspaces.html index b3ebf5c300ce0f..19195cc868db4b 100644 --- a/deps/npm/docs/output/using-npm/workspaces.html +++ b/deps/npm/docs/output/using-npm/workspaces.html @@ -141,9 +141,9 @@
-workspaces @10.8.3
+workspaces @10.9.0

    Working with workspaces
    diff --git a/deps/npm/lib/arborist-cmd.js b/deps/npm/lib/arborist-cmd.js index 9d247d02fa181d..f0167887b06996 100644 --- a/deps/npm/lib/arborist-cmd.js +++ b/deps/npm/lib/arborist-cmd.js @@ -18,6 +18,7 @@ class ArboristCmd extends BaseCommand { static workspaces = true static ignoreImplicitWorkspace = false + static checkDevEngines = true constructor (npm) { super(npm) diff --git a/deps/npm/lib/base-cmd.js b/deps/npm/lib/base-cmd.js index 99ae6d7f43c70a..941ffefad2ef4e 100644 --- a/deps/npm/lib/base-cmd.js +++ b/deps/npm/lib/base-cmd.js @@ -1,10 +1,12 @@ const { log } = require('proc-log') class BaseCommand { + // these defaults can be overridden by individual commands static workspaces = false static ignoreImplicitWorkspace = true + static checkDevEngines = false - // these are all overridden by individual commands + // these should always be overridden by individual commands static name = null static description = null static params = null @@ -129,6 +131,63 @@ class BaseCommand { } } + // Checks the devEngines entry in the package.json at this.localPrefix + async checkDevEngines () { + const force = this.npm.flatOptions.force + + const { devEngines } = await require('@npmcli/package-json') + .normalize(this.npm.config.localPrefix) + .then(p => p.content) + .catch(() => ({})) + + if (typeof devEngines === 'undefined') { + return + } + + const { checkDevEngines, currentEnv } = require('npm-install-checks') + const current = currentEnv.devEngines({ + nodeVersion: this.npm.nodeVersion, + npmVersion: this.npm.version, + }) + + const failures = checkDevEngines(devEngines, current) + const warnings = failures.filter(f => f.isWarn) + const errors = failures.filter(f => f.isError) + + const genMsg = (failure, i = 0) => { + return [...new Set([ + // eslint-disable-next-line + i === 0 ? 'The developer of this package has specified the following through devEngines' : '', + `${failure.message}`, + `${failure.errors.map(e => e.message).join('\n')}`, + ])].filter(v => v).join('\n') + } + + [...warnings, ...(force ? 
errors : [])].forEach((failure, i) => { + const message = genMsg(failure, i) + log.warn('EBADDEVENGINES', message) + log.warn('EBADDEVENGINES', { + current: failure.current, + required: failure.required, + }) + }) + + if (force) { + return + } + + if (errors.length) { + const failure = errors[0] + const message = genMsg(failure) + throw Object.assign(new Error(message), { + engine: failure.engine, + code: 'EBADDEVENGINES', + current: failure.current, + required: failure.required, + }) + } + } + async setWorkspaces () { const { relative } = require('node:path') diff --git a/deps/npm/lib/commands/init.js b/deps/npm/lib/commands/init.js index b8ef3e59ccf298..09e8d8522f7f31 100644 --- a/deps/npm/lib/commands/init.js +++ b/deps/npm/lib/commands/init.js @@ -31,7 +31,7 @@ class Init extends BaseCommand { static name = 'init' static usage = [ - ' (same as `npx `)', + ' (same as `npx create-`)', '<@scope> (same as `npx <@scope>/create`)', ] diff --git a/deps/npm/lib/commands/run-script.js b/deps/npm/lib/commands/run-script.js index 0a139d08af745b..50c745d6d9c078 100644 --- a/deps/npm/lib/commands/run-script.js +++ b/deps/npm/lib/commands/run-script.js @@ -21,6 +21,7 @@ class RunScript extends BaseCommand { static workspaces = true static ignoreImplicitWorkspace = false static isShellout = true + static checkDevEngines = true static async completion (opts, npm) { const argv = opts.conf.argv.remain diff --git a/deps/npm/lib/npm.js b/deps/npm/lib/npm.js index 5563cec21ba4d2..893e032f1ecedd 100644 --- a/deps/npm/lib/npm.js +++ b/deps/npm/lib/npm.js @@ -247,6 +247,10 @@ class Npm { execWorkspaces = true } + if (command.checkDevEngines && !this.global) { + await command.checkDevEngines() + } + return time.start(`command:${cmd}`, () => execWorkspaces ? command.execWorkspaces(args) : command.exec(args)) } diff --git a/deps/npm/lib/utils/error-message.js b/deps/npm/lib/utils/error-message.js index fc47c909069f07..4b5582ac8e1816 100644 --- a/deps/npm/lib/utils/error-message.js +++ b/deps/npm/lib/utils/error-message.js @@ -200,6 +200,13 @@ const errorMessage = (er, npm) => { ].join('\n')]) break + case 'EBADDEVENGINES': { + const { current, required } = er + summary.push(['EBADDEVENGINES', er.message]) + detail.push(['EBADDEVENGINES', { current, required }]) + break + } + case 'EBADPLATFORM': { const actual = er.current const expected = { ...er.required } diff --git a/deps/npm/lib/utils/ping.js b/deps/npm/lib/utils/ping.js index 00956d0c1630ce..1c8c9e827a4eaa 100644 --- a/deps/npm/lib/utils/ping.js +++ b/deps/npm/lib/utils/ping.js @@ -2,6 +2,6 @@ // used by the ping and doctor commands const fetch = require('npm-registry-fetch') module.exports = async (flatOptions) => { - const res = await fetch('/-/ping?write=true', flatOptions) + const res = await fetch('/-/ping', { ...flatOptions, cache: false }) return res.json().catch(() => ({})) } diff --git a/deps/npm/man/man1/npm-access.1 b/deps/npm/man/man1/npm-access.1 index d3e622c61d53be..602193f3b8e3e8 100644 --- a/deps/npm/man/man1/npm-access.1 +++ b/deps/npm/man/man1/npm-access.1 @@ -1,4 +1,4 @@ -.TH "NPM-ACCESS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-ACCESS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-access\fR - Set access level on published packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-adduser.1 b/deps/npm/man/man1/npm-adduser.1 index a670b65c62782f..467384ec0ec40e 100644 --- a/deps/npm/man/man1/npm-adduser.1 +++ b/deps/npm/man/man1/npm-adduser.1 @@ -1,4 +1,4 @@ -.TH "NPM-ADDUSER" "1" "August 2024" "NPM@10.8.3" "" +.TH 
"NPM-ADDUSER" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-adduser\fR - Add a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-audit.1 b/deps/npm/man/man1/npm-audit.1 index 7b8e907cd65e66..64bf5d5461a157 100644 --- a/deps/npm/man/man1/npm-audit.1 +++ b/deps/npm/man/man1/npm-audit.1 @@ -1,4 +1,4 @@ -.TH "NPM-AUDIT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-AUDIT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-audit\fR - Run a security audit .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-bugs.1 b/deps/npm/man/man1/npm-bugs.1 index d20ac5377799ce..c25b04b6814137 100644 --- a/deps/npm/man/man1/npm-bugs.1 +++ b/deps/npm/man/man1/npm-bugs.1 @@ -1,4 +1,4 @@ -.TH "NPM-BUGS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-BUGS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-bugs\fR - Report bugs for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-cache.1 b/deps/npm/man/man1/npm-cache.1 index ae80a0ed6d86de..e9da044a5b4479 100644 --- a/deps/npm/man/man1/npm-cache.1 +++ b/deps/npm/man/man1/npm-cache.1 @@ -1,4 +1,4 @@ -.TH "NPM-CACHE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-CACHE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-cache\fR - Manipulates packages cache .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ci.1 b/deps/npm/man/man1/npm-ci.1 index 840ef475bd964a..1ad1419cb1f98f 100644 --- a/deps/npm/man/man1/npm-ci.1 +++ b/deps/npm/man/man1/npm-ci.1 @@ -1,4 +1,4 @@ -.TH "NPM-CI" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-CI" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-ci\fR - Clean install a project .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-completion.1 b/deps/npm/man/man1/npm-completion.1 index ad7294e0ea4fe7..6f246c436cead3 100644 --- a/deps/npm/man/man1/npm-completion.1 +++ b/deps/npm/man/man1/npm-completion.1 @@ -1,4 +1,4 @@ -.TH "NPM-COMPLETION" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-COMPLETION" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-completion\fR - Tab Completion for npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-config.1 b/deps/npm/man/man1/npm-config.1 index 10828f3752aa6d..1bef58e188b68c 100644 --- a/deps/npm/man/man1/npm-config.1 +++ b/deps/npm/man/man1/npm-config.1 @@ -1,4 +1,4 @@ -.TH "NPM-CONFIG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-CONFIG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-config\fR - Manage the npm configuration files .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dedupe.1 b/deps/npm/man/man1/npm-dedupe.1 index 3c3621f37b3701..b1e21f0478ed4f 100644 --- a/deps/npm/man/man1/npm-dedupe.1 +++ b/deps/npm/man/man1/npm-dedupe.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEDUPE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DEDUPE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-dedupe\fR - Reduce duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-deprecate.1 b/deps/npm/man/man1/npm-deprecate.1 index a7f7089a9c88f0..31e9e56cd95382 100644 --- a/deps/npm/man/man1/npm-deprecate.1 +++ b/deps/npm/man/man1/npm-deprecate.1 @@ -1,4 +1,4 @@ -.TH "NPM-DEPRECATE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DEPRECATE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-deprecate\fR - Deprecate a version of a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-diff.1 b/deps/npm/man/man1/npm-diff.1 index 4d3f9f6955ada5..c29ae3266d1543 100644 --- a/deps/npm/man/man1/npm-diff.1 +++ b/deps/npm/man/man1/npm-diff.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIFF" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DIFF" "1" "October 2024" "NPM@10.9.0" "" 
.SH "NAME" \fBnpm-diff\fR - The registry diff command .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-dist-tag.1 b/deps/npm/man/man1/npm-dist-tag.1 index d8451c181f18be..46e5cf8ac3492a 100644 --- a/deps/npm/man/man1/npm-dist-tag.1 +++ b/deps/npm/man/man1/npm-dist-tag.1 @@ -1,4 +1,4 @@ -.TH "NPM-DIST-TAG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DIST-TAG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-dist-tag\fR - Modify package distribution tags .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-docs.1 b/deps/npm/man/man1/npm-docs.1 index f6398139688780..6cda2d87abe163 100644 --- a/deps/npm/man/man1/npm-docs.1 +++ b/deps/npm/man/man1/npm-docs.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DOCS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-docs\fR - Open documentation for a package in a web browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-doctor.1 b/deps/npm/man/man1/npm-doctor.1 index ba1cff38927cb4..7a67ed1e6b32b1 100644 --- a/deps/npm/man/man1/npm-doctor.1 +++ b/deps/npm/man/man1/npm-doctor.1 @@ -1,4 +1,4 @@ -.TH "NPM-DOCTOR" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-DOCTOR" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-doctor\fR - Check the health of your npm environment .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-edit.1 b/deps/npm/man/man1/npm-edit.1 index 6beaa7b38444c1..5ae7e24b67d60e 100644 --- a/deps/npm/man/man1/npm-edit.1 +++ b/deps/npm/man/man1/npm-edit.1 @@ -1,4 +1,4 @@ -.TH "NPM-EDIT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EDIT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-edit\fR - Edit an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-exec.1 b/deps/npm/man/man1/npm-exec.1 index 3e267f6eaae86d..20c21e67919d65 100644 --- a/deps/npm/man/man1/npm-exec.1 +++ b/deps/npm/man/man1/npm-exec.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXEC" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EXEC" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-exec\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explain.1 b/deps/npm/man/man1/npm-explain.1 index 60f19d7e1a844f..65ef0e086e2ce0 100644 --- a/deps/npm/man/man1/npm-explain.1 +++ b/deps/npm/man/man1/npm-explain.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLAIN" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EXPLAIN" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-explain\fR - Explain installed packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-explore.1 b/deps/npm/man/man1/npm-explore.1 index 3c1abf7b4db562..6b92a81c2abc0d 100644 --- a/deps/npm/man/man1/npm-explore.1 +++ b/deps/npm/man/man1/npm-explore.1 @@ -1,4 +1,4 @@ -.TH "NPM-EXPLORE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-EXPLORE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-explore\fR - Browse an installed package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-find-dupes.1 b/deps/npm/man/man1/npm-find-dupes.1 index 134480927461d7..261313e1b23ad1 100644 --- a/deps/npm/man/man1/npm-find-dupes.1 +++ b/deps/npm/man/man1/npm-find-dupes.1 @@ -1,4 +1,4 @@ -.TH "NPM-FIND-DUPES" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-FIND-DUPES" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-find-dupes\fR - Find duplication in the package tree .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-fund.1 b/deps/npm/man/man1/npm-fund.1 index 25a3b60faa0cb6..b87d57d20c0f50 100644 --- a/deps/npm/man/man1/npm-fund.1 +++ b/deps/npm/man/man1/npm-fund.1 @@ -1,4 +1,4 @@ -.TH "NPM-FUND" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-FUND" "1" "October 2024" 
"NPM@10.9.0" "" .SH "NAME" \fBnpm-fund\fR - Retrieve funding information .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help-search.1 b/deps/npm/man/man1/npm-help-search.1 index 607139184335f4..9660ca29697e9f 100644 --- a/deps/npm/man/man1/npm-help-search.1 +++ b/deps/npm/man/man1/npm-help-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP-SEARCH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-HELP-SEARCH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-help-search\fR - Search npm help documentation .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-help.1 b/deps/npm/man/man1/npm-help.1 index 97b15b6eeab138..a8c70fdc88a9ae 100644 --- a/deps/npm/man/man1/npm-help.1 +++ b/deps/npm/man/man1/npm-help.1 @@ -1,4 +1,4 @@ -.TH "NPM-HELP" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-HELP" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-help\fR - Get help on npm .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-hook.1 b/deps/npm/man/man1/npm-hook.1 index b7362d6e2ef8dc..2c07a5afadbb64 100644 --- a/deps/npm/man/man1/npm-hook.1 +++ b/deps/npm/man/man1/npm-hook.1 @@ -1,4 +1,4 @@ -.TH "NPM-HOOK" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-HOOK" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-hook\fR - Manage registry hooks .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-init.1 b/deps/npm/man/man1/npm-init.1 index 4639e3031dad6d..cb7339dc0f4ff0 100644 --- a/deps/npm/man/man1/npm-init.1 +++ b/deps/npm/man/man1/npm-init.1 @@ -1,11 +1,11 @@ -.TH "NPM-INIT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INIT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-init\fR - Create a package.json file .SS "Synopsis" .P .RS 2 .nf -npm init (same as `npx `) +npm init (same as `npx create-`) npm init <@scope> (same as `npx <@scope>/create`) aliases: create, innit diff --git a/deps/npm/man/man1/npm-install-ci-test.1 b/deps/npm/man/man1/npm-install-ci-test.1 index 3f8e17b9c69746..52defde693a506 100644 --- a/deps/npm/man/man1/npm-install-ci-test.1 +++ b/deps/npm/man/man1/npm-install-ci-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-CI-TEST" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INSTALL-CI-TEST" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-install-ci-test\fR - Install a project with a clean slate and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install-test.1 b/deps/npm/man/man1/npm-install-test.1 index 6d3c5aca8c6411..e2023e1a985474 100644 --- a/deps/npm/man/man1/npm-install-test.1 +++ b/deps/npm/man/man1/npm-install-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL-TEST" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INSTALL-TEST" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-install-test\fR - Install package(s) and run tests .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-install.1 b/deps/npm/man/man1/npm-install.1 index 3d51d664fb6bf3..59b3a8c81ac73f 100644 --- a/deps/npm/man/man1/npm-install.1 +++ b/deps/npm/man/man1/npm-install.1 @@ -1,4 +1,4 @@ -.TH "NPM-INSTALL" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-INSTALL" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-install\fR - Install a package .SS "Synopsis" @@ -173,8 +173,6 @@ npm install readable-stream --save-exact npm install ansi-regex --save-bundle .fi .RE -.P -\fBNote\fR: If there is a file or folder named \fB\fR in the current working directory, then it will try to install that, and only try to fetch the package by name if it is not valid. 
.IP \(bu 4 \fBnpm install @npm:\fR: .P diff --git a/deps/npm/man/man1/npm-link.1 b/deps/npm/man/man1/npm-link.1 index b2d6e243af726d..e6106495400fdc 100644 --- a/deps/npm/man/man1/npm-link.1 +++ b/deps/npm/man/man1/npm-link.1 @@ -1,4 +1,4 @@ -.TH "NPM-LINK" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LINK" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-link\fR - Symlink a package folder .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-login.1 b/deps/npm/man/man1/npm-login.1 index ae61f04aeba5f2..36f2bf2d0f35dc 100644 --- a/deps/npm/man/man1/npm-login.1 +++ b/deps/npm/man/man1/npm-login.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGIN" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LOGIN" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-login\fR - Login to a registry user account .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-logout.1 b/deps/npm/man/man1/npm-logout.1 index 5601da1924d19d..78cba39ccd85c9 100644 --- a/deps/npm/man/man1/npm-logout.1 +++ b/deps/npm/man/man1/npm-logout.1 @@ -1,4 +1,4 @@ -.TH "NPM-LOGOUT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LOGOUT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-logout\fR - Log out of the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ls.1 b/deps/npm/man/man1/npm-ls.1 index 0775be95e46462..b0364fe1531753 100644 --- a/deps/npm/man/man1/npm-ls.1 +++ b/deps/npm/man/man1/npm-ls.1 @@ -1,4 +1,4 @@ -.TH "NPM-LS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-LS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-ls\fR - List installed packages .SS "Synopsis" @@ -20,7 +20,7 @@ Positional arguments are \fBname@version-range\fR identifiers, which will limit .P .RS 2 .nf -npm@10.8.3 /path/to/npm +npm@10.9.0 /path/to/npm └─┬ init-package-json@0.0.4 └── promzard@0.1.5 .fi diff --git a/deps/npm/man/man1/npm-org.1 b/deps/npm/man/man1/npm-org.1 index 782162959b8238..0465a6b5bddb54 100644 --- a/deps/npm/man/man1/npm-org.1 +++ b/deps/npm/man/man1/npm-org.1 @@ -1,4 +1,4 @@ -.TH "NPM-ORG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-ORG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-org\fR - Manage orgs .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-outdated.1 b/deps/npm/man/man1/npm-outdated.1 index bd22481ccb46b7..ad1cf07823630f 100644 --- a/deps/npm/man/man1/npm-outdated.1 +++ b/deps/npm/man/man1/npm-outdated.1 @@ -1,4 +1,4 @@ -.TH "NPM-OUTDATED" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-OUTDATED" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-outdated\fR - Check for outdated packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-owner.1 b/deps/npm/man/man1/npm-owner.1 index 22202d1d46e877..5842e5a51d125f 100644 --- a/deps/npm/man/man1/npm-owner.1 +++ b/deps/npm/man/man1/npm-owner.1 @@ -1,4 +1,4 @@ -.TH "NPM-OWNER" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-OWNER" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-owner\fR - Manage package owners .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pack.1 b/deps/npm/man/man1/npm-pack.1 index dbe579ed7ce4bf..6cc2acd4b580f7 100644 --- a/deps/npm/man/man1/npm-pack.1 +++ b/deps/npm/man/man1/npm-pack.1 @@ -1,4 +1,4 @@ -.TH "NPM-PACK" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PACK" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-pack\fR - Create a tarball from a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-ping.1 b/deps/npm/man/man1/npm-ping.1 index 21318c025568a9..ad03cbbb6a1398 100644 --- a/deps/npm/man/man1/npm-ping.1 +++ b/deps/npm/man/man1/npm-ping.1 @@ -1,4 +1,4 @@ -.TH "NPM-PING" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PING" "1" "October 2024" 
"NPM@10.9.0" "" .SH "NAME" \fBnpm-ping\fR - Ping npm registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-pkg.1 b/deps/npm/man/man1/npm-pkg.1 index 05c3b50411b09c..2f45876234ec3d 100644 --- a/deps/npm/man/man1/npm-pkg.1 +++ b/deps/npm/man/man1/npm-pkg.1 @@ -1,4 +1,4 @@ -.TH "NPM-PKG" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PKG" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-pkg\fR - Manages your package.json .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prefix.1 b/deps/npm/man/man1/npm-prefix.1 index 9745f3370c89be..cac44344c71d79 100644 --- a/deps/npm/man/man1/npm-prefix.1 +++ b/deps/npm/man/man1/npm-prefix.1 @@ -1,4 +1,4 @@ -.TH "NPM-PREFIX" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PREFIX" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-prefix\fR - Display prefix .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-profile.1 b/deps/npm/man/man1/npm-profile.1 index 8171db696c6ede..28e3f41244ace1 100644 --- a/deps/npm/man/man1/npm-profile.1 +++ b/deps/npm/man/man1/npm-profile.1 @@ -1,4 +1,4 @@ -.TH "NPM-PROFILE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PROFILE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-profile\fR - Change settings on your registry profile .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-prune.1 b/deps/npm/man/man1/npm-prune.1 index cd065ff5a58ab2..7634c75f0901fa 100644 --- a/deps/npm/man/man1/npm-prune.1 +++ b/deps/npm/man/man1/npm-prune.1 @@ -1,4 +1,4 @@ -.TH "NPM-PRUNE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PRUNE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-prune\fR - Remove extraneous packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-publish.1 b/deps/npm/man/man1/npm-publish.1 index 17572afaaa84a9..c6724b6fe2ce51 100644 --- a/deps/npm/man/man1/npm-publish.1 +++ b/deps/npm/man/man1/npm-publish.1 @@ -1,4 +1,4 @@ -.TH "NPM-PUBLISH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-PUBLISH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-publish\fR - Publish a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-query.1 b/deps/npm/man/man1/npm-query.1 index 8b395876b68555..0a3c6e665e7ecf 100644 --- a/deps/npm/man/man1/npm-query.1 +++ b/deps/npm/man/man1/npm-query.1 @@ -1,4 +1,4 @@ -.TH "NPM-QUERY" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-QUERY" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-query\fR - Dependency selector query .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-rebuild.1 b/deps/npm/man/man1/npm-rebuild.1 index d500181cd66c72..e537cec373e3c6 100644 --- a/deps/npm/man/man1/npm-rebuild.1 +++ b/deps/npm/man/man1/npm-rebuild.1 @@ -1,4 +1,4 @@ -.TH "NPM-REBUILD" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-REBUILD" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-rebuild\fR - Rebuild a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-repo.1 b/deps/npm/man/man1/npm-repo.1 index 0592faf7559d21..869541f6faaa51 100644 --- a/deps/npm/man/man1/npm-repo.1 +++ b/deps/npm/man/man1/npm-repo.1 @@ -1,4 +1,4 @@ -.TH "NPM-REPO" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-REPO" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-repo\fR - Open package repository page in the browser .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-restart.1 b/deps/npm/man/man1/npm-restart.1 index c92a1acede0ca1..b0e3b0deead7ef 100644 --- a/deps/npm/man/man1/npm-restart.1 +++ b/deps/npm/man/man1/npm-restart.1 @@ -1,4 +1,4 @@ -.TH "NPM-RESTART" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-RESTART" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-restart\fR - Restart a package .SS "Synopsis" diff --git 
a/deps/npm/man/man1/npm-root.1 b/deps/npm/man/man1/npm-root.1 index 8daf0430c4e76e..94a132557e3979 100644 --- a/deps/npm/man/man1/npm-root.1 +++ b/deps/npm/man/man1/npm-root.1 @@ -1,4 +1,4 @@ -.TH "NPM-ROOT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-ROOT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-root\fR - Display npm root .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-run-script.1 b/deps/npm/man/man1/npm-run-script.1 index b3374d7cdc5e4a..424c733d6a0523 100644 --- a/deps/npm/man/man1/npm-run-script.1 +++ b/deps/npm/man/man1/npm-run-script.1 @@ -1,4 +1,4 @@ -.TH "NPM-RUN-SCRIPT" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-RUN-SCRIPT" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-run-script\fR - Run arbitrary package scripts .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-sbom.1 b/deps/npm/man/man1/npm-sbom.1 index 70423b46fdf408..7bb8199c8b11c8 100644 --- a/deps/npm/man/man1/npm-sbom.1 +++ b/deps/npm/man/man1/npm-sbom.1 @@ -1,4 +1,4 @@ -.TH "NPM-SBOM" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SBOM" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-sbom\fR - Generate a Software Bill of Materials (SBOM) .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-search.1 b/deps/npm/man/man1/npm-search.1 index c72a963f577209..4fe67890bc949a 100644 --- a/deps/npm/man/man1/npm-search.1 +++ b/deps/npm/man/man1/npm-search.1 @@ -1,4 +1,4 @@ -.TH "NPM-SEARCH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SEARCH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-search\fR - Search for packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-shrinkwrap.1 b/deps/npm/man/man1/npm-shrinkwrap.1 index 1f677474153975..ed3c0cb03ccca7 100644 --- a/deps/npm/man/man1/npm-shrinkwrap.1 +++ b/deps/npm/man/man1/npm-shrinkwrap.1 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SHRINKWRAP" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-shrinkwrap\fR - Lock down dependency versions for publication .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-star.1 b/deps/npm/man/man1/npm-star.1 index f441999ab56fb3..83d0ff230ec1bb 100644 --- a/deps/npm/man/man1/npm-star.1 +++ b/deps/npm/man/man1/npm-star.1 @@ -1,4 +1,4 @@ -.TH "NPM-STAR" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-STAR" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-star\fR - Mark your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stars.1 b/deps/npm/man/man1/npm-stars.1 index 5f22addcb31b10..bdeffec7a3b1b5 100644 --- a/deps/npm/man/man1/npm-stars.1 +++ b/deps/npm/man/man1/npm-stars.1 @@ -1,4 +1,4 @@ -.TH "NPM-STARS" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-STARS" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-stars\fR - View packages marked as favorites .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-start.1 b/deps/npm/man/man1/npm-start.1 index 1e2c8265972942..c3cb48c6f72576 100644 --- a/deps/npm/man/man1/npm-start.1 +++ b/deps/npm/man/man1/npm-start.1 @@ -1,4 +1,4 @@ -.TH "NPM-START" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-START" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-start\fR - Start a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-stop.1 b/deps/npm/man/man1/npm-stop.1 index 546dc8bef5da82..36ef105209b7f0 100644 --- a/deps/npm/man/man1/npm-stop.1 +++ b/deps/npm/man/man1/npm-stop.1 @@ -1,4 +1,4 @@ -.TH "NPM-STOP" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-STOP" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-stop\fR - Stop a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-team.1 b/deps/npm/man/man1/npm-team.1 index 
c8e4286873d415..6712c8f92b23c8 100644 --- a/deps/npm/man/man1/npm-team.1 +++ b/deps/npm/man/man1/npm-team.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEAM" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-TEAM" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-team\fR - Manage organization teams and team memberships .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-test.1 b/deps/npm/man/man1/npm-test.1 index 2edba4e88bb489..5e63e9c6d76ab3 100644 --- a/deps/npm/man/man1/npm-test.1 +++ b/deps/npm/man/man1/npm-test.1 @@ -1,4 +1,4 @@ -.TH "NPM-TEST" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-TEST" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-test\fR - Test a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-token.1 b/deps/npm/man/man1/npm-token.1 index 20bda1a7ef30b9..29a2c361574121 100644 --- a/deps/npm/man/man1/npm-token.1 +++ b/deps/npm/man/man1/npm-token.1 @@ -1,4 +1,4 @@ -.TH "NPM-TOKEN" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-TOKEN" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-token\fR - Manage your authentication tokens .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-uninstall.1 b/deps/npm/man/man1/npm-uninstall.1 index 052c11b8085074..0c10a60c1f07e7 100644 --- a/deps/npm/man/man1/npm-uninstall.1 +++ b/deps/npm/man/man1/npm-uninstall.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNINSTALL" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UNINSTALL" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-uninstall\fR - Remove a package .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unpublish.1 b/deps/npm/man/man1/npm-unpublish.1 index fd7ee3066e9986..098af2e59310bc 100644 --- a/deps/npm/man/man1/npm-unpublish.1 +++ b/deps/npm/man/man1/npm-unpublish.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNPUBLISH" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UNPUBLISH" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-unpublish\fR - Remove a package from the registry .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-unstar.1 b/deps/npm/man/man1/npm-unstar.1 index f5e409f091725f..7326984aa8b4d2 100644 --- a/deps/npm/man/man1/npm-unstar.1 +++ b/deps/npm/man/man1/npm-unstar.1 @@ -1,4 +1,4 @@ -.TH "NPM-UNSTAR" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UNSTAR" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-unstar\fR - Remove an item from your favorite packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-update.1 b/deps/npm/man/man1/npm-update.1 index dd9d503de4e6a0..797a9e601cf3d2 100644 --- a/deps/npm/man/man1/npm-update.1 +++ b/deps/npm/man/man1/npm-update.1 @@ -1,4 +1,4 @@ -.TH "NPM-UPDATE" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-UPDATE" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-update\fR - Update packages .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-version.1 b/deps/npm/man/man1/npm-version.1 index 5e6d0beefac912..d0a7dcee6fea65 100644 --- a/deps/npm/man/man1/npm-version.1 +++ b/deps/npm/man/man1/npm-version.1 @@ -1,4 +1,4 @@ -.TH "NPM-VERSION" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-VERSION" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-version\fR - Bump a package version .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-view.1 b/deps/npm/man/man1/npm-view.1 index baca4b0a97fae8..2925f8d7d2cffe 100644 --- a/deps/npm/man/man1/npm-view.1 +++ b/deps/npm/man/man1/npm-view.1 @@ -1,4 +1,4 @@ -.TH "NPM-VIEW" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-VIEW" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-view\fR - View registry info .SS "Synopsis" diff --git a/deps/npm/man/man1/npm-whoami.1 b/deps/npm/man/man1/npm-whoami.1 index 40caa068abd1ef..f283db5c9d247a 100644 --- 
a/deps/npm/man/man1/npm-whoami.1 +++ b/deps/npm/man/man1/npm-whoami.1 @@ -1,4 +1,4 @@ -.TH "NPM-WHOAMI" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM-WHOAMI" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-whoami\fR - Display npm username .SS "Synopsis" diff --git a/deps/npm/man/man1/npm.1 b/deps/npm/man/man1/npm.1 index 471e5b5f7c110f..b2034bc50fa5e3 100644 --- a/deps/npm/man/man1/npm.1 +++ b/deps/npm/man/man1/npm.1 @@ -1,4 +1,4 @@ -.TH "NPM" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPM" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm\fR - javascript package manager .SS "Synopsis" @@ -12,7 +12,7 @@ npm Note: This command is unaware of workspaces. .SS "Version" .P -10.8.3 +10.9.0 .SS "Description" .P npm is the package manager for the Node JavaScript platform. It puts modules in place so that node can find them, and manages dependency conflicts intelligently. diff --git a/deps/npm/man/man1/npx.1 b/deps/npm/man/man1/npx.1 index 0849e0d10949e2..a1000443cc665e 100644 --- a/deps/npm/man/man1/npx.1 +++ b/deps/npm/man/man1/npx.1 @@ -1,4 +1,4 @@ -.TH "NPX" "1" "August 2024" "NPM@10.8.3" "" +.TH "NPX" "1" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpx\fR - Run a command from a local or remote npm package .SS "Synopsis" diff --git a/deps/npm/man/man5/folders.5 b/deps/npm/man/man5/folders.5 index 6e4b89a844f0d8..6e3045729c46ca 100644 --- a/deps/npm/man/man5/folders.5 +++ b/deps/npm/man/man5/folders.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "August 2024" "NPM@10.8.3" "" +.TH "FOLDERS" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/install.5 b/deps/npm/man/man5/install.5 index e5d1e8921317cb..f5fabf02d7431b 100644 --- a/deps/npm/man/man5/install.5 +++ b/deps/npm/man/man5/install.5 @@ -1,4 +1,4 @@ -.TH "INSTALL" "5" "August 2024" "NPM@10.8.3" "" +.TH "INSTALL" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBinstall\fR - Download and install node and npm .SS "Description" diff --git a/deps/npm/man/man5/npm-global.5 b/deps/npm/man/man5/npm-global.5 index 6e4b89a844f0d8..6e3045729c46ca 100644 --- a/deps/npm/man/man5/npm-global.5 +++ b/deps/npm/man/man5/npm-global.5 @@ -1,4 +1,4 @@ -.TH "FOLDERS" "5" "August 2024" "NPM@10.8.3" "" +.TH "FOLDERS" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBfolders\fR - Folder Structures Used by npm .SS "Description" diff --git a/deps/npm/man/man5/npm-json.5 b/deps/npm/man/man5/npm-json.5 index b167d6e0e92849..49773dda01d706 100644 --- a/deps/npm/man/man5/npm-json.5 +++ b/deps/npm/man/man5/npm-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -338,6 +338,9 @@ Most of these ignored files can be included specifically if included in the \fBf .P These can not be included. +.SS "exports" +.P +The "exports" field provides a modern alternative to "main", allowing multiple entry points to be defined, supporting conditional entry resolution between environments, and preventing any entry points other than those defined in "exports". This encapsulation allows module authors to clearly define the public interface for their package. For more details, see the \fBnode.js documentation on package entry points\fR \fI\(lahttps://nodejs.org/api/packages.html#package-entry-points\(ra\fR .SS "main" .P The main field is a module ID that is the primary entry point to your program.
That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned. @@ -1041,6 +1044,35 @@ Like the \fBos\fR option, you can also block architectures: .RE .P The host architecture is determined by \fBprocess.arch\fR +.SS "devEngines" +.P +The \fBdevEngines\fR field helps ensure that all engineers working on a codebase use the same tooling. +.P +You can specify a \fBdevEngines\fR property in your \fBpackage.json\fR, which will be checked before the \fBinstall\fR, \fBci\fR, and \fBrun\fR commands. +.RS 0 +.P +Note: \fBengines\fR and \fBdevEngines\fR differ in object shape. They also function very differently. \fBengines\fR is designed to alert the user when a dependency uses a different npm or node version than the project it's being used in, whereas \fBdevEngines\fR is used to alert people interacting with the source code of a project. +.RE 0 + +.P +The supported keys under the \fBdevEngines\fR property are \fBcpu\fR, \fBos\fR, \fBlibc\fR, \fBruntime\fR, and \fBpackageManager\fR. Each property can be an object or an array of objects. Objects must contain \fBname\fR, and can optionally specify \fBversion\fR and \fBonFail\fR. \fBonFail\fR can be \fBwarn\fR, \fBerror\fR, or \fBignore\fR, and if left undefined, defaults to \fBerror\fR. \fBnpm\fR will assume that you're running with \fBnode\fR. Here's an example of a project that will fail if the environment is not \fBnode\fR and \fBnpm\fR. If you set \fBruntime.name\fR or \fBpackageManager.name\fR to any other string, it will fail within the npm CLI. +.P +.RS 2 +.nf +{ + "devEngines": { + "runtime": { + "name": "node", + "onFail": "error" + }, + "packageManager": { + "name": "npm", + "onFail": "error" + } + } +} +.fi +.RE .SS "private" .P If you set \fB"private": true\fR in your package.json, then npm will refuse to publish it. diff --git a/deps/npm/man/man5/npm-shrinkwrap-json.5 b/deps/npm/man/man5/npm-shrinkwrap-json.5 index 13353dc1e6a187..d1f394d23aa1ce 100644 --- a/deps/npm/man/man5/npm-shrinkwrap-json.5 +++ b/deps/npm/man/man5/npm-shrinkwrap-json.5 @@ -1,4 +1,4 @@ -.TH "NPM-SHRINKWRAP.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "NPM-SHRINKWRAP.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpm-shrinkwrap.json\fR - A publishable lockfile .SS "Description" diff --git a/deps/npm/man/man5/npmrc.5 b/deps/npm/man/man5/npmrc.5 index 9f439429b53923..3b78f989c23545 100644 --- a/deps/npm/man/man5/npmrc.5 +++ b/deps/npm/man/man5/npmrc.5 @@ -1,4 +1,4 @@ -.TH "NPMRC" "5" "August 2024" "NPM@10.8.3" "" +.TH "NPMRC" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBnpmrc\fR - The npm config files .SS "Description" diff --git a/deps/npm/man/man5/package-json.5 b/deps/npm/man/man5/package-json.5 index b167d6e0e92849..49773dda01d706 100644 --- a/deps/npm/man/man5/package-json.5 +++ b/deps/npm/man/man5/package-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage.json\fR - Specifics of npm's package.json handling .SS "Description" @@ -338,6 +338,9 @@ Most of these ignored files can be included specifically if included in the \fBf .P These can not be included. +.SS "exports" +.P +The "exports" field provides a modern alternative to "main", allowing multiple entry points to be defined, supporting conditional entry resolution between environments, and preventing any entry points other than those defined in "exports".
This encapsulation allows module authors to clearly define the public interface for their package. For more details, see the \fBnode.js documentation on package entry points\fR \fI\(lahttps://nodejs.org/api/packages.html#package-entry-points\(ra\fR .SS "main" .P The main field is a module ID that is the primary entry point to your program. That is, if your package is named \fBfoo\fR, and a user installs it, and then does \fBrequire("foo")\fR, then your main module's exports object will be returned. @@ -1041,6 +1044,35 @@ Like the \fBos\fR option, you can also block architectures: .RE .P The host architecture is determined by \fBprocess.arch\fR +.SS "devEngines" +.P +The \fBdevEngines\fR field helps ensure that all engineers working on a codebase use the same tooling. +.P +You can specify a \fBdevEngines\fR property in your \fBpackage.json\fR, which will be checked before the \fBinstall\fR, \fBci\fR, and \fBrun\fR commands. +.RS 0 +.P +Note: \fBengines\fR and \fBdevEngines\fR differ in object shape. They also function very differently. \fBengines\fR is designed to alert the user when a dependency uses a different npm or node version than the project it's being used in, whereas \fBdevEngines\fR is used to alert people interacting with the source code of a project. +.RE 0 + +.P +The supported keys under the \fBdevEngines\fR property are \fBcpu\fR, \fBos\fR, \fBlibc\fR, \fBruntime\fR, and \fBpackageManager\fR. Each property can be an object or an array of objects. Objects must contain \fBname\fR, and can optionally specify \fBversion\fR and \fBonFail\fR. \fBonFail\fR can be \fBwarn\fR, \fBerror\fR, or \fBignore\fR, and if left undefined, defaults to \fBerror\fR. \fBnpm\fR will assume that you're running with \fBnode\fR. Here's an example of a project that will fail if the environment is not \fBnode\fR and \fBnpm\fR. If you set \fBruntime.name\fR or \fBpackageManager.name\fR to any other string, it will fail within the npm CLI. +.P +.RS 2 +.nf +{ + "devEngines": { + "runtime": { + "name": "node", + "onFail": "error" + }, + "packageManager": { + "name": "npm", + "onFail": "error" + } + } +} +.fi +.RE .SS "private" .P If you set \fB"private": true\fR in your package.json, then npm will refuse to publish it.
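
[Editor's sketch] The "exports" and "devEngines" sections added to npm-json.5/package-json.5 above describe richer shapes than the embedded example shows: conditional entry points, version ranges, onFail levels, and arrays of objects. Here is a minimal sketch of such a package.json fragment, written as an annotated JavaScript object; the entry-point paths, version ranges, and CPU names are illustrative assumptions, not values taken from this diff.

// Hypothetical package.json fragment, shown as a JS object so fields can be annotated.
const packageJsonFragment = {
  // "exports" defines the package's public entry points; the "import"/"require"
  // conditions resolve per environment, and no paths outside "exports" are reachable.
  exports: {
    '.': {
      import: './dist/esm/index.js', // resolved for `import` consumers
      require: './dist/commonjs/index.js', // resolved for `require()` consumers
    },
    './package.json': './package.json',
  },
  // "devEngines": name is required; version and onFail are optional,
  // and onFail defaults to "error" when omitted.
  devEngines: {
    runtime: { name: 'node', version: '>=20.5.0', onFail: 'error' },
    packageManager: { name: 'npm', version: '>=10.9.0', onFail: 'warn' }, // warn, don't abort
    cpu: [{ name: 'x64' }, { name: 'arm64' }], // array-of-objects form
  },
}
module.exports = packageJsonFragment

The exports shape sketched here mirrors the @isaacs/fs-minipass package.json vendored later in this diff.
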
diff --git a/deps/npm/man/man5/package-lock-json.5 b/deps/npm/man/man5/package-lock-json.5 index 183a8fbff85d75..df3dcca1c4fa75 100644 --- a/deps/npm/man/man5/package-lock-json.5 +++ b/deps/npm/man/man5/package-lock-json.5 @@ -1,4 +1,4 @@ -.TH "PACKAGE-LOCK.JSON" "5" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE-LOCK.JSON" "5" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage-lock.json\fR - A manifestation of the manifest .SS "Description" diff --git a/deps/npm/man/man7/config.7 b/deps/npm/man/man7/config.7 index 0cd1e963107f62..ee166c9d4ccee9 100644 --- a/deps/npm/man/man7/config.7 +++ b/deps/npm/man/man7/config.7 @@ -1,4 +1,4 @@ -.TH "CONFIG" "7" "August 2024" "NPM@10.8.3" "" +.TH "CONFIG" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBconfig\fR - More than you probably want to know about npm configuration .SS "Description" diff --git a/deps/npm/man/man7/dependency-selectors.7 b/deps/npm/man/man7/dependency-selectors.7 index 92cbf923c1546a..54d80fee4dc58d 100644 --- a/deps/npm/man/man7/dependency-selectors.7 +++ b/deps/npm/man/man7/dependency-selectors.7 @@ -1,4 +1,4 @@ -.TH "QUERYING" "7" "August 2024" "NPM@10.8.3" "" +.TH "QUERYING" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBQuerying\fR - Dependency Selector Syntax & Querying .SS "Description" diff --git a/deps/npm/man/man7/developers.7 b/deps/npm/man/man7/developers.7 index fd7d1342f26052..aa907f3f51deda 100644 --- a/deps/npm/man/man7/developers.7 +++ b/deps/npm/man/man7/developers.7 @@ -1,4 +1,4 @@ -.TH "DEVELOPERS" "7" "August 2024" "NPM@10.8.3" "" +.TH "DEVELOPERS" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBdevelopers\fR - Developer Guide .SS "Description" diff --git a/deps/npm/man/man7/logging.7 b/deps/npm/man/man7/logging.7 index 3ab9f5c1ddf49e..f36ae6e4703048 100644 --- a/deps/npm/man/man7/logging.7 +++ b/deps/npm/man/man7/logging.7 @@ -1,4 +1,4 @@ -.TH "LOGGING" "7" "August 2024" "NPM@10.8.3" "" +.TH "LOGGING" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBLogging\fR - Why, What & How We Log .SS "Description" diff --git a/deps/npm/man/man7/orgs.7 b/deps/npm/man/man7/orgs.7 index c3718278543966..2866d7dd9800a5 100644 --- a/deps/npm/man/man7/orgs.7 +++ b/deps/npm/man/man7/orgs.7 @@ -1,4 +1,4 @@ -.TH "ORGS" "7" "August 2024" "NPM@10.8.3" "" +.TH "ORGS" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBorgs\fR - Working with Teams & Orgs .SS "Description" diff --git a/deps/npm/man/man7/package-spec.7 b/deps/npm/man/man7/package-spec.7 index 60cbfcd9ff6068..0d76a5f3017253 100644 --- a/deps/npm/man/man7/package-spec.7 +++ b/deps/npm/man/man7/package-spec.7 @@ -1,4 +1,4 @@ -.TH "PACKAGE-SPEC" "7" "August 2024" "NPM@10.8.3" "" +.TH "PACKAGE-SPEC" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBpackage-spec\fR - Package name specifier .SS "Description" diff --git a/deps/npm/man/man7/registry.7 b/deps/npm/man/man7/registry.7 index a798125ab8dd05..61ad2702f7b235 100644 --- a/deps/npm/man/man7/registry.7 +++ b/deps/npm/man/man7/registry.7 @@ -1,4 +1,4 @@ -.TH "REGISTRY" "7" "August 2024" "NPM@10.8.3" "" +.TH "REGISTRY" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBregistry\fR - The JavaScript Package Registry .SS "Description" diff --git a/deps/npm/man/man7/removal.7 b/deps/npm/man/man7/removal.7 index 9e792b51df71a3..a76b743553c304 100644 --- a/deps/npm/man/man7/removal.7 +++ b/deps/npm/man/man7/removal.7 @@ -1,4 +1,4 @@ -.TH "REMOVAL" "7" "August 2024" "NPM@10.8.3" "" +.TH "REMOVAL" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBremoval\fR - Cleaning the Slate .SS "Synopsis" diff --git a/deps/npm/man/man7/scope.7 
b/deps/npm/man/man7/scope.7 index 9313cb144366f1..94c22ccbd9f8fa 100644 --- a/deps/npm/man/man7/scope.7 +++ b/deps/npm/man/man7/scope.7 @@ -1,4 +1,4 @@ -.TH "SCOPE" "7" "August 2024" "NPM@10.8.3" "" +.TH "SCOPE" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBscope\fR - Scoped packages .SS "Description" diff --git a/deps/npm/man/man7/scripts.7 b/deps/npm/man/man7/scripts.7 index 843454ed9fd913..925d3181dc9174 100644 --- a/deps/npm/man/man7/scripts.7 +++ b/deps/npm/man/man7/scripts.7 @@ -1,4 +1,4 @@ -.TH "SCRIPTS" "7" "August 2024" "NPM@10.8.3" "" +.TH "SCRIPTS" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBscripts\fR - How npm handles the "scripts" field .SS "Description" diff --git a/deps/npm/man/man7/workspaces.7 b/deps/npm/man/man7/workspaces.7 index 7459be91ff0880..1812eb73eb7ece 100644 --- a/deps/npm/man/man7/workspaces.7 +++ b/deps/npm/man/man7/workspaces.7 @@ -1,4 +1,4 @@ -.TH "WORKSPACES" "7" "August 2024" "NPM@10.8.3" "" +.TH "WORKSPACES" "7" "October 2024" "NPM@10.9.0" "" .SH "NAME" \fBworkspaces\fR - Working with workspaces .SS "Description" diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/LICENSE b/deps/npm/node_modules/@isaacs/fs-minipass/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js new file mode 100644 index 00000000000000..2b3178c5263b4c --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/index.js @@ -0,0 +1,430 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WriteStreamSync = exports.WriteStream = exports.ReadStreamSync = exports.ReadStream = void 0; +const events_1 = __importDefault(require("events")); +const fs_1 = __importDefault(require("fs")); +const minipass_1 = require("minipass"); +const writev = fs_1.default.writev; +const _autoClose = Symbol('_autoClose'); +const _close = Symbol('_close'); +const _ended = Symbol('_ended'); +const _fd = Symbol('_fd'); +const _finished = Symbol('_finished'); +const _flags = Symbol('_flags'); +const _flush = Symbol('_flush'); +const _handleChunk = Symbol('_handleChunk'); +const _makeBuf = Symbol('_makeBuf'); +const _mode = Symbol('_mode'); +const _needDrain = Symbol('_needDrain'); +const _onerror = Symbol('_onerror'); +const _onopen = Symbol('_onopen'); +const _onread = Symbol('_onread'); +const _onwrite = Symbol('_onwrite'); +const _open = Symbol('_open'); +const _path = Symbol('_path'); +const _pos = Symbol('_pos'); +const _queue = Symbol('_queue'); +const _read = Symbol('_read'); +const _readSize = Symbol('_readSize'); +const _reading = Symbol('_reading'); +const _remain = Symbol('_remain'); +const _size = Symbol('_size'); +const _write = Symbol('_write'); +const _writing = Symbol('_writing'); +const _defaultFlag = Symbol('_defaultFlag'); +const _errored = Symbol('_errored'); +class ReadStream extends minipass_1.Minipass { + [_errored] = false; + [_fd]; + [_path]; + [_readSize]; + [_reading] = false; + [_size]; + [_remain]; + [_autoClose]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this.readable = true; + this.writable = false; + if (typeof path !== 'string') { + throw new TypeError('path must be a string'); + } + this[_errored] = false; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_path] = path; + this[_readSize] = opt.readSize || 16 * 1024 * 1024; + this[_reading] = false; + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity; + this[_remain] = this[_size]; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + if (typeof this[_fd] === 'number') { + this[_read](); + } + else { + this[_open](); + } + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + //@ts-ignore + write() { + throw new TypeError('this is a readable stream'); + } + //@ts-ignore + end() { + throw new TypeError('this is a readable stream'); + } + [_open]() { + fs_1.default.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + this[_read](); + } + } + [_makeBuf]() { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])); + } + [_read]() { + if (!this[_reading]) { + this[_reading] = true; + const buf = this[_makeBuf](); + /* c8 ignore start */ + if (buf.length === 0) { + return process.nextTick(() => this[_onread](null, 0, buf)); + } + /* c8 ignore stop */ + fs_1.default.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b)); + } + } + [_onread](er, br, buf) { + this[_reading] = false; + if (er) { + this[_onerror](er); + } + else if (this[_handleChunk](br, buf)) { + this[_read](); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.close(fd, er => er ? 
this.emit('error', er) : this.emit('close')); + } + } + [_onerror](er) { + this[_reading] = true; + this[_close](); + this.emit('error', er); + } + [_handleChunk](br, buf) { + let ret = false; + // no effect if infinite + this[_remain] -= br; + if (br > 0) { + ret = super.write(br < buf.length ? buf.subarray(0, br) : buf); + } + if (br === 0 || this[_remain] <= 0) { + ret = false; + this[_close](); + super.end(); + } + return ret; + } + emit(ev, ...args) { + switch (ev) { + case 'prefinish': + case 'finish': + return false; + case 'drain': + if (typeof this[_fd] === 'number') { + this[_read](); + } + return false; + case 'error': + if (this[_errored]) { + return false; + } + this[_errored] = true; + return super.emit(ev, ...args); + default: + return super.emit(ev, ...args); + } + } +} +exports.ReadStream = ReadStream; +class ReadStreamSync extends ReadStream { + [_open]() { + let threw = true; + try { + this[_onopen](null, fs_1.default.openSync(this[_path], 'r')); + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_read]() { + let threw = true; + try { + if (!this[_reading]) { + this[_reading] = true; + do { + const buf = this[_makeBuf](); + /* c8 ignore start */ + const br = buf.length === 0 + ? 0 + : fs_1.default.readSync(this[_fd], buf, 0, buf.length, null); + /* c8 ignore stop */ + if (!this[_handleChunk](br, buf)) { + break; + } + } while (true); + this[_reading] = false; + } + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.closeSync(fd); + this.emit('close'); + } + } +} +exports.ReadStreamSync = ReadStreamSync; +class WriteStream extends events_1.default { + readable = false; + writable = true; + [_errored] = false; + [_writing] = false; + [_ended] = false; + [_queue] = []; + [_needDrain] = false; + [_path]; + [_mode]; + [_autoClose]; + [_fd]; + [_defaultFlag]; + [_flags]; + [_finished] = false; + [_pos]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this[_path] = path; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode; + this[_pos] = typeof opt.start === 'number' ? opt.start : undefined; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w'; + this[_defaultFlag] = opt.flags === undefined; + this[_flags] = opt.flags === undefined ? 
defaultFlag : opt.flags; + if (this[_fd] === undefined) { + this[_open](); + } + } + emit(ev, ...args) { + if (ev === 'error') { + if (this[_errored]) { + return false; + } + this[_errored] = true; + } + return super.emit(ev, ...args); + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + [_onerror](er) { + this[_close](); + this[_writing] = true; + this.emit('error', er); + } + [_open]() { + fs_1.default.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && + er.code === 'ENOENT') { + this[_flags] = 'w'; + this[_open](); + } + else if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + if (!this[_writing]) { + this[_flush](); + } + } + } + end(buf, enc) { + if (buf) { + //@ts-ignore + this.write(buf, enc); + } + this[_ended] = true; + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && + !this[_queue].length && + typeof this[_fd] === 'number') { + this[_onwrite](null, 0); + } + return this; + } + write(buf, enc) { + if (typeof buf === 'string') { + buf = Buffer.from(buf, enc); + } + if (this[_ended]) { + this.emit('error', new Error('write() after end()')); + return false; + } + if (this[_fd] === undefined || this[_writing] || this[_queue].length) { + this[_queue].push(buf); + this[_needDrain] = true; + return false; + } + this[_writing] = true; + this[_write](buf); + return true; + } + [_write](buf) { + fs_1.default.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + [_onwrite](er, bw) { + if (er) { + this[_onerror](er); + } + else { + if (this[_pos] !== undefined && typeof bw === 'number') { + this[_pos] += bw; + } + if (this[_queue].length) { + this[_flush](); + } + else { + this[_writing] = false; + if (this[_ended] && !this[_finished]) { + this[_finished] = true; + this[_close](); + this.emit('finish'); + } + else if (this[_needDrain]) { + this[_needDrain] = false; + this.emit('drain'); + } + } + } + } + [_flush]() { + if (this[_queue].length === 0) { + if (this[_ended]) { + this[_onwrite](null, 0); + } + } + else if (this[_queue].length === 1) { + this[_write](this[_queue].pop()); + } + else { + const iovec = this[_queue]; + this[_queue] = []; + writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } +} +exports.WriteStream = WriteStream; +class WriteStreamSync extends WriteStream { + [_open]() { + let fd; + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
+ if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]); + } + catch (er) { + if (er?.code === 'ENOENT') { + this[_flags] = 'w'; + return this[_open](); + } + else { + throw er; + } + } + } + else { + fd = fs_1.default.openSync(this[_path], this[_flags], this[_mode]); + } + this[_onopen](null, fd); + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs_1.default.closeSync(fd); + this.emit('close'); + } + } + [_write](buf) { + // throw the original, but try to close if it fails + let threw = true; + try { + this[_onwrite](null, fs_1.default.writeSync(this[_fd], buf, 0, buf.length, this[_pos])); + threw = false; + } + finally { + if (threw) { + try { + this[_close](); + } + catch { + // ok error + } + } + } + } +} +exports.WriteStreamSync = WriteStreamSync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js new file mode 100644 index 00000000000000..287a0f614dcc65 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/index.js @@ -0,0 +1,420 @@ +import EE from 'events'; +import fs from 'fs'; +import { Minipass } from 'minipass'; +const writev = fs.writev; +const _autoClose = Symbol('_autoClose'); +const _close = Symbol('_close'); +const _ended = Symbol('_ended'); +const _fd = Symbol('_fd'); +const _finished = Symbol('_finished'); +const _flags = Symbol('_flags'); +const _flush = Symbol('_flush'); +const _handleChunk = Symbol('_handleChunk'); +const _makeBuf = Symbol('_makeBuf'); +const _mode = Symbol('_mode'); +const _needDrain = Symbol('_needDrain'); +const _onerror = Symbol('_onerror'); +const _onopen = Symbol('_onopen'); +const _onread = Symbol('_onread'); +const _onwrite = Symbol('_onwrite'); +const _open = Symbol('_open'); +const _path = Symbol('_path'); +const _pos = Symbol('_pos'); +const _queue = Symbol('_queue'); +const _read = Symbol('_read'); +const _readSize = Symbol('_readSize'); +const _reading = Symbol('_reading'); +const _remain = Symbol('_remain'); +const _size = Symbol('_size'); +const _write = Symbol('_write'); +const _writing = Symbol('_writing'); +const _defaultFlag = Symbol('_defaultFlag'); +const _errored = Symbol('_errored'); +export class ReadStream extends Minipass { + [_errored] = false; + [_fd]; + [_path]; + [_readSize]; + [_reading] = false; + [_size]; + [_remain]; + [_autoClose]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this.readable = true; + this.writable = false; + if (typeof path !== 'string') { + throw new TypeError('path must be a string'); + } + this[_errored] = false; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_path] = path; + this[_readSize] = opt.readSize || 16 * 1024 * 1024; + this[_reading] = false; + this[_size] = typeof opt.size === 'number' ? opt.size : Infinity; + this[_remain] = this[_size]; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? 
opt.autoClose : true; + if (typeof this[_fd] === 'number') { + this[_read](); + } + else { + this[_open](); + } + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + //@ts-ignore + write() { + throw new TypeError('this is a readable stream'); + } + //@ts-ignore + end() { + throw new TypeError('this is a readable stream'); + } + [_open]() { + fs.open(this[_path], 'r', (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + this[_read](); + } + } + [_makeBuf]() { + return Buffer.allocUnsafe(Math.min(this[_readSize], this[_remain])); + } + [_read]() { + if (!this[_reading]) { + this[_reading] = true; + const buf = this[_makeBuf](); + /* c8 ignore start */ + if (buf.length === 0) { + return process.nextTick(() => this[_onread](null, 0, buf)); + } + /* c8 ignore stop */ + fs.read(this[_fd], buf, 0, buf.length, null, (er, br, b) => this[_onread](er, br, b)); + } + } + [_onread](er, br, buf) { + this[_reading] = false; + if (er) { + this[_onerror](er); + } + else if (this[_handleChunk](br, buf)) { + this[_read](); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } + [_onerror](er) { + this[_reading] = true; + this[_close](); + this.emit('error', er); + } + [_handleChunk](br, buf) { + let ret = false; + // no effect if infinite + this[_remain] -= br; + if (br > 0) { + ret = super.write(br < buf.length ? buf.subarray(0, br) : buf); + } + if (br === 0 || this[_remain] <= 0) { + ret = false; + this[_close](); + super.end(); + } + return ret; + } + emit(ev, ...args) { + switch (ev) { + case 'prefinish': + case 'finish': + return false; + case 'drain': + if (typeof this[_fd] === 'number') { + this[_read](); + } + return false; + case 'error': + if (this[_errored]) { + return false; + } + this[_errored] = true; + return super.emit(ev, ...args); + default: + return super.emit(ev, ...args); + } + } +} +export class ReadStreamSync extends ReadStream { + [_open]() { + let threw = true; + try { + this[_onopen](null, fs.openSync(this[_path], 'r')); + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_read]() { + let threw = true; + try { + if (!this[_reading]) { + this[_reading] = true; + do { + const buf = this[_makeBuf](); + /* c8 ignore start */ + const br = buf.length === 0 + ? 0 + : fs.readSync(this[_fd], buf, 0, buf.length, null); + /* c8 ignore stop */ + if (!this[_handleChunk](br, buf)) { + break; + } + } while (true); + this[_reading] = false; + } + threw = false; + } + finally { + if (threw) { + this[_close](); + } + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.closeSync(fd); + this.emit('close'); + } + } +} +export class WriteStream extends EE { + readable = false; + writable = true; + [_errored] = false; + [_writing] = false; + [_ended] = false; + [_queue] = []; + [_needDrain] = false; + [_path]; + [_mode]; + [_autoClose]; + [_fd]; + [_defaultFlag]; + [_flags]; + [_finished] = false; + [_pos]; + constructor(path, opt) { + opt = opt || {}; + super(opt); + this[_path] = path; + this[_fd] = typeof opt.fd === 'number' ? opt.fd : undefined; + this[_mode] = opt.mode === undefined ? 0o666 : opt.mode; + this[_pos] = typeof opt.start === 'number' ? 
opt.start : undefined; + this[_autoClose] = + typeof opt.autoClose === 'boolean' ? opt.autoClose : true; + // truncating makes no sense when writing into the middle + const defaultFlag = this[_pos] !== undefined ? 'r+' : 'w'; + this[_defaultFlag] = opt.flags === undefined; + this[_flags] = opt.flags === undefined ? defaultFlag : opt.flags; + if (this[_fd] === undefined) { + this[_open](); + } + } + emit(ev, ...args) { + if (ev === 'error') { + if (this[_errored]) { + return false; + } + this[_errored] = true; + } + return super.emit(ev, ...args); + } + get fd() { + return this[_fd]; + } + get path() { + return this[_path]; + } + [_onerror](er) { + this[_close](); + this[_writing] = true; + this.emit('error', er); + } + [_open]() { + fs.open(this[_path], this[_flags], this[_mode], (er, fd) => this[_onopen](er, fd)); + } + [_onopen](er, fd) { + if (this[_defaultFlag] && + this[_flags] === 'r+' && + er && + er.code === 'ENOENT') { + this[_flags] = 'w'; + this[_open](); + } + else if (er) { + this[_onerror](er); + } + else { + this[_fd] = fd; + this.emit('open', fd); + if (!this[_writing]) { + this[_flush](); + } + } + } + end(buf, enc) { + if (buf) { + //@ts-ignore + this.write(buf, enc); + } + this[_ended] = true; + // synthetic after-write logic, where drain/finish live + if (!this[_writing] && + !this[_queue].length && + typeof this[_fd] === 'number') { + this[_onwrite](null, 0); + } + return this; + } + write(buf, enc) { + if (typeof buf === 'string') { + buf = Buffer.from(buf, enc); + } + if (this[_ended]) { + this.emit('error', new Error('write() after end()')); + return false; + } + if (this[_fd] === undefined || this[_writing] || this[_queue].length) { + this[_queue].push(buf); + this[_needDrain] = true; + return false; + } + this[_writing] = true; + this[_write](buf); + return true; + } + [_write](buf) { + fs.write(this[_fd], buf, 0, buf.length, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + [_onwrite](er, bw) { + if (er) { + this[_onerror](er); + } + else { + if (this[_pos] !== undefined && typeof bw === 'number') { + this[_pos] += bw; + } + if (this[_queue].length) { + this[_flush](); + } + else { + this[_writing] = false; + if (this[_ended] && !this[_finished]) { + this[_finished] = true; + this[_close](); + this.emit('finish'); + } + else if (this[_needDrain]) { + this[_needDrain] = false; + this.emit('drain'); + } + } + } + } + [_flush]() { + if (this[_queue].length === 0) { + if (this[_ended]) { + this[_onwrite](null, 0); + } + } + else if (this[_queue].length === 1) { + this[_write](this[_queue].pop()); + } + else { + const iovec = this[_queue]; + this[_queue] = []; + writev(this[_fd], iovec, this[_pos], (er, bw) => this[_onwrite](er, bw)); + } + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.close(fd, er => er ? this.emit('error', er) : this.emit('close')); + } + } +} +export class WriteStreamSync extends WriteStream { + [_open]() { + let fd; + // only wrap in a try{} block if we know we'll retry, to avoid + // the rethrow obscuring the error's source frame in most cases. 
+ if (this[_defaultFlag] && this[_flags] === 'r+') { + try { + fd = fs.openSync(this[_path], this[_flags], this[_mode]); + } + catch (er) { + if (er?.code === 'ENOENT') { + this[_flags] = 'w'; + return this[_open](); + } + else { + throw er; + } + } + } + else { + fd = fs.openSync(this[_path], this[_flags], this[_mode]); + } + this[_onopen](null, fd); + } + [_close]() { + if (this[_autoClose] && typeof this[_fd] === 'number') { + const fd = this[_fd]; + this[_fd] = undefined; + fs.closeSync(fd); + this.emit('close'); + } + } + [_write](buf) { + // throw the original, but try to close if it fails + let threw = true; + try { + this[_onwrite](null, fs.writeSync(this[_fd], buf, 0, buf.length, this[_pos])); + threw = false; + } + finally { + if (threw) { + try { + this[_close](); + } + catch { + // ok error + } + } + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/@isaacs/fs-minipass/package.json b/deps/npm/node_modules/@isaacs/fs-minipass/package.json new file mode 100644 index 00000000000000..cc4576c4afe776 --- /dev/null +++ b/deps/npm/node_modules/@isaacs/fs-minipass/package.json @@ -0,0 +1,72 @@ +{ + "name": "@isaacs/fs-minipass", + "version": "4.0.1", + "main": "./dist/commonjs/index.js", + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "keywords": [], + "author": "Isaac Z. 
Schlueter", + "license": "ISC", + "repository": { + "type": "git", + "url": "https://github.com/npm/fs-minipass.git" + }, + "description": "fs read and write streams based on minipass", + "dependencies": { + "minipass": "^7.0.4" + }, + "devDependencies": { + "@types/node": "^20.11.30", + "mutate-fs": "^2.1.1", + "prettier": "^3.2.5", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">=18.0.0" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@npmcli/agent/package.json index ef5b4e3228cc46..4d648fb5dfe052 100644 --- a/deps/npm/node_modules/@npmcli/agent/package.json +++ b/deps/npm/node_modules/@npmcli/agent/package.json @@ -1,17 +1,18 @@ { "name": "@npmcli/agent", - "version": "2.2.2", + "version": "3.0.0", "description": "the http/https agent used by the npm cli", "main": "lib/index.js", "scripts": { "gencerts": "bash scripts/create-cert.sh", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "author": "GitHub Inc.", "license": "ISC", @@ -24,11 +25,11 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.1", "publish": "true" }, "dependencies": { @@ -39,17 +40,16 @@ "socks-proxy-agent": "^8.0.3" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.1", "minipass-fetch": "^3.0.3", "nock": "^13.2.7", - "semver": "^7.5.4", - "simple-socks": "^3.1.0", + "socksv5": "^0.0.6", "tap": "^16.3.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/agent.git" + "url": "git+https://github.com/npm/agent.git" }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js index 06d03bbce7a32f..6bd4e9407e72d6 100644 --- a/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js +++ b/deps/npm/node_modules/@npmcli/arborist/lib/arborist/build-ideal-tree.js @@ -195,7 +195,10 @@ module.exports = cls => class IdealTreeBuilder extends cls { for (const node of this.idealTree.inventory.values()) { if (!node.optional) { try { - checkEngine(node.package, npmVersion, nodeVersion, this.options.force) + // if devEngines is present in the root node we ignore the engines check + if (!(node.isRoot && node.package.devEngines)) { + checkEngine(node.package, npmVersion, nodeVersion, this.options.force) + } } catch (err) { if (engineStrict) { throw err diff --git a/deps/npm/node_modules/@npmcli/arborist/package.json b/deps/npm/node_modules/@npmcli/arborist/package.json index 7fec2c6bea339b..b1e2b21a254635 100644 --- a/deps/npm/node_modules/@npmcli/arborist/package.json +++ b/deps/npm/node_modules/@npmcli/arborist/package.json @@ -1,49 +1,49 @@ { "name": "@npmcli/arborist", - "version": "7.5.4", + "version": "8.0.0", "description": "Manage node_modules trees", "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/fs": "^3.1.1", - "@npmcli/installed-package-contents": "^2.1.0", - "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/metavuln-calculator": "^7.1.1", - "@npmcli/name-from-folder": "^2.0.0", - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.1.0", - "@npmcli/query": "^3.1.0", - "@npmcli/redact": "^2.0.0", - "@npmcli/run-script": "^8.1.0", - "bin-links": "^4.0.4", - "cacache": "^18.0.3", + "@npmcli/fs": "^4.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/metavuln-calculator": "^8.0.0", + "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^6.0.1", + "@npmcli/query": "^4.0.0", + "@npmcli/redact": "^3.0.0", + "@npmcli/run-script": "^9.0.1", + "bin-links": "^5.0.0", + "cacache": "^19.0.1", "common-ancestor-path": "^1.0.1", - "hosted-git-info": "^7.0.2", - "json-parse-even-better-errors": "^3.0.2", + "hosted-git-info": "^8.0.0", + "json-parse-even-better-errors": "^4.0.0", "json-stringify-nice": "^1.1.4", "lru-cache": "^10.2.2", "minimatch": "^9.0.4", - "nopt": "^7.2.1", - "npm-install-checks": "^6.2.0", - "npm-package-arg": "^11.0.2", - "npm-pick-manifest": "^9.0.1", - "npm-registry-fetch": "^17.0.1", - "pacote": "^18.0.6", - "parse-conflict-json": "^3.0.0", - "proc-log": "^4.2.0", - "proggy": "^2.0.0", + "nopt": "^8.0.0", + "npm-install-checks": "^7.1.0", + "npm-package-arg": "^12.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.1", + "pacote": "^19.0.0", + "parse-conflict-json": "^4.0.0", + "proc-log": "^5.0.0", + "proggy": "^3.0.0", 
"promise-all-reject-late": "^1.0.0", "promise-call-limit": "^3.0.1", - "read-package-json-fast": "^3.0.2", + "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", - "ssri": "^10.0.6", + "ssri": "^12.0.0", "treeverse": "^3.0.0", "walk-up-path": "^3.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "benchmark": "^2.1.4", - "minify-registry-metadata": "^3.0.0", + "minify-registry-metadata": "^4.0.0", "nock": "^13.3.3", "tap": "^16.3.8", "tar-stream": "^3.0.0", @@ -89,7 +89,7 @@ ] }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/@npmcli/config/package.json b/deps/npm/node_modules/@npmcli/config/package.json index 6cf2b85438c65a..18c677393b5ff3 100644 --- a/deps/npm/node_modules/@npmcli/config/package.json +++ b/deps/npm/node_modules/@npmcli/config/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/config", - "version": "8.3.4", + "version": "9.0.0", "files": [ "bin/", "lib/" @@ -31,23 +31,23 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-globals": "^1.0.0", "@npmcli/template-oss": "4.23.3", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/map-workspaces": "^3.0.2", - "@npmcli/package-json": "^5.1.1", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/package-json": "^6.0.1", "ci-info": "^4.0.0", - "ini": "^4.1.2", - "nopt": "^7.2.1", - "proc-log": "^4.2.0", + "ini": "^5.0.0", + "nopt": "^8.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.5", "walk-up-path": "^3.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/@npmcli/fs/package.json index 5261a11b78000e..e4063ec8752437 100644 --- a/deps/npm/node_modules/@npmcli/fs/package.json +++ b/deps/npm/node_modules/@npmcli/fs/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/fs", - "version": "3.1.1", + "version": "4.0.0", "description": "filesystem utilities for the npm cli", "main": "lib/index.js", "files": [ @@ -11,12 +11,13 @@ "snap": "tap", "test": "tap", "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -29,19 +30,20 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "dependencies": { "semver": "^7.3.5" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/git/package.json b/deps/npm/node_modules/@npmcli/git/package.json index b6aa4a282cc0f1..2bc6730ba2151b 100644 --- a/deps/npm/node_modules/@npmcli/git/package.json +++ b/deps/npm/node_modules/@npmcli/git/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/git", - "version": "5.0.8", + "version": "6.0.1", "main": "lib/index.js", "files": [ "bin/", @@ -14,13 +14,14 @@ "author": "GitHub Inc.", "license": "ISC", "scripts": { - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "snap": "tap", "test": "tap", "posttest": "npm run lint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "timeout": 600, @@ -30,29 +31,29 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "npm-package-arg": "^11.0.0", "slash": "^3.0.0", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/promise-spawn": "^7.0.0", - "ini": "^4.1.3", + "@npmcli/promise-spawn": "^8.0.0", + "ini": "^5.0.0", "lru-cache": "^10.0.1", - "npm-pick-manifest": "^9.0.0", - "proc-log": "^4.0.0", + "npm-pick-manifest": "^10.0.0", + "proc-log": "^5.0.0", "promise-inflight": "^1.0.1", "promise-retry": "^2.0.1", "semver": "^7.3.5", - "which": "^4.0.0" + "which": "^5.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/@npmcli/installed-package-contents/package.json b/deps/npm/node_modules/@npmcli/installed-package-contents/package.json index 132256430a6c18..d5b68a737daf49 100644 --- a/deps/npm/node_modules/@npmcli/installed-package-contents/package.json +++ b/deps/npm/node_modules/@npmcli/installed-package-contents/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/installed-package-contents", - "version": "2.1.0", + "version": "3.0.0", "description": "Get the list of files installed in a package in node_modules, including bundled dependencies", "author": "GitHub Inc.", "main": "lib/index.js", @@ -11,35 +11,36 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "dependencies": { - "npm-bundled": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" + "npm-bundled": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/installed-package-contents.git" + "url": "git+https://github.com/npm/installed-package-contents.git" }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js b/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js index b20bf5de5d631e..1b39d2e3f67e08 100644 --- a/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js +++ b/deps/npm/node_modules/@npmcli/map-workspaces/lib/index.js @@ -2,7 +2,7 @@ const path = require('path') const getName = require('@npmcli/name-from-folder') const { minimatch } = require('minimatch') -const rpj = require('read-package-json-fast') +const pkgJson = require('@npmcli/package-json') const { glob } = require('glob') function appendNegatedPatterns (allPatterns) { @@ -67,15 +67,7 @@ function getPatterns (workspaces) { } function getPackageName (pkg, pathname) { - const { name } = pkg - return name || getName(pathname) -} - -function pkgPathmame (opts) { - return (...args) => { - const cwd = opts.cwd ? 
opts.cwd : process.cwd() - return path.join.apply(null, [cwd, ...args]) - } + return pkg.name || getName(pathname) } // make sure glob pattern only matches folders @@ -101,16 +93,19 @@ async function mapWorkspaces (opts = {}) { code: 'EMAPWORKSPACESPKG', }) } + if (!opts.cwd) { + opts.cwd = process.cwd() + } const { workspaces = [] } = opts.pkg const { patterns, negatedPatterns } = getPatterns(workspaces) const results = new Map() - const seen = new Map() if (!patterns.length && !negatedPatterns.length) { return results } + const seen = new Map() const getGlobOpts = () => ({ ...opts, ignore: [ @@ -121,8 +116,6 @@ async function mapWorkspaces (opts = {}) { ], }) - const getPackagePathname = pkgPathmame(opts) - let matches = await glob(patterns.map((p) => getGlobPattern(p)), getGlobOpts()) // preserves glob@8 behavior matches = matches.sort((a, b) => a.localeCompare(b, 'en')) @@ -138,10 +131,8 @@ async function mapWorkspaces (opts = {}) { for (const match of orderedMatches) { let pkg - const packageJsonPathname = getPackagePathname(match, 'package.json') - try { - pkg = await rpj(packageJsonPathname) + pkg = await pkgJson.normalize(path.join(opts.cwd, match)) } catch (err) { if (err.code === 'ENOENT') { continue @@ -150,15 +141,14 @@ async function mapWorkspaces (opts = {}) { } } - const packagePathname = path.dirname(packageJsonPathname) - const name = getPackageName(pkg, packagePathname) + const name = getPackageName(pkg.content, pkg.path) let seenPackagePathnames = seen.get(name) if (!seenPackagePathnames) { seenPackagePathnames = new Set() seen.set(name, seenPackagePathnames) } - seenPackagePathnames.add(packagePathname) + seenPackagePathnames.add(pkg.path) } const errorMessageArray = ['must not have multiple workspaces with the same name'] @@ -200,6 +190,9 @@ mapWorkspaces.virtual = function (opts = {}) { code: 'EMAPWORKSPACESLOCKFILE', }) } + if (!opts.cwd) { + opts.cwd = process.cwd() + } const { packages = {} } = opts.lockfile const { workspaces = [] } = packages[''] || {} @@ -218,10 +211,9 @@ mapWorkspaces.virtual = function (opts = {}) { } } - const getPackagePathname = pkgPathmame(opts) for (const pattern of patterns) { for (const packageKey of minimatch.match(packageKeys, pattern)) { - const packagePathname = getPackagePathname(packageKey) + const packagePathname = path.join(opts.cwd, packageKey) const name = getPackageName(packages[packageKey], packagePathname) results.set(packagePathname, name) } diff --git a/deps/npm/node_modules/@npmcli/map-workspaces/package.json b/deps/npm/node_modules/@npmcli/map-workspaces/package.json index e6292b06bd2b43..f2667f25e9d5d2 100644 --- a/deps/npm/node_modules/@npmcli/map-workspaces/package.json +++ b/deps/npm/node_modules/@npmcli/map-workspaces/package.json @@ -1,18 +1,18 @@ { "name": "@npmcli/map-workspaces", - "version": "3.0.6", + "version": "4.0.1", "main": "lib/index.js", "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "description": "Retrieves a name:pathname Map for a given workspaces config", "repository": { "type": "git", - "url": "https://github.com/npm/map-workspaces.git" + "url": "git+https://github.com/npm/map-workspaces.git" }, "keywords": [ "npm", @@ -25,14 +25,15 @@ "author": "GitHub Inc.", "license": "ISC", "scripts": { - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "pretest": "npm run lint", "test": "tap", "snap": "tap", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run 
eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -42,19 +43,19 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/name-from-folder": "^2.0.0", + "@npmcli/name-from-folder": "^3.0.0", + "@npmcli/package-json": "^6.0.0", "glob": "^10.2.2", - "minimatch": "^9.0.0", - "read-package-json-fast": "^3.0.0" + "minimatch": "^9.0.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json index a7ec02d2ee72b2..d4c3cf54d83ea7 100644 --- a/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json +++ b/deps/npm/node_modules/@npmcli/metavuln-calculator/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/metavuln-calculator", - "version": "7.1.1", + "version": "8.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -18,9 +18,9 @@ "posttest": "npm run lint", "snap": "tap", "postsnap": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force" }, @@ -33,24 +33,24 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "require-inject": "^1.4.4", "tap": "^16.0.1" }, "dependencies": { - "cacache": "^18.0.0", - "json-parse-even-better-errors": "^3.0.0", - "pacote": "^18.0.0", - "proc-log": "^4.1.0", + "cacache": "^19.0.0", + "json-parse-even-better-errors": "^4.0.0", + "pacote": "^19.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.5" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true", "ciVersions": [ "16.14.0", diff --git a/deps/npm/node_modules/@npmcli/name-from-folder/package.json b/deps/npm/node_modules/@npmcli/name-from-folder/package.json index f0aa5b16dba1aa..323edd81d22fb4 100644 --- a/deps/npm/node_modules/@npmcli/name-from-folder/package.json +++ b/deps/npm/node_modules/@npmcli/name-from-folder/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/name-from-folder", - "version": "2.0.0", + "version": "3.0.0", "files": [ "bin/", "lib/" @@ -9,30 +9,32 @@ "description": "Get the package name from a folder path", "repository": { "type": "git", - "url": "https://github.com/npm/name-from-folder.git" + "url": "git+https://github.com/npm/name-from-folder.git" }, "author": "GitHub Inc.", "license": "ISC", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.1", - "@npmcli/template-oss": "4.11.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.2" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/node-gyp/LICENSE b/deps/npm/node_modules/@npmcli/node-gyp/LICENSE new file mode 100644 index 00000000000000..3609cabca45350 --- /dev/null +++ b/deps/npm/node_modules/@npmcli/node-gyp/LICENSE @@ -0,0 +1,7 @@ +ISC License: + +Copyright (c) 2023 by GitHub Inc. + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/@npmcli/node-gyp/package.json b/deps/npm/node_modules/@npmcli/node-gyp/package.json index 999572bc5883a2..3be9663a39de04 100644 --- a/deps/npm/node_modules/@npmcli/node-gyp/package.json +++ b/deps/npm/node_modules/@npmcli/node-gyp/package.json @@ -1,19 +1,20 @@ { "name": "@npmcli/node-gyp", - "version": "3.0.0", + "version": "4.0.0", "description": "Tools for dealing with node-gyp packages", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/node-gyp.git" + "url": "git+https://github.com/npm/node-gyp.git" }, "keywords": [ "npm", @@ -28,16 +29,17 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js index 682d234825de94..3adec0143f445a 100644 --- a/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js +++ b/deps/npm/node_modules/@npmcli/package-json/lib/normalize.js @@ -127,13 +127,9 @@ function unixifyPath (ref) { return ref.replace(/\\|:/g, '/') } -function securePath (ref) { - const secured = path.join('.', path.join('/', unixifyPath(ref))) - return secured.startsWith('.') ? '' : secured -} - function secureAndUnixifyPath (ref) { - return unixifyPath(securePath(ref)) + const secured = unixifyPath(path.join('.', path.join('/', unixifyPath(ref)))) + return secured.startsWith('./') ? 
'' : secured } // We don't want the `changes` array in here by default because this is a hot @@ -376,7 +372,7 @@ const normalize = async (pkg, { strict, steps, root, changes, allowLegacyCase }) // expand "directories.bin" if (steps.includes('binDir') && data.directories?.bin && !data.bin) { - const binsDir = path.resolve(pkg.path, securePath(data.directories.bin)) + const binsDir = path.resolve(pkg.path, secureAndUnixifyPath(data.directories.bin)) const bins = await lazyLoadGlob()('**', { cwd: binsDir }) data.bin = bins.reduce((acc, binFile) => { if (binFile && !binFile.startsWith('.')) { diff --git a/deps/npm/node_modules/@npmcli/package-json/package.json b/deps/npm/node_modules/@npmcli/package-json/package.json index a5ea22bdbb340f..e766e8ccb4bbf5 100644 --- a/deps/npm/node_modules/@npmcli/package-json/package.json +++ b/deps/npm/node_modules/@npmcli/package-json/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/package-json", - "version": "5.2.0", + "version": "6.0.1", "description": "Programmatic API to update package.json", "main": "lib/index.js", "files": [ @@ -10,12 +10,13 @@ "scripts": { "snap": "tap", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [ "npm", @@ -24,19 +25,19 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "read-package-json": "^7.0.0", - "read-package-json-fast": "^3.0.2", + "read-package-json-fast": "^4.0.0", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/git": "^5.0.0", + "@npmcli/git": "^6.0.0", "glob": "^10.2.2", - "hosted-git-info": "^7.0.0", - "json-parse-even-better-errors": "^3.0.0", - "normalize-package-data": "^6.0.0", - "proc-log": "^4.0.0", + "hosted-git-info": "^8.0.0", + "json-parse-even-better-errors": "^4.0.0", + "normalize-package-data": "^7.0.0", + "proc-log": "^5.0.0", "semver": "^7.5.3" }, "repository": { @@ -44,11 +45,11 @@ "url": "git+https://github.com/npm/package-json.git" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/@npmcli/promise-spawn/package.json b/deps/npm/node_modules/@npmcli/promise-spawn/package.json index 1b633f84596d20..9914063f85156d 100644 --- a/deps/npm/node_modules/@npmcli/promise-spawn/package.json +++ b/deps/npm/node_modules/@npmcli/promise-spawn/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/promise-spawn", - "version": "7.0.2", + "version": "8.0.1", "files": [ "bin/", "lib/" @@ -16,12 +16,13 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -31,20 +32,20 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "spawk": "^1.7.1", "tap": "^16.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "dependencies": { - "which": "^4.0.0" + "which": "^5.0.0" } } diff --git a/deps/npm/node_modules/@npmcli/query/package.json b/deps/npm/node_modules/@npmcli/query/package.json index ad45c18c44cd64..14f7fbfaac0168 100644 --- a/deps/npm/node_modules/@npmcli/query/package.json +++ b/deps/npm/node_modules/@npmcli/query/package.json @@ -1,16 +1,17 @@ { "name": "@npmcli/query", - "version": "3.1.0", + "version": "4.0.0", "description": "npm query parser and tools", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "contributors": [ { @@ -35,24 +36,24 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": true }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.2.0" }, "dependencies": { - "postcss-selector-parser": "^6.0.10" + "postcss-selector-parser": "^6.1.2" }, "repository": { "type": "git", - "url": "https://github.com/npm/query.git" + "url": "git+https://github.com/npm/query.git" }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/@npmcli/redact/package.json b/deps/npm/node_modules/@npmcli/redact/package.json index 831387ca541061..649f82ef5ca89b 100644 --- a/deps/npm/node_modules/@npmcli/redact/package.json +++ b/deps/npm/node_modules/@npmcli/redact/package.json @@ -1,6 +1,6 @@ { "name": "@npmcli/redact", - "version": "2.0.1", + "version": "3.0.0", "description": "Redact sensitive npm information from output", "main": "lib/index.js", "exports": { @@ -10,12 +10,13 @@ }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [], "author": "GitHub Inc.", @@ -26,11 +27,11 @@ ], "repository": { "type": "git", - "url": "https://github.com/npm/redact.git" + "url": "git+https://github.com/npm/redact.git" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": true }, "tap": { @@ -41,11 +42,11 @@ "timeout": 120 }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.2", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.10" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" } } diff --git a/deps/npm/node_modules/@npmcli/run-script/package.json b/deps/npm/node_modules/@npmcli/run-script/package.json index 8a83e726fbeb2c..e5fd2fef62e5ce 100644 --- a/deps/npm/node_modules/@npmcli/run-script/package.json +++ b/deps/npm/node_modules/@npmcli/run-script/package.json @@ -1,32 +1,32 @@ { "name": "@npmcli/run-script", - "version": "8.1.0", + "version": "9.0.1", "description": "Run a lifecycle script for a package (descendant of npm-lifecycle)", "author": "GitHub Inc.", "license": "ISC", "scripts": { "test": "tap", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "snap": "tap", "posttest": "npm run lint", "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "spawk": "^1.8.1", "tap": "^16.0.1" }, "dependencies": { - "@npmcli/node-gyp": "^3.0.0", - "@npmcli/package-json": "^5.0.0", - "@npmcli/promise-spawn": "^7.0.0", + "@npmcli/node-gyp": "^4.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", "node-gyp": "^10.0.0", - "proc-log": "^4.0.0", - "which": "^4.0.0" + "proc-log": "^5.0.0", + "which": 
"^5.0.0" }, "files": [ "bin/", @@ -35,14 +35,14 @@ "main": "lib/run-script.js", "repository": { "type": "git", - "url": "https://github.com/npm/run-script.git" + "url": "git+https://github.com/npm/run-script.git" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 00000000000000..c541b93001517e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,206 @@ +'use strict' + +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const timers = require('timers/promises') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') +const { Agent: AgentBase } = require('agent-base') + +module.exports = class Agent extends AgentBase { + #options + #timeouts + #proxy + #noProxy + #ProxyAgent + + constructor (options = {}) { + const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) + + super(normalizedOptions) + + this.#options = normalizedOptions + this.#timeouts = timeouts + + if (proxy) { + this.#proxy = new URL(proxy) + this.#noProxy = noProxy + this.#ProxyAgent = getProxyAgent(proxy) + } + } + + get proxy () { + return this.#proxy ? { url: this.#proxy } : {} + } + + #getProxy (options) { + if (!this.#proxy) { + return + } + + const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { + proxy: this.#proxy, + noProxy: this.#noProxy, + }) + + if (!proxy) { + return + } + + const cacheKey = cacheOptions({ + ...options, + ...this.#options, + timeouts: this.#timeouts, + proxy, + }) + + if (proxyCache.has(cacheKey)) { + return proxyCache.get(cacheKey) + } + + let ProxyAgent = this.#ProxyAgent + if (Array.isArray(ProxyAgent)) { + ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] + } + + const proxyAgent = new ProxyAgent(proxy, { + ...this.#options, + socketOptions: { family: this.#options.family }, + }) + proxyCache.set(cacheKey, proxyAgent) + + return proxyAgent + } + + // takes an array of promises and races them against the connection timeout + // which will throw the necessary error if it is hit. This will return the + // result of the promise race. 
+ async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { + if (timeout) { + const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) + .then(() => { + throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) + promises.push(connectionTimeout) + } + + let result + try { + result = await Promise.race(promises) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return result + } + + async connect (request, options) { + // if the connection does not have its own lookup function + // set, then use the one from our options + options.lookup ??= this.#options.lookup + + let socket + let timeout = this.#timeouts.connection + const isSecureEndpoint = this.isSecureEndpoint(options) + + const proxy = this.#getProxy(options) + if (proxy) { + // some of the proxies will wait for the socket to fully connect before + // returning so we have to await this while also racing it against the + // connection timeout. + const start = Date.now() + socket = await this.#timeoutConnection({ + options, + timeout, + promises: [proxy.connect(request, options)], + }) + // see how much time proxy.connect took and subtract it from + // the timeout + if (timeout) { + timeout = timeout - (Date.now() - start) + } + } else { + socket = (isSecureEndpoint ? tls : net).connect(options) + } + + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + + const abortController = new AbortController() + const { signal } = abortController + + const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] + ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) + : Promise.resolve() + + await this.#timeoutConnection({ + options, + timeout, + promises: [ + connectPromise, + once(socket, 'error', { signal }).then((err) => { + throw err[0] + }), + ], + }, abortController) + + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) + }) + } + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 
'keep-alive' : 'close') + + if (this.#timeouts.response) { + let responseTimeout + request.once('finish', () => { + setTimeout(() => { + request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) + }, this.#timeouts.response) + }) + request.once('response', () => { + clearTimeout(responseTimeout) + }) + } + + if (this.#timeouts.transfer) { + let transferTimeout + request.once('response', (res) => { + setTimeout(() => { + res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) + }, this.#timeouts.transfer) + res.once('close', () => { + clearTimeout(transferTimeout) + }) + }) + } + + return super.addRequest(request, options) + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js new file mode 100644 index 00000000000000..3c6946c566d736 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/dns.js @@ -0,0 +1,53 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const dns = require('dns') + +// this is a factory so that each request can have its own opts (i.e. ttl) +// while still sharing the cache across all requests +const cache = new LRUCache({ max: 50 }) + +const getOptions = ({ + family = 0, + hints = dns.ADDRCONFIG, + all = false, + verbatim = undefined, + ttl = 5 * 60 * 1000, + lookup = dns.lookup, +}) => ({ + // hints and lookup are returned since both are top level properties to (net|tls).connect + hints, + lookup: (hostname, ...args) => { + const callback = args.pop() // callback is always last arg + const lookupOptions = args[0] ?? {} + + const options = { + family, + hints, + all, + verbatim, + ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions), + } + + const key = JSON.stringify({ hostname, ...options }) + + if (cache.has(key)) { + const cached = cache.get(key) + return process.nextTick(callback, null, ...cached) + } + + lookup(hostname, options, (err, ...result) => { + if (err) { + return callback(err) + } + + cache.set(key, result, { ttl }) + return callback(null, ...result) + }) + }, +}) + +module.exports = { + cache, + getOptions, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js new file mode 100644 index 00000000000000..70475aec8eb357 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/errors.js @@ -0,0 +1,61 @@ +'use strict' + +class InvalidProxyProtocolError extends Error { + constructor (url) { + super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDPROXY' + this.proxy = url + } +} + +class ConnectionTimeoutError extends Error { + constructor (host) { + super(`Timeout connecting to host \`${host}\``) + this.code = 'ECONNECTIONTIMEOUT' + this.host = host + } +} + +class IdleTimeoutError extends Error { + constructor (host) { + super(`Idle timeout reached for host \`${host}\``) + this.code = 'EIDLETIMEOUT' + this.host = host + } +} + +class ResponseTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Response timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `connecting to host \`${request.host}\`` + super(msg) + this.code = 'ERESPONSETIMEOUT' + this.proxy = proxy + this.request = request + } +} + +class TransferTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Transfer timeout ' 
+ if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + this.proxy = proxy + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 00000000000000..b33d6eaef07a21 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,56 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, proxyCache } = require('./proxy.js') +const dns = require('./dns.js') +const Agent = require('./agents.js') + +const agentCache = new LRUCache({ max: 20 }) + +const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { + // false has meaning so this can't be a simple truthiness check + if (agent != null) { + return agent + } + + url = new URL(url) + + const proxyForUrl = getProxy(url, { proxy, noProxy }) + const normalizedOptions = { + ...normalizeOptions(options), + proxy: proxyForUrl, + } + + const cacheKey = cacheOptions({ + ...normalizedOptions, + secureEndpoint: url.protocol === 'https:', + }) + + if (agentCache.has(cacheKey)) { + return agentCache.get(cacheKey) + } + + const newAgent = new Agent(normalizedOptions) + agentCache.set(cacheKey, newAgent) + + return newAgent +} + +module.exports = { + getAgent, + Agent, + // these are exported for backwards compatability + HttpAgent: Agent, + HttpsAgent: Agent, + cache: { + proxy: proxyCache, + agent: agentCache, + dns: dns.cache, + clear: () => { + proxyCache.clear() + agentCache.clear() + dns.cache.clear() + }, + }, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js new file mode 100644 index 00000000000000..0bf53f725f0846 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/options.js @@ -0,0 +1,86 @@ +'use strict' + +const dns = require('./dns') + +const normalizeOptions = (opts) => { + const family = parseInt(opts.family ?? '0', 10) + const keepAlive = opts.keepAlive ?? true + + const normalized = { + // nodejs http agent options. these are all the defaults + // but kept here to increase the likelihood of cache hits + // https://nodejs.org/api/http.html#new-agentoptions + keepAliveMsecs: keepAlive ? 1000 : undefined, + maxSockets: opts.maxSockets ?? 15, + maxTotalSockets: Infinity, + maxFreeSockets: keepAlive ? 256 : undefined, + scheduling: 'fifo', + // then spread the rest of the options + ...opts, + // we already set these to their defaults that we want + family, + keepAlive, + // our custom timeout options + timeouts: { + // the standard timeout option is mapped to our idle timeout + // and then deleted below + idle: opts.timeout ?? 
0, + connection: 0, + response: 0, + transfer: 0, + ...opts.timeouts, + }, + // get the dns options that go at the top level of socket connection + ...dns.getOptions({ family, ...opts.dns }), + } + + // remove timeout since we already used it to set our own idle timeout + delete normalized.timeout + + return normalized +} + +const createKey = (obj) => { + let key = '' + const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) + for (let [k, v] of sorted) { + if (v == null) { + v = 'null' + } else if (v instanceof URL) { + v = v.toString() + } else if (typeof v === 'object') { + v = createKey(v) + } + key += `${k}:${v}:` + } + return key +} + +const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ + secureEndpoint: !!secureEndpoint, + // socket connect options + family: options.family, + hints: options.hints, + localAddress: options.localAddress, + // tls specific connect options + strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, + ca: secureEndpoint ? options.ca : null, + cert: secureEndpoint ? options.cert : null, + key: secureEndpoint ? options.key : null, + // http agent options + keepAlive: options.keepAlive, + keepAliveMsecs: options.keepAliveMsecs, + maxSockets: options.maxSockets, + maxTotalSockets: options.maxTotalSockets, + maxFreeSockets: options.maxFreeSockets, + scheduling: options.scheduling, + // timeout options + timeouts: options.timeouts, + // proxy + proxy: options.proxy, +}) + +module.exports = { + normalizeOptions, + cacheOptions, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js new file mode 100644 index 00000000000000..6272e929e57bcf --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/lib/proxy.js @@ -0,0 +1,88 @@ +'use strict' + +const { HttpProxyAgent } = require('http-proxy-agent') +const { HttpsProxyAgent } = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +const { LRUCache } = require('lru-cache') +const { InvalidProxyProtocolError } = require('./errors.js') + +const PROXY_CACHE = new LRUCache({ max: 20 }) + +const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) + +const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) + +const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { + key = key.toLowerCase() + if (PROXY_ENV_KEYS.has(key)) { + acc[key] = value + } + return acc +}, {}) + +const getProxyAgent = (url) => { + url = new URL(url) + + const protocol = url.protocol.slice(0, -1) + if (SOCKS_PROTOCOLS.has(protocol)) { + return SocksProxyAgent + } + if (protocol === 'https' || protocol === 'http') { + return [HttpProxyAgent, HttpsProxyAgent] + } + + throw new InvalidProxyProtocolError(url) +} + +const isNoProxy = (url, noProxy) => { + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) + } + + if (!noProxy || !noProxy.length) { + return false + } + + const hostSegments = url.hostname.split('.').reverse() + + return noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; i++) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) +} + +const getProxy = (url, { proxy, noProxy }) => { + url = new URL(url) + + if (!proxy) { + proxy = url.protocol === 'https:' + ? 
PROXY_ENV.https_proxy + : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!noProxy) { + noProxy = PROXY_ENV.no_proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return new URL(proxy) +} + +module.exports = { + getProxyAgent, + getProxy, + proxyCache: PROXY_CACHE, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json new file mode 100644 index 00000000000000..ef5b4e3228cc46 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/agent/package.json @@ -0,0 +1,60 @@ +{ + "name": "@npmcli/agent", + "version": "2.2.2", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": "true" + }, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "semver": "^7.5.4", + "simple-socks": "^3.1.0", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 00000000000000..5fc208ff122e08 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
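Aside: the vendored @npmcli/agent above is the single socket layer for npm's HTTP stack. getAgent() resolves the effective proxy (explicit option first, then the https_proxy/http_proxy/proxy environment variables, filtered through no_proxy), flattens the normalized options into a string cache key, and memoizes one Agent per distinct configuration. A rough usage sketch against that exported surface; the proxy URL, host list, and timeout values are made up for illustration:

'use strict'

const { getAgent, cache } = require('@npmcli/agent')

const opts = {
  proxy: 'http://proxy.example:8080',     // hypothetical forward proxy
  noProxy: 'localhost,.internal.example', // same syntax as no_proxy
  timeouts: { connection: 10000, idle: 30000 },
}

// identical options flatten to the same cacheOptions() key, so repeated
// calls hand back the memoized Agent instance rather than a new one
const a = getAgent('https://registry.npmjs.org', opts)
const b = getAgent('https://registry.npmjs.org', opts)
console.log(a === b) // true

// agent is checked with != null, so `agent: false` (disable pooling)
// is passed through untouched rather than replaced with a default
console.log(getAgent('https://registry.npmjs.org', { agent: false })) // false

cache.clear() // drops the proxy, agent, and dns caches together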
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 00000000000000..cb5982f79077ac --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 00000000000000..4d13bc037359d7 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 00000000000000..93546dfb7655bf --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js new file mode 100644 index 00000000000000..1cd1e05d0c533d --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. 
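+// the errno context (errno, syscall, path, dest) is surfaced through
+// getters/setters so instances can be introspected and adjusted like
+// node's real fs errors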
+class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 00000000000000..972ce7aa12abef --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('fs/promises') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 00000000000000..80eb10de971918 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('./errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('fs/promises') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? 
fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? + (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
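+// comparing bigint dev/ino pairs (areIdentical) rather than path strings
+// keeps this correct even when a parent directory is reached through a
+// symlink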
+async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before 
setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) + await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
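+  // (stat() rather than lstat() on purpose: what matters is the directory
+  // src ultimately resolves to, not the link file itself)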
+ const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 00000000000000..81c746304cc428 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,13 @@ +'use strict' + +const cp = require('./cp/index.js') +const withTempDir = require('./with-temp-dir.js') +const readdirScoped = require('./readdir-scoped.js') +const moveFile = require('./move-file.js') + +module.exports = { + cp, + withTempDir, + readdirScoped, + moveFile, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js new file mode 100644 index 00000000000000..d56e06d384659a --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/move-file.js @@ -0,0 +1,78 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const fs = require('fs/promises') + +const pathExists = async path => { + try { + await fs.access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await fs.mkdir(dirname(destination), { recursive: true }) + + try { + await fs.rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await fs.lstat(source) + if (sourceStat.isDirectory()) { + const files = await fs.readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await fs.copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await fs.readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await fs.stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await fs.symlink( + target, + symDestination, + targetStat + ) + })) + await fs.rm(source, { recursive: true, force: true }) + } +} + +module.exports = moveFile diff --git 
a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js new file mode 100644 index 00000000000000..cd601dfbe7486b --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/readdir-scoped.js @@ -0,0 +1,20 @@ +const { readdir } = require('fs/promises') +const { join } = require('path') + +const readdirScoped = async (dir) => { + const results = [] + + for (const item of await readdir(dir)) { + if (item.startsWith('@')) { + for (const scopedItem of await readdir(join(dir, item))) { + results.push(join(item, scopedItem)) + } + } else { + results.push(item) + } + } + + return results +} + +module.exports = readdirScoped diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 00000000000000..0738ac4f29e1be --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const { mkdir, mkdtemp, rm } = require('fs/promises') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory + await mkdir(root, { recursive: true }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json new file mode 100644 index 00000000000000..5261a11b78000e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/@npmcli/fs/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/fs", + "version": "3.1.1", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.22.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js new file mode 100644 index 00000000000000..ad5a76a4f73f26 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js new file mode 100644 index 00000000000000..5f6192c3cec566 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/read.js @@ -0,0 +1,165 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? 
{ size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + + if (stat.size !== data.length) { + throw sizeError(stat.size, data.length) + } + + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? { size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
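For context on the digest-picking logic that continues below: a single SRI string may carry entries for several algorithms, and `ssri.parse` (the dependency already required in this file) exposes them keyed by algorithm. A small illustration; the digest values are invented placeholders:

```js
// Illustration of multi-entry SRI parsing. Digests are placeholders, not
// hashes of any real content.
const ssri = require('ssri')

const sri = ssri.parse('sha1-3q2+7w== sha512-dGhpcyBpcyBub3QgYSByZWFsIGRpZ2VzdA==')
console.log(Object.keys(sri))    // ['sha1', 'sha512'] — one hash list per algorithm
console.log(sri.pickAlgorithm()) // 'sha512' — the strongest algorithm present wins
```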
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js new file mode 100644 index 00000000000000..ce58d679e4cb25 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js new file mode 100644 index 00000000000000..e7187abca8788a --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/content/write.js @@ -0,0 +1,206 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + this.handleContentP.catch(error => this.emit('error', error)) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri) { + const destination = 
contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js new file mode 100644 index 00000000000000..89c28f2f257d48 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,336 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +const pMap = require('p-map') +const lsStreamConcurrency = 5 + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. 
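To make the two cases concrete before the branch below: read oldest-to-newest, a bucket might hold a normal insert followed by a null-integrity delete marker. The entries here are invented for illustration:

```js
// Invented bucket contents, oldest first, as compact() would see them:
const exampleEntries = [
  { key: 'some-cache-key', integrity: 'sha512-aaa', time: 1 }, // normal insert
  { key: 'some-cache-key', integrity: null, time: 2 },         // delete marker
]
// Walking backwards (newest first) without opts.validateEntry, the null
// entry is hit first and shadows everything older, so the loop breaks.
```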
+ if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. 
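A sketch of the on-disk line format this scheme produces, with the append itself following below (the entry values are invented):

```js
// Each appended line is `<sha1-of-json>\t<json>`; re-hashing the JSON on read
// detects torn or interleaved appends. Entry values invented for illustration.
const crypto = require('crypto')

const stringified = JSON.stringify({
  key: 'my-key',
  integrity: 'sha512-abc',
  time: 1700000000000,
  size: 11,
})
const hash = crypto.createHash('sha1').update(stringified).digest('hex')
console.log(`\n${hash}\t${stringified}`)
// A reader splits on '\t' and discards the line if sha1(json) !== hash.
```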
+ await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await pMap(buckets, async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await pMap(subbuckets, async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await pMap(subbucketEntries, async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js new file mode 100644 index 00000000000000..80ec206c7ecaaa --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream 
(cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js new file mode 100644 index 00000000000000..c9b0da5f3a271b --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream 
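These exports (the remaining aliases continue below) make up cacache's public surface. A minimal put/get round-trip against a scratch cache directory — the path is invented — behaves like this:

```js
// Minimal usage of the public API assembled in this file (cache path invented).
const cacache = require('cacache')

async function demo () {
  const integrity = await cacache.put('/tmp/demo-cache', 'my-key', 'hello world')
  const byKey = await cacache.get('/tmp/demo-cache', 'my-key')
  // byKey = { data: Buffer, metadata, integrity, size }
  const data = await cacache.get.byDigest('/tmp/demo-cache', integrity)
  console.log(byKey.data.toString(), data.toString())
}

demo().catch(console.error)
```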
+module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js new file mode 100644 index 00000000000000..2ecc60912e4563 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const { LRUCache } = require('lru-cache') + +const MEMOIZED = new LRUCache({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js new file mode 100644 index 00000000000000..9fc932d5f6dec5 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return 
res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js new file mode 100644 index 00000000000000..a94760c7cf2430 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js new file mode 100644 index 00000000000000..8500c1c16a429f --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 00000000000000..445599b5038088 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js 
b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 00000000000000..0bf5302136ebeb --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js new file mode 100644 index 00000000000000..d7423da1295b68 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime () { + return { startTime: new Date() } +} + +async function markEndTime () { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
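Steps 3–5 hinge on mapping each content file path back to an SRI value; the sweep below does this by slicing path segments. An illustrative standalone version, with an invented path and digest:

```js
// How a content path maps back to an SRI string (mirrors the sweep below).
const ssri = require('ssri')

const file = '/tmp/demo-cache/content-v2/sha512/ba/da/55deadbeef'
const parts = file.split(/[/\\]/)
const digest = parts.slice(-3).join('')   // 'bada55deadbeef' — rejoined hex
const algo = parts[parts.length - 4]      // 'sha512' — the algorithm directory
console.log(ssri.fromHex(digest, algo).toString()) // 'sha512-<base64 of bytes>'
```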
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json new file mode 100644 index 00000000000000..6e6219158ed759 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "18.0.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 00000000000000..1808eb2844231c --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 00000000000000..bfcfacbcc95e18 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,471 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? 
options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }).catch((err) => { + body.emit('error', err) + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if 
(err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 00000000000000..67a66573bebe66 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 00000000000000..0de49d23fb9336 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 00000000000000..f7684d562b7fae --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 00000000000000..ada3c8600dae92 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the 
decision + // based on the request's headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 00000000000000..233ba67e165502 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 00000000000000..2f12e8e1b61131 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 00000000000000..f77511279f831d --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 00000000000000..b1d221b2d0ce31 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 00000000000000..8554564074de6e --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,131 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') +const { log } = require('proc-log') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + /* eslint-disable-next-line max-len */ + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json new file mode 100644 index 00000000000000..7adb4d1e7f9719 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/make-fetch-happen/package.json @@ -0,0 +1,75 @@ +{ + "name": "make-fetch-happen", + "version": "13.0.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.4", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.4", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE new file mode 100644 index 00000000000000..3c3410cdc12ee3 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. 
Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. +https://github.com/bitinn/node-fetch diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 00000000000000..b18f643269e375 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 00000000000000..121b1730102e72 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const { Minipass } = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? 
Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 00000000000000..62286bd1de0d91 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,350 @@ +'use strict' +const { Minipass } = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const { BUFFER } = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) { + // defer error until textConverted is called +} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
er + : new FetchError(`Invalid response while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + this[INTERNALS].error = error + }) + } + } + + get body () { + return this[INTERNALS].body + } + + get bodyUsed () { + return this[INTERNALS].disturbed + } + + arrayBuffer () { + return this[CONSUME_BODY]().then(buf => + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) + } + + blob () { + const ct = this.headers && this.headers.get('content-type') || '' + return this[CONSUME_BODY]().then(buf => Object.assign( + new Blob([], { type: ct.toLowerCase() }), + { [BUFFER]: buf } + )) + } + + async json () { + const buf = await this[CONSUME_BODY]() + try { + return JSON.parse(buf.toString()) + } catch (er) { + throw new FetchError( + `invalid json response body at ${this.url} reason: ${er.message}`, + 'invalid-json' + ) + } + } + + text () { + return this[CONSUME_BODY]().then(buf => buf.toString()) + } + + buffer () { + return this[CONSUME_BODY]() + } + + textConverted () { + return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) + } + + [CONSUME_BODY] () { + if (this[INTERNALS].disturbed) { + return Promise.reject(new TypeError(`body used already for: ${ + this.url}`)) + } + + this[INTERNALS].disturbed = true + + if (this[INTERNALS].error) { + return Promise.reject(this[INTERNALS].error) + } + + // body is null + if (this.body === null) { + return Promise.resolve(Buffer.alloc(0)) + } + + if (Buffer.isBuffer(this.body)) { + return Promise.resolve(this.body) + } + + const upstream = isBlob(this.body) ? this.body.stream() : this.body + + /* istanbul ignore if: should never happen */ + if (!Minipass.isStream(upstream)) { + return Promise.resolve(Buffer.alloc(0)) + } + + const stream = this.size && upstream instanceof MinipassSized ? upstream + : !this.size && upstream instanceof Minipass && + !(upstream instanceof MinipassSized) ? upstream + : this.size ? new MinipassSized({ size: this.size }) + : new Minipass() + + // allow timeout on slow response body, but only if the stream is still writable. this + // makes the timeout center on the socket stream from lib/index.js rather than the + // intermediary minipass stream we create to receive the data + const resTimeout = this.timeout && stream.writable ? setTimeout(() => { + stream.emit('error', new FetchError( + `Response timeout while trying to fetch ${ + this.url} (over ${this.timeout}ms)`, 'body-timeout')) + }, this.timeout) : null + + // do not keep the process open just for this timeout, even + // though we expect it'll get cleared eventually. + if (resTimeout && resTimeout.unref) { + resTimeout.unref() + } + + // do the pipe in the promise, because the pipe() can send too much + // data through right away and upset the MP Sized object + return new Promise((resolve) => { + // if the stream is some other kind of stream, then pipe through a MP + // so we can collect it more easily. 
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') { + throw er + } else if (er.name === 'RangeError') { + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + } else { + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + } + }) + } + + static clone (instance) { + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used') + } + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else { + return instance.body + } + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const { body } = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const { body } = instance + + if (body === null || body === undefined) { + dest.end() + } else if (Buffer.isBuffer(body) || typeof body === 'string') { + dest.end(body) + } else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true }, +}) + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
+  (typeof obj !== 'object' ||
+    typeof obj.append !== 'function' ||
+    typeof obj.delete !== 'function' ||
+    typeof obj.get !== 'function' ||
+    typeof obj.getAll !== 'function' ||
+    typeof obj.has !== 'function' ||
+    typeof obj.set !== 'function') ? false
+    // Brand-checking and more duck-typing as optional condition.
+    : obj.constructor.name === 'URLSearchParams' ||
+    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
+    typeof obj.sort === 'function'
+
+const isBlob = obj =>
+  typeof obj === 'object' &&
+  typeof obj.arrayBuffer === 'function' &&
+  typeof obj.type === 'string' &&
+  typeof obj.stream === 'function' &&
+  typeof obj.constructor === 'function' &&
+  typeof obj.constructor.name === 'string' &&
+  /^(Blob|File)$/.test(obj.constructor.name) &&
+  /^(Blob|File)$/.test(obj[Symbol.toStringTag])
+
+const convertBody = (buffer, headers) => {
+  /* istanbul ignore if */
+  if (typeof convert !== 'function') {
+    throw new Error('The package `encoding` must be installed to use the textConverted() function')
+  }
+
+  const ct = headers && headers.get('content-type')
+  let charset = 'utf-8'
+  let res
+
+  // header
+  if (ct) {
+    res = /charset=([^;]*)/i.exec(ct)
+  }
+
+  // no charset in content type, peek at response body for at most 1024 bytes
+  const str = buffer.slice(0, 1024).toString()
+
+  // html5
+  if (!res && str) {
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
+  }
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
+    if (res) {
+      res = /charset=(.*)/i.exec(res.pop())
+    }
+  }
+
+  // xml
+  if (!res && str) {
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)
+  }
+
+  // found charset
+  if (res) {
+    charset = res.pop()
+
+    // prevent decode issues when sites use incorrect encoding
+    // ref: https://hsivonen.fi/encoding-menu/
+    if (charset === 'gb2312' || charset === 'gbk') {
+      charset = 'gb18030'
+    }
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(
+    buffer,
+    'UTF-8',
+    charset
+  ).toString()
+}
+
+module.exports = Body
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js
new file mode 100644
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/fetch-error.js
@@ -0,0 +1,32 @@
+'use strict'
+class FetchError extends Error {
+  constructor (message, type, systemError) {
+    super(message)
+    this.code = 'FETCH_ERROR'
+
+    // pick up code, expected, path, ...
+    if (systemError) {
+      // eslint-disable-next-line no-multi-assign
+      this.code = this.errno = systemError.code
+      this.erroredSysCall = systemError.syscall
+    }
+
+    // error message type
+    this.type = this.code === 'EBADSIZE' && this.found > this.expect
+      ? 'max-size' : type
+    this.message = message
+    Error.captureStackTrace(this, this.constructor)
+  }
+
+  get name () {
+    return 'FetchError'
+  }
+
+  // don't allow name to be overwritten
+  set name (n) {}
+
+  get [Symbol.toStringTag] () {
+    return 'FetchError'
+  }
+}
+module.exports = FetchError
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js
new file mode 100644
index 00000000000000..dd6e854d5ba399
--- /dev/null
+++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/headers.js
@@ -0,0 +1,267 @@
+'use strict'
+const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+const validateName = name => {
+  name = `${name}`
+  if (invalidTokenRegex.test(name) || name === '') {
+    throw new TypeError(`${name} is not a legal HTTP header name`)
+  }
+}
+
+const validateValue = value => {
+  value = `${value}`
+  if (invalidHeaderCharRegex.test(value)) {
+    throw new TypeError(`${value} is not a legal HTTP header value`)
+  }
+}
+
+const find = (map, name) => {
+  name = name.toLowerCase()
+  for (const key in map) {
+    if (key.toLowerCase() === name) {
+      return key
+    }
+  }
+  return undefined
+}
+
+const MAP = Symbol('map')
+class Headers {
+  constructor (init = undefined) {
+    this[MAP] = Object.create(null)
+    if (init instanceof Headers) {
+      const rawHeaders = init.raw()
+      const headerNames = Object.keys(rawHeaders)
+      for (const headerName of headerNames) {
+        for (const value of rawHeaders[headerName]) {
+          this.append(headerName, value)
+        }
+      }
+      return
+    }
+
+    // no-op
+    if (init === undefined || init === null) {
+      return
+    }
+
+    if (typeof init === 'object') {
+      const method = init[Symbol.iterator]
+      if (method !== null && method !== undefined) {
+        if (typeof method !== 'function') {
+          throw new TypeError('Header pairs must be iterable')
+        }
+
+        // sequence<sequence<ByteString>>
+        // Note: per spec we have to first exhaust the lists then process them
+        const pairs = []
+        for (const pair of init) {
+          if (typeof pair !== 'object' ||
+            typeof pair[Symbol.iterator] !==
'function') {
+            throw new TypeError('Each header pair must be iterable')
+          }
+          const arrPair = Array.from(pair)
+          if (arrPair.length !== 2) {
+            throw new TypeError('Each header pair must be a name/value tuple')
+          }
+          pairs.push(arrPair)
+        }
+
+        for (const pair of pairs) {
+          this.append(pair[0], pair[1])
+        }
+      } else {
+        // record<ByteString, ByteString>
+        for (const key of Object.keys(init)) {
+          this.append(key, init[key])
+        }
+      }
+    } else {
+      throw new TypeError('Provided initializer must be an object')
+    }
+  }
+
+  get (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key === undefined) {
+      return null
+    }
+
+    return this[MAP][key].join(', ')
+  }
+
+  forEach (callback, thisArg = undefined) {
+    let pairs = getHeaders(this)
+    for (let i = 0; i < pairs.length; i++) {
+      const [name, value] = pairs[i]
+      callback.call(thisArg, value, name, this)
+      // refresh in case the callback added more headers
+      pairs = getHeaders(this)
+    }
+  }
+
+  set (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    this[MAP][key !== undefined ? key : name] = [value]
+  }
+
+  append (name, value) {
+    name = `${name}`
+    value = `${value}`
+    validateName(name)
+    validateValue(value)
+    const key = find(this[MAP], name)
+    if (key !== undefined) {
+      this[MAP][key].push(value)
+    } else {
+      this[MAP][name] = [value]
+    }
+  }
+
+  has (name) {
+    name = `${name}`
+    validateName(name)
+    return find(this[MAP], name) !== undefined
+  }
+
+  delete (name) {
+    name = `${name}`
+    validateName(name)
+    const key = find(this[MAP], name)
+    if (key !== undefined) {
+      delete this[MAP][key]
+    }
+  }
+
+  raw () {
+    return this[MAP]
+  }
+
+  keys () {
+    return new HeadersIterator(this, 'key')
+  }
+
+  values () {
+    return new HeadersIterator(this, 'value')
+  }
+
+  [Symbol.iterator] () {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  entries () {
+    return new HeadersIterator(this, 'key+value')
+  }
+
+  get [Symbol.toStringTag] () {
+    return 'Headers'
+  }
+
+  static exportNodeCompatibleHeaders (headers) {
+    const obj = Object.assign(Object.create(null), headers[MAP])
+
+    // http.request() only supports string as Host header. This hack makes
+    // specifying custom Host header possible.
+    const hostHeaderKey = find(headers[MAP], 'Host')
+    if (hostHeaderKey !== undefined) {
+      obj[hostHeaderKey] = obj[hostHeaderKey][0]
+    }
+
+    return obj
+  }
+
+  static createHeadersLenient (obj) {
+    const headers = new Headers()
+    for (const name of Object.keys(obj)) {
+      if (invalidTokenRegex.test(name)) {
+        continue
+      }
+
+      if (Array.isArray(obj[name])) {
+        for (const val of obj[name]) {
+          if (invalidHeaderCharRegex.test(val)) {
+            continue
+          }
+
+          if (headers[MAP][name] === undefined) {
+            headers[MAP][name] = [val]
+          } else {
+            headers[MAP][name].push(val)
+          }
+        }
+      } else if (!invalidHeaderCharRegex.test(obj[name])) {
+        headers[MAP][name] = [obj[name]]
+      }
+    }
+    return headers
+  }
+}
+
+Object.defineProperties(Headers.prototype, {
+  get: { enumerable: true },
+  forEach: { enumerable: true },
+  set: { enumerable: true },
+  append: { enumerable: true },
+  has: { enumerable: true },
+  delete: { enumerable: true },
+  keys: { enumerable: true },
+  values: { enumerable: true },
+  entries: { enumerable: true },
+})
+
+const getHeaders = (headers, kind = 'key+value') =>
+  Object.keys(headers[MAP]).sort().map(
+    kind === 'key' ? k => k.toLowerCase()
+      : kind === 'value' ?
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { + throw new TypeError('Value of `this` is not a HeadersIterator') + } + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 00000000000000..da402161670e65 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,377 @@ +'use strict' +const { URL } = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +// XXX this should really be split up and unit-ized for easier testing +// and better DRY implementation of data/http request aborting +const fetch = async (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + // delay 1 promise tick so that the consumer can abort right away + return Promise.resolve().then(() => new Promise((resolve, reject) => { + let type, data + try { + const { pathname, search } = new URL(url) + const split = pathname.split(',') + if (split.length < 2) { + throw new Error('invalid data: URI') + } + const mime = split.shift() + const base64 = /;base64$/.test(mime) + type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime + const rawData = decodeURIComponent(split.join(',') + search) + data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) + } catch (er) { + return reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + + const { signal } = request + if (signal && signal.aborted) { + return reject(new AbortError('The user aborted a request.')) + } + + const headers = { 'Content-Length': data.length } + if (type) { + headers['Content-Type'] = type + } + return resolve(new Response(data, { headers })) + })) + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
https : http).request
+    const { signal } = request
+    let response = null
+    const abort = () => {
+      const error = new AbortError('The user aborted a request.')
+      reject(error)
+      if (Minipass.isStream(request.body) &&
+          typeof request.body.destroy === 'function') {
+        request.body.destroy(error)
+      }
+      if (response && response.body) {
+        response.body.emit('error', error)
+      }
+    }
+
+    if (signal && signal.aborted) {
+      return abort()
+    }
+
+    const abortAndFinalize = () => {
+      abort()
+      finalize()
+    }
+
+    const finalize = () => {
+      req.abort()
+      if (signal) {
+        signal.removeEventListener('abort', abortAndFinalize)
+      }
+      clearTimeout(reqTimeout)
+    }
+
+    // send request
+    const req = send(options)
+
+    if (signal) {
+      signal.addEventListener('abort', abortAndFinalize)
+    }
+
+    let reqTimeout = null
+    if (request.timeout) {
+      req.once('socket', () => {
+        reqTimeout = setTimeout(() => {
+          reject(new FetchError(`network timeout at: ${
+            request.url}`, 'request-timeout'))
+          finalize()
+        }, request.timeout)
+      })
+    }
+
+    req.on('error', er => {
+      // if a 'response' event is emitted before the 'error' event, then by the
+      // time this handler is run it's too late to reject the Promise for the
+      // response. instead, we forward the error event to the response stream
+      // so that the error will surface to the user when they try to consume
+      // the body. this is done as a side effect of aborting the request except
+      // for in windows, where we must forward the event manually, otherwise
+      // there is no longer a ref'd socket attached to the request and the
+      // stream never ends so the event loop runs out of work and the process
+      // exits without warning.
+      // coverage skipped here due to the difficulty in testing
+      // istanbul ignore next
+      if (req.res) {
+        req.res.emit('error', er)
+      }
+      reject(new FetchError(`request to ${request.url} failed, reason: ${
+        er.message}`, 'system', er))
+      finalize()
+    })
+
+    req.on('response', res => {
+      clearTimeout(reqTimeout)
+
+      const headers = createHeadersLenient(res.headers)
+
+      // HTTP fetch step 5
+      if (fetch.isRedirect(res.statusCode)) {
+        // HTTP fetch step 5.2
+        const location = headers.get('Location')
+
+        // HTTP fetch step 5.3
+        let locationURL = null
+        try {
+          locationURL = location === null ? null : new URL(location, request.url).toString()
+        } catch {
+          // error here can only be invalid URL in Location: header
+          // do not throw when options.redirect == manual
+          // let the user extract the erroneous redirect URL
+          if (request.redirect !== 'manual') {
+            /* eslint-disable-next-line max-len */
+            reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect'))
+            finalize()
+            return
+          }
+        }
+
+        // HTTP fetch step 5.5
+        if (request.redirect === 'error') {
+          reject(new FetchError('uri requested responds with a redirect, ' +
+            `redirect mode is set to error: ${request.url}`, 'no-redirect'))
+          finalize()
+          return
+        } else if (request.redirect === 'manual') {
+          // node-fetch-specific step: make manual redirect a bit easier to
+          // use by setting the Location header value to the resolved URL.
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + } else if (request.redirect === 'follow' && locationURL !== null) { + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', (new URL(locationURL)).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // if the redirect is to a new hostname, strip the authorization and cookie headers + const parsedOriginal = new URL(request.url) + const parsedRedirect = new URL(locationURL) + if (parsedOriginal.hostname !== parsedRedirect.hostname) { + requestOpts.headers.delete('authorization') + requestOpts.headers.delete('cookie') + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // if an error occurs, either on the response stream itself, on one of the + // decoder streams, or a response length timeout from the Body class, we + // forward the error through to our internal body stream. If we see an + // error event on that, we call finalize to abort the request and ensure + // we don't leave a socket believing a request is in flight. + // this is difficult to test, so lacks specific coverage. + body.on('error', finalize) + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolveTrailer => + res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. 
no content response (204) + // 5. content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings === 'gzip' || codings === 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings === 'deflate' || codings === 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + // for br + if (codings === 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. 
+ body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError +fetch.AbortError = AbortError diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 00000000000000..054439e6699107 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,282 @@ +'use strict' +const { URL } = require('url') +const { Minipass } = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? new URL(input.url) + : input && input.href ? new URL(input.href) + : new URL(`${input}`) + + if (isRequest(input)) { + init = { ...input[INTERNALS], ...init } + } else if (!input || typeof input === 'string') { + input = {} + } + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) { + throw new TypeError('Request with GET/HEAD method cannot have body') + } + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + const signal = 'signal' in init ? 
init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { + throw new TypeError('Expected signal must be an instanceof AbortSignal') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method () { + return this[INTERNALS].method + } + + get url () { + return this[INTERNALS].parsedURL.toString() + } + + get headers () { + return this[INTERNALS].headers + } + + get redirect () { + return this[INTERNALS].redirect + } + + get signal () { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*') + } + + // Basic fetch + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported') + } + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue + '') + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', defaultUserAgent) + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate') + } + + const agent = typeof request.agent === 'function' + ? 
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + // we cannot spread parsedURL directly, so we have to read each property one-by-one + // and map them to the equivalent https?.request() method options + const urlProps = { + auth: parsedURL.username || parsedURL.password + ? `${parsedURL.username}:${parsedURL.password}` + : '', + host: parsedURL.host, + hostname: parsedURL.hostname, + path: `${parsedURL.pathname}${parsedURL.search}`, + port: parsedURL.port, + protocol: parsedURL.protocol, + } + + return { + ...urlProps, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + timeout: request.timeout, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 00000000000000..54cb52db3594a7 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,90 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + 
+module.exports = Response + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json new file mode 100644 index 00000000000000..d491a7fba126d0 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/minipass-fetch/package.json @@ -0,0 +1,69 @@ +{ + "name": "minipass-fetch", + "version": "3.0.5", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test:tls-fixtures": "./test/fixtures/tls/setup.sh", + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "@ungap/url-search-params": "^0.2.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.7.3", + "encoding": "^0.1.13", + "form-data": "^4.0.0", + "nock": "^13.2.4", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^16.0.0" + }, + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE new file mode 100644 index 00000000000000..83837797202b70 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
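Taken together, the minipass-fetch files above implement a window.fetch-style client on Minipass streams. As a quick orientation, a minimal consumption sketch — not part of the diff; `json()` comes from the shared Body base class in lib/body.js, which this section does not include, and the URL is purely illustrative:

  const fetch = require('minipass-fetch')

  async function main () {
    // redirect mode and timeout are plumbed through Request (lib/request.js above);
    // 'follow' is the default, capped at 20 hops by `this.follow`
    const res = await fetch('https://registry.npmjs.org/abbrev', {
      redirect: 'follow',
      timeout: 30 * 1000,
    })
    if (!res.ok) {
      throw new Error(`HTTP ${res.status} ${res.statusText}`)
    }
    return res.json() // inherited from the Body base class (not shown here)
  }

  main().then(console.log, console.error)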
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js new file mode 100644 index 00000000000000..86d90861078dab --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json new file mode 100644 index 00000000000000..4ab89102ecc9b5 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/proc-log/package.json @@ -0,0 +1,45 @@ +{ + 
"name": "proc-log", + "version": "4.2.0", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md new file mode 100644 index 00000000000000..e335388869f50f --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js new file mode 100644 index 00000000000000..7d749ed480fb98 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. +const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. 
+ if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? 
STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the foot of the + // letter as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there's multiple standards, and the + // specified is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. + // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash. 
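+  // (a digest may be added under a new algorithm, but merge() throws
+  // 'hashes do not match' if an algorithm already present here carries
+  // a different digest in the incoming integrity)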
+ merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. + pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) + + return integrity + }, + } +} + +const NODE_HASHES = crypto.getHashes() + +// This is a Best Effort™ at a reasonable priority for hash algos +const DEFAULT_PRIORITY = [ + 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + // TODO - it's unclear _which_ of these Node will actually use as its name + // for the algorithm, so we guesswork it based on the OpenSSL names. + 'sha3', + 'sha3-256', 'sha3-384', 'sha3-512', + 'sha3_256', 'sha3_384', 'sha3_512', +].filter(algo => NODE_HASHES.includes(algo)) + +function getPrioritizedHash (algo1, algo2) { + /* eslint-disable-next-line max-len */ + return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) + ? algo1 + : algo2 +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json new file mode 100644 index 00000000000000..28395414e4643c --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/ssri/package.json @@ -0,0 +1,65 @@ +{ + "name": "ssri", + "version": "10.0.6", + "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish", + "posttest": "npm run lint", + "test": "tap", + "coverage": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ssri.git" + }, + "keywords": [ + "w3c", + "web", + "security", + "integrity", + "checksum", + "hashing", + "subresource integrity", + "sri", + "sri hash", + "sri string", + "sri generator", + "html" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE new file mode 100644 index 00000000000000..69619c125ea7ef --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/LICENSE @@ -0,0 +1,5 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js new file mode 100644 index 00000000000000..d067d2e709809a --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/lib/index.js @@ -0,0 +1,7 @@ +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json new file mode 100644 index 00000000000000..b2fbf0666489a6 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-filename/package.json @@ -0,0 +1,51 @@ +{ + "name": "unique-filename", + "version": "3.0.0", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-filename.git" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "dependencies": { + "unique-slug": "^4.0.0" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE new file mode 100644 index 00000000000000..7953647e7760b8 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
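With the full ssri implementation above in view, the exported helpers (fromData, checkData, fromStream, integrityStream, and friends) cover both generating and verifying SRI strings. A minimal usage sketch — not part of the diff, relying only on the API visible in ssri/lib/index.js:

  const ssri = require('ssri')

  const data = Buffer.from('hello, world')

  // generate integrity metadata; sha512 is the sole DEFAULT_ALGORITHMS entry
  const integrity = ssri.fromData(data, { algorithms: ['sha512'] })
  console.log(integrity.toString()) // 'sha512-<base64 digest>'

  // verify: returns the matching Hash object on success, and with
  // { error: true } throws an EINTEGRITY error on mismatch
  const ok = ssri.checkData(data, integrity, { error: true })
  console.log(ok.algorithm) // 'sha512'

  try {
    ssri.checkData(Buffer.from('tampered'), integrity, { error: true })
  } catch (err) {
    console.error(err.code) // 'EINTEGRITY'
  }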
diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js new file mode 100644 index 00000000000000..1bac84d95d7307 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/lib/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).slice(-8) + } else { + return (Math.random().toString(16) + '0000000').slice(2, 10) + } +} diff --git a/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json new file mode 100644 index 00000000000000..33732cdbb42859 --- /dev/null +++ b/deps/npm/node_modules/@sigstore/sign/node_modules/unique-slug/package.json @@ -0,0 +1,47 @@ +{ + "name": "unique-slug", + "version": "4.0.0", + "description": "Generate a unique character string suitible for use in files and URLs.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^3.1.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/abbrev/package.json b/deps/npm/node_modules/abbrev/package.json index e26400445631ad..9dfcc0095d7dc6 100644 --- a/deps/npm/node_modules/abbrev/package.json +++ b/deps/npm/node_modules/abbrev/package.json @@ -1,26 +1,27 @@ { "name": "abbrev", - "version": "2.0.0", + "version": "3.0.0", "description": "Like ruby's abbrev module, but in js", "author": "GitHub Inc.", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/abbrev-js.git" + "url": "git+https://github.com/npm/abbrev-js.git" }, "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.8.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "tap": { @@ -34,10 +35,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.8.0" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/bin-links/lib/link-gently.js b/deps/npm/node_modules/bin-links/lib/link-gently.js index d1e955ec99b029..a39d3bced57b13 100644 --- a/deps/npm/node_modules/bin-links/lib/link-gently.js +++ b/deps/npm/node_modules/bin-links/lib/link-gently.js @@ -6,10 +6,16 @@ const { resolve, dirname } = require('path') const { lstat, mkdir, readlink, rm, symlink } = require('fs/promises') -const throwNonEnoent = er => { - if (er.code !== 'ENOENT') { - throw er +const { log } = require('proc-log') +const throwSignificant = er => { + if (er.code === 'ENOENT') { + return } + if (er.code === 'EACCES') { + log.warn('error adding file', er.message) + return + } + throw er } const rmOpts = { @@ -37,8 +43,8 @@ const linkGently = async ({ path, to, from, absFrom, force }) => { // or at least a warning, but npm has always behaved this // way in the past, so it'd be a breaking change return Promise.all([ - lstat(absFrom).catch(throwNonEnoent), - lstat(to).catch(throwNonEnoent), + lstat(absFrom).catch(throwSignificant), + lstat(to).catch(throwSignificant), ]).then(([stFrom, stTo]) => { // not present in package, skip it if (!stFrom) { diff --git a/deps/npm/node_modules/bin-links/package.json b/deps/npm/node_modules/bin-links/package.json index 1872756bb4b48c..22858d660ae0b9 100644 --- a/deps/npm/node_modules/bin-links/package.json +++ b/deps/npm/node_modules/bin-links/package.json @@ -1,16 +1,17 @@ { "name": "bin-links", - "version": "4.0.4", + "version": "5.0.0", "description": "JavaScript package binary linker", "main": "./lib/index.js", "scripts": { "snap": "tap", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -23,14 +24,15 @@ ], "license": "ISC", "dependencies": { - "cmd-shim": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "read-cmd-shim": "^4.0.0", - "write-file-atomic": "^5.0.0" + "cmd-shim": "^7.0.0", + "npm-normalize-package-bin": "^4.0.0", + "proc-log": "^5.0.0", + "read-cmd-shim": "^5.0.0", + "write-file-atomic": "^6.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "require-inject": "^1.4.4", "tap": "^16.0.1" }, @@ -47,13 +49,13 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "windowsCI": false, - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/cacache/lib/entry-index.js index 89c28f2f257d48..0e09b10818d097 100644 --- a/deps/npm/node_modules/cacache/lib/entry-index.js +++ b/deps/npm/node_modules/cacache/lib/entry-index.js @@ -19,7 +19,6 @@ const hashToSegments = require('./util/hash-to-segments') const indexV = require('../package.json')['cache-version'].index const { moveFile } = require('@npmcli/fs') -const pMap = require('p-map') const lsStreamConcurrency = 5 module.exports.NotFoundError = class NotFoundError extends Error { @@ -184,6 +183,7 @@ function lsStream (cache) { // Set all this up to run on the stream and then just return the stream Promise.resolve().then(async () => { + const { default: pMap } = await import('p-map') const buckets = await readdirOrEmpty(indexDir) await pMap(buckets, async (bucket) => { const bucketPath = path.join(indexDir, bucket) diff --git a/deps/npm/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/cacache/lib/verify.js index d7423da1295b68..dcff3aa73f3173 100644 --- a/deps/npm/node_modules/cacache/lib/verify.js +++ b/deps/npm/node_modules/cacache/lib/verify.js @@ -8,7 +8,6 @@ const { truncate, writeFile, } = require('fs/promises') -const pMap = require('p-map') const contentPath = require('./content/path') const fsm = require('fs-minipass') const glob = require('./util/glob.js') @@ -93,6 +92,7 @@ async function fixPerms (cache, opts) { // async function garbageCollect (cache, opts) { opts.log.silly('verify', 'garbage collecting content') + const { default: pMap } = await import('p-map') const indexStream = index.lsStream(cache) const liveContent = new Set() indexStream.on('data', (entry) => { @@ -176,6 +176,7 @@ async function verifyContent (filepath, sri) { async function rebuildIndex (cache, opts) { opts.log.silly('verify', 'rebuilding index') + const { default: pMap } = await import('p-map') const entries = await index.ls(cache) const stats = { missingContent: 0, diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/LICENSE.md b/deps/npm/node_modules/cacache/node_modules/chownr/LICENSE.md new file mode 100644 index 00000000000000..881248b6d7f0ca --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/LICENSE.md @@ -0,0 +1,63 @@ +All packages under `src/` are licensed according to the terms in +their respective `LICENSE` or `LICENSE.md` files. + +The remainder of this project is licensed under the Blue Oak +Model License, as follows: + +----- + +# Blue Oak Model License + +Version 1.0.0 + +## Purpose + +This license gives everyone as much permission to work with +this software as possible, while protecting contributors +from liability. + +## Acceptance + +In order to receive this license, you must agree to its +rules. The rules of this license are both obligations +under that agreement and conditions to your license. +You must not do anything with this software that triggers +a rule that you cannot or will not follow. + +## Copyright + +Each contributor licenses you to do everything with this +software that would otherwise infringe that contributor's +copyright in it. + +## Notices + +You must ensure that everyone who gets a copy of +any part of this software from you, with or without +changes, also gets the text of this license or a link to +. 
+ +## Excuse + +If anyone notifies you in writing that you have not +complied with [Notices](#notices), you can keep your +license by taking all practical steps to comply within 30 +days after the notice. If you do not do so, your license +ends immediately. + +## Patent + +Each contributor licenses you to do everything with this +software that would otherwise infringe any patent claims +they can license or become able to license. + +## Reliability + +No contributor can revoke this license. + +## No Liability + +***As far as the law allows, this software comes as is, +without any warranty or condition, and no contributor +will be liable to anyone for any damages related to this +software or this license, under any kind of legal claim.*** diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js new file mode 100644 index 00000000000000..6a7b68d5eac26e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/index.js @@ -0,0 +1,93 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.chownrSync = exports.chownr = void 0; +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const lchownSync = (path, uid, gid) => { + try { + return node_fs_1.default.lchownSync(path, uid, gid); + } + catch (er) { + if (er?.code !== 'ENOENT') + throw er; + } +}; +const chown = (cpath, uid, gid, cb) => { + node_fs_1.default.lchown(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er?.code !== 'ENOENT' ? er : null); + }); +}; +const chownrKid = (p, child, uid, gid, cb) => { + if (child.isDirectory()) { + (0, exports.chownr)(node_path_1.default.resolve(p, child.name), uid, gid, (er) => { + if (er) + return cb(er); + const cpath = node_path_1.default.resolve(p, child.name); + chown(cpath, uid, gid, cb); + }); + } + else { + const cpath = node_path_1.default.resolve(p, child.name); + chown(cpath, uid, gid, cb); + } +}; +const chownr = (p, uid, gid, cb) => { + node_fs_1.default.readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. 
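+        // note that ENOENT is treated as success below: the directory
+        // vanished out from under us, so there is nothing left to chown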
+ if (er) { + if (er.code === 'ENOENT') + return cb(); + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er); + } + if (er || !children.length) + return chown(p, uid, gid, cb); + let len = children.length; + let errState = null; + const then = (er) => { + /* c8 ignore start */ + if (errState) + return; + /* c8 ignore stop */ + if (er) + return cb((errState = er)); + if (--len === 0) + return chown(p, uid, gid, cb); + }; + for (const child of children) { + chownrKid(p, child, uid, gid, then); + } + }); +}; +exports.chownr = chownr; +const chownrKidSync = (p, child, uid, gid) => { + if (child.isDirectory()) + (0, exports.chownrSync)(node_path_1.default.resolve(p, child.name), uid, gid); + lchownSync(node_path_1.default.resolve(p, child.name), uid, gid); +}; +const chownrSync = (p, uid, gid) => { + let children; + try { + children = node_fs_1.default.readdirSync(p, { withFileTypes: true }); + } + catch (er) { + const e = er; + if (e?.code === 'ENOENT') + return; + else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP') + return lchownSync(p, uid, gid); + else + throw e; + } + for (const child of children) { + chownrKidSync(p, child, uid, gid); + } + return lchownSync(p, uid, gid); +}; +exports.chownrSync = chownrSync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/index.js new file mode 100644 index 00000000000000..5c2815297a67cb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/index.js @@ -0,0 +1,85 @@ +import fs from 'node:fs'; +import path from 'node:path'; +const lchownSync = (path, uid, gid) => { + try { + return fs.lchownSync(path, uid, gid); + } + catch (er) { + if (er?.code !== 'ENOENT') + throw er; + } +}; +const chown = (cpath, uid, gid, cb) => { + fs.lchown(cpath, uid, gid, er => { + // Skip ENOENT error + cb(er && er?.code !== 'ENOENT' ? er : null); + }); +}; +const chownrKid = (p, child, uid, gid, cb) => { + if (child.isDirectory()) { + chownr(path.resolve(p, child.name), uid, gid, (er) => { + if (er) + return cb(er); + const cpath = path.resolve(p, child.name); + chown(cpath, uid, gid, cb); + }); + } + else { + const cpath = path.resolve(p, child.name); + chown(cpath, uid, gid, cb); + } +}; +export const chownr = (p, uid, gid, cb) => { + fs.readdir(p, { withFileTypes: true }, (er, children) => { + // any error other than ENOTDIR or ENOTSUP means it's not readable, + // or doesn't exist. give up. 
+ if (er) { + if (er.code === 'ENOENT') + return cb(); + else if (er.code !== 'ENOTDIR' && er.code !== 'ENOTSUP') + return cb(er); + } + if (er || !children.length) + return chown(p, uid, gid, cb); + let len = children.length; + let errState = null; + const then = (er) => { + /* c8 ignore start */ + if (errState) + return; + /* c8 ignore stop */ + if (er) + return cb((errState = er)); + if (--len === 0) + return chown(p, uid, gid, cb); + }; + for (const child of children) { + chownrKid(p, child, uid, gid, then); + } + }); +}; +const chownrKidSync = (p, child, uid, gid) => { + if (child.isDirectory()) + chownrSync(path.resolve(p, child.name), uid, gid); + lchownSync(path.resolve(p, child.name), uid, gid); +}; +export const chownrSync = (p, uid, gid) => { + let children; + try { + children = fs.readdirSync(p, { withFileTypes: true }); + } + catch (er) { + const e = er; + if (e?.code === 'ENOENT') + return; + else if (e?.code === 'ENOTDIR' || e?.code === 'ENOTSUP') + return lchownSync(p, uid, gid); + else + throw e; + } + for (const child of children) { + chownrKidSync(p, child, uid, gid); + } + return lchownSync(p, uid, gid); +}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/chownr/package.json b/deps/npm/node_modules/cacache/node_modules/chownr/package.json new file mode 100644 index 00000000000000..09aa6b2e2e576d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/chownr/package.json @@ -0,0 +1,69 @@ +{ + "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", + "name": "chownr", + "description": "like `chown -R`", + "version": "3.0.0", + "repository": { + "type": "git", + "url": "git://github.com/isaacs/chownr.git" + }, + "files": [ + "dist" + ], + "devDependencies": { + "@types/node": "^20.12.5", + "mkdirp": "^3.0.1", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tap": "^18.7.2", + "tshy": "^1.13.1", + "typedoc": "^0.25.12" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . 
--loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "license": "BlueOak-1.0.0", + "engines": { + "node": ">=18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE b/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE new file mode 100644 index 00000000000000..49f7efe431c9ea --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors +Copyright (c) 2017-2023 Node.js contributors. All rights reserved. +Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js new file mode 100644 index 00000000000000..dfc2c1957bfc99 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/constants.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.constants = void 0; +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
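One detail of the chownr implementation above worth calling out: the async walk issues one `lchown` callback per directory entry and joins them with a countdown plus a first-error latch, the standard pre-Promise way to aggregate parallel callbacks. A condensed sketch of just that pattern, with illustrative names rather than chownr's own:

```js
'use strict'

// Run task(item, cb) for every item in parallel. Call done() exactly once:
// with the first error to occur, or with null after the last task finishes.
function forEachCallback (items, task, done) {
  if (items.length === 0) return done(null)
  let pending = items.length
  let errState = null
  const then = (er) => {
    if (errState) return // a failure was already reported; ignore the rest
    if (er) return done((errState = er)) // latch and report the first error
    if (--pending === 0) done(null) // last successful callback finishes up
  }
  for (const item of items) task(item, then)
}
```

chownr adds one ordering rule on top of this: the parent directory itself is only chowned after every child callback has come back, so ownership changes propagate bottom-up.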
+const zlib_1 = __importDefault(require("zlib")); +/* c8 ignore start */ +const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +exports.constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# 
sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js new file mode 100644 index 00000000000000..ad65eef0495076 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/index.js @@ -0,0 +1,352 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0; +const assert_1 = __importDefault(require("assert")); +const buffer_1 = require("buffer"); +const minipass_1 = require("minipass"); +const zlib_1 = __importDefault(require("zlib")); +const constants_js_1 = require("./constants.js"); +var constants_js_2 = require("./constants.js"); +Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } }); +const OriginalBufferConcat = buffer_1.Buffer.concat; +const _superWrite = Symbol('_superWrite'); +class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +exports.ZlibError = ZlibError; +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends minipass_1.Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new zlib_1.default[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = buffer_1.Buffer.from(chunk, encoding); + if (this.#sawError) + return; + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + buffer_1.Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + buffer_1.Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + buffer_1.Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](buffer_1.Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](buffer_1.Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH; + opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants_js_1.constants.Z_SYNC_FLUSH); + (0, assert_1.default)(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +exports.Zlib = Zlib; +// minimal 2-byte header +class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +exports.Deflate = Deflate; +class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +exports.Inflate = Inflate; +class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +exports.Gzip = Gzip; +class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +exports.Gunzip = Gunzip; +// raw - no header +class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +exports.DeflateRaw = DeflateRaw; +class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +exports.InflateRaw = InflateRaw; +// auto-detect header. 
+class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +exports.Unzip = Unzip; +class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +exports.Brotli = Brotli; +class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +exports.BrotliCompress = BrotliCompress; +class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +exports.BrotliDecompress = BrotliDecompress; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js new file mode 100644 index 00000000000000..7faf40be5068d0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/constants.js @@ -0,0 +1,117 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
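The `write()` method in both minizlib builds relies on a deliberately invasive trick, spelled out in its comments: it calls the undocumented `zlib` internal `_processChunk()` to compress synchronously, and for the duration of that call it no-ops the handle's `close()` (so the native handle survives) and replaces `Buffer.concat` (so output chunks come back unjoined). The underlying shape is patch, call, restore; a small generic sketch, not minizlib's API:

```js
'use strict'

// Temporarily swap obj[key] for a stand-in while fn() runs, restoring the
// original in finally so the patch cannot leak even when fn() throws.
function withPatched (obj, key, replacement, fn) {
  const original = obj[key]
  obj[key] = replacement
  try {
    return fn()
  } finally {
    obj[key] = original
  }
}

// e.g. shield a handle from being closed by a routine it is handed to:
// const chunks = withPatched(handle, 'close', () => {},
//   () => handle._processChunk(chunk, flushFlag))
```

Because `_processChunk()` is internal API, the real code also has to restore `_handle` and re-attach its own 'error' listener afterward; the `finally` block above is the moral equivalent of that cleanup.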
+import realZlib from 'zlib'; +/* c8 ignore start */ +const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +export const constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No 
newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js new file mode 100644 index 00000000000000..a6269b505f47cc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/index.js @@ -0,0 +1,333 @@ +import assert from 'assert'; +import { Buffer } from 'buffer'; +import { Minipass } from 'minipass'; +import realZlib from 'zlib'; +import { constants } from './constants.js'; +export { constants } from './constants.js'; +const OriginalBufferConcat = Buffer.concat; +const _superWrite = Symbol('_superWrite'); +export class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new realZlib[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + assert(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding); + if (this.#sawError) + return; + assert(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +export class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants.Z_FINISH; + opts.fullFlushFlag = constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants.Z_SYNC_FLUSH); + assert(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +// minimal 2-byte header +export class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +export class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +export class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +export class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +// raw - no header +export class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +export class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +// auto-detect header. 
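The `Gzip` class above (in both builds) supports a `portable` option whose whole effect is rewriting byte 9 of the first chunk it emits. In the RFC 1952 gzip header, offset 9 is the OS field; `0xFF` means 'unknown', so the override erases the one header byte that otherwise varies by platform. The same effect with plain `node:zlib`, as a sketch:

```js
'use strict'
const zlib = require('node:zlib')

// Gzip a buffer, then stamp the header's OS field (offset 9) with 0xFF
// ("unknown") so the output no longer records e.g. 0x03 (Unix) vs. others.
function gzipPortable (buf) {
  const out = zlib.gzipSync(buf)
  out[9] = 0xff
  return out
}

// console.log(gzipPortable(Buffer.from('hello'))[9]) // 255
```

minizlib can do this in its `[_superWrite]` override because, as its comment notes, the full header always arrives in the first emitted chunk.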
+export class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +export class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +export class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +export class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/minizlib/package.json b/deps/npm/node_modules/cacache/node_modules/minizlib/package.json new file mode 100644 index 00000000000000..e94623ff43d353 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/minizlib/package.json @@ -0,0 +1,81 @@ +{ + "name": "minizlib", + "version": "3.0.1", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "./dist/commonjs/index.js", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. Schlueter <i@izs.me> (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "@types/node": "^20.11.29", + "mkdirp": "^3.0.1", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">= 18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/LICENSE b/deps/npm/node_modules/cacache/node_modules/mkdirp/LICENSE new file mode 100644 index 00000000000000..0a034db7a73b5d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/LICENSE @@ -0,0 +1,21 @@ +Copyright (c) 2011-2023 James Halliday (mail@substack.net) and Isaac Z. 
Schlueter (i@izs.me) + +This project is free software released under the MIT license: + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json new file mode 100644 index 00000000000000..9d04a66e16cd93 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/package.json @@ -0,0 +1,91 @@ +{ + "name": "mkdirp", + "description": "Recursively mkdir, like `mkdir -p`", + "version": "3.0.1", + "keywords": [ + "mkdir", + "directory", + "make dir", + "make", + "dir", + "recursive", + "native" + ], + "bin": "./dist/cjs/src/bin.js", + "main": "./dist/cjs/src/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/mjs/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/src/index.d.ts", + "default": "./dist/cjs/src/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . 
--loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.0", + "@types/node": "^18.11.9", + "@types/tap": "^15.0.7", + "c8": "^7.12.0", + "eslint-config-prettier": "^8.6.0", + "prettier": "^2.8.2", + "tap": "^16.3.3", + "ts-node": "^10.9.1", + "typedoc": "^0.23.21", + "typescript": "^4.9.3" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-mkdirp.git" + }, + "license": "MIT", + "engines": { + "node": ">=10" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts new file mode 100644 index 00000000000000..34e005228653c8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts @@ -0,0 +1,3 @@ +#!/usr/bin/env node +export {}; +//# sourceMappingURL=bin.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map new file mode 100644 index 00000000000000..c10c656ec75109 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.d.ts","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":""} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js new file mode 100755 index 00000000000000..757aae1fd96cb2 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js @@ -0,0 +1,80 @@ +#!/usr/bin/env node +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +const package_json_1 = require("../package.json"); +const usage = () => ` +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories + that don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m<mode> If a directory needs to be created, set the mode as an octal + --mode=<mode> permission string. 
+ + -v --version Print the mkdirp version number + + -h --help Print this helpful banner + + -p --print Print the first directories created for each path provided + + --manual Use manual implementation, even if native is available +`; +const dirs = []; +const opts = {}; +let doPrint = false; +let dashdash = false; +let manual = false; +for (const arg of process.argv.slice(2)) { + if (dashdash) + dirs.push(arg); + else if (arg === '--') + dashdash = true; + else if (arg === '--manual') + manual = true; + else if (/^-h/.test(arg) || /^--help/.test(arg)) { + console.log(usage()); + process.exit(0); + } + else if (arg === '-v' || arg === '--version') { + console.log(package_json_1.version); + process.exit(0); + } + else if (arg === '-p' || arg === '--print') { + doPrint = true; + } + else if (/^-m/.test(arg) || /^--mode=/.test(arg)) { + // these don't get covered in CI, but work locally + // weird because the tests below show as passing in the output. + /* c8 ignore start */ + const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8); + if (isNaN(mode)) { + console.error(`invalid mode argument: ${arg}\nMust be an octal number.`); + process.exit(1); + } + /* c8 ignore stop */ + opts.mode = mode; + } + else + dirs.push(arg); +} +const index_js_1 = require("./index.js"); +const impl = manual ? index_js_1.mkdirp.manual : index_js_1.mkdirp; +if (dirs.length === 0) { + console.error(usage()); +} +// these don't get covered in CI, but work locally +/* c8 ignore start */ +Promise.all(dirs.map(dir => impl(dir, opts))) + .then(made => (doPrint ? made.forEach(m => m && console.log(m)) : null)) + .catch(er => { + console.error(er.message); + if (er.code) + console.error(' code: ' + er.code); + process.exit(1); +}); +/* c8 ignore stop */ +//# sourceMappingURL=bin.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js.map new file mode 100644 index 00000000000000..d99295301b5fa7 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/bin.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"bin.js","sourceRoot":"","sources":["../../../src/bin.ts"],"names":[],"mappings":";;;AAEA,kDAAyC;AAGzC,MAAM,KAAK,GAAG,GAAG,EAAE,CAAC;;;;;;;;;;;;;;;;;;;;CAoBnB,CAAA;AAED,MAAM,IAAI,GAAa,EAAE,CAAA;AACzB,MAAM,IAAI,GAAkB,EAAE,CAAA;AAC9B,IAAI,OAAO,GAAY,KAAK,CAAA;AAC5B,IAAI,QAAQ,GAAG,KAAK,CAAA;AACpB,IAAI,MAAM,GAAG,KAAK,CAAA;AAClB,KAAK,MAAM,GAAG,IAAI,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,EAAE;IACvC,IAAI,QAAQ;QAAE,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;SACvB,IAAI,GAAG,KAAK,IAAI;QAAE,QAAQ,GAAG,IAAI,CAAA;SACjC,IAAI,GAAG,KAAK,UAAU;QAAE,MAAM,GAAG,IAAI,CAAA;SACrC,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAC/C,OAAO,CAAC,GAAG,CAAC,KAAK,EAAE,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;SAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,WAAW,EAAE;QAC9C,OAAO,CAAC,GAAG,CAAC,sBAAO,CAAC,CAAA;QACpB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;KAChB;SAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,SAAS,EAAE;QAC5C,OAAO,GAAG,IAAI,CAAA;KACf;SAAM,IAAI,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE;QAClD,kDAAkD;QAClD,+DAA+D;QAC/D,qBAAqB;QACrB,MAAM,IAAI,GAAG,QAAQ,CAAC,GAAG,CAAC,OAAO,CAAC,eAAe,EAAE,EAAE,CAAC,EAAE,CAAC,CAAC,CAAA;QAC1D,IAAI,KAAK,CAAC,IAAI,CAAC,EAAE;YACf,OAAO,CAAC,KAAK,CAAC,0BAA0B,GAAG,4BAA4B,CAAC,CAAA;YACxE,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;SAChB;QACD,oBAAoB;QACpB,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;KACjB;;QAAM,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;CACtB;AAED,yCAAmC;AACnC,MAAM,IAAI,GAAG,MAAM,CAAC,CAAC,CAAC,iBAAM,CAAC,MAAM,CAAC,CAAC,CAAC,iBAAM,CAAA;AAC5C,IAAI,IAAI,CAAC,MAAM,KAAK,CAAC,EAAE;IACrB,OAAO,CAAC,KAAK,CAAC,KAAK,EAAE,CAAC,CAAA;CACvB;AAED,kDAAkD;AAClD,qBAAqB;AACrB,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,IAAI,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,CAAC;KAC1C,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,EAAE,CAAC,CAAC,IAAI,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC;KACvE,KAAK,CAAC,EAAE,CAAC,EAAE;IACV,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,OAAO,CAAC,CAAA;IACzB,IAAI,EAAE,CAAC,IAAI;QAAE,OAAO,CAAC,KAAK,CAAC,UAAU,GAAG,EAAE,CAAC,IAAI,CAAC,CAAA;IAChD,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;AACjB,CAAC,CAAC,CAAA;AACJ,oBAAoB","sourcesContent":["#!/usr/bin/env node\n\nimport { version } from '../package.json'\nimport { MkdirpOptions } from './opts-arg.js'\n\nconst usage = () => `\nusage: mkdirp [DIR1,DIR2..] 
{OPTIONS}\n\n Create each supplied directory including any necessary parent directories\n that don't yet exist.\n\n If the directory already exists, do nothing.\n\nOPTIONS are:\n\n -m<mode> If a directory needs to be created, set the mode as an octal\n --mode=<mode> permission string.\n\n -v --version Print the mkdirp version number\n\n -h --help Print this helpful banner\n\n -p --print Print the first directories created for each path provided\n\n --manual Use manual implementation, even if native is available\n`\n\nconst dirs: string[] = []\nconst opts: MkdirpOptions = {}\nlet doPrint: boolean = false\nlet dashdash = false\nlet manual = false\nfor (const arg of process.argv.slice(2)) {\n if (dashdash) dirs.push(arg)\n else if (arg === '--') dashdash = true\n else if (arg === '--manual') manual = true\n else if (/^-h/.test(arg) || /^--help/.test(arg)) {\n console.log(usage())\n process.exit(0)\n } else if (arg === '-v' || arg === '--version') {\n console.log(version)\n process.exit(0)\n } else if (arg === '-p' || arg === '--print') {\n doPrint = true\n } else if (/^-m/.test(arg) || /^--mode=/.test(arg)) {\n // these don't get covered in CI, but work locally\n // weird because the tests below show as passing in the output.\n /* c8 ignore start */\n const mode = parseInt(arg.replace(/^(-m|--mode=)/, ''), 8)\n if (isNaN(mode)) {\n console.error(`invalid mode argument: ${arg}\\nMust be an octal number.`)\n process.exit(1)\n }\n /* c8 ignore stop */\n opts.mode = mode\n } else dirs.push(arg)\n}\n\nimport { mkdirp } from './index.js'\nconst impl = manual ? mkdirp.manual : mkdirp\nif (dirs.length === 0) {\n console.error(usage())\n}\n\n// these don't get covered in CI, but work locally\n/* c8 ignore start */\nPromise.all(dirs.map(dir => impl(dir, opts)))\n .then(made => (doPrint ? 
made.forEach(m => m && console.log(m)) : null))\n .catch(er => {\n console.error(er.message)\n if (er.code) console.error(' code: ' + er.code)\n process.exit(1)\n })\n/* c8 ignore stop */\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts new file mode 100644 index 00000000000000..e47794b3bb72a3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts @@ -0,0 +1,4 @@ +import { MkdirpOptionsResolved } from './opts-arg.js'; +export declare const findMade: (opts: MkdirpOptionsResolved, parent: string, path?: string) => Promise<undefined | string>; +export declare const findMadeSync: (opts: MkdirpOptionsResolved, parent: string, path?: string) => undefined | string; +//# sourceMappingURL=find-made.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map new file mode 100644 index 00000000000000..00d5d1a4dbefdf --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.d.ts","sourceRoot":"","sources":["../../../src/find-made.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAA;AAErD,eAAO,MAAM,QAAQ,SACb,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,QAAQ,SAAS,GAAG,MAAM,CAe5B,CAAA;AAED,eAAO,MAAM,YAAY,SACjB,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,SAAS,GAAG,MAad,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js new file mode 100644 index 00000000000000..e831ef27cadc1d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js @@ -0,0 +1,35 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.findMadeSync = exports.findMade = void 0; +const path_1 = require("path"); +const findMade = async (opts, parent, path) => { + // we never want the 'made' return value to be a root directory + if (path === parent) { + return; + } + return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later + // will fail later + er => { + const fer = er; + return fer && fer.code === 'ENOENT' + ? 
(0, exports.findMadeSync)(opts, (0, path_1.dirname)(parent), parent) + : undefined; + } +}; +exports.findMadeSync = findMadeSync; +//# sourceMappingURL=find-made.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js.map new file mode 100644 index 00000000000000..30a0d66398878d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/find-made.js.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.js","sourceRoot":"","sources":["../../../src/find-made.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAGvB,MAAM,QAAQ,GAAG,KAAK,EAC3B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACgB,EAAE;IAC/B,+DAA+D;IAC/D,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAM;KACP;IAED,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAChC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,kBAAkB;IAC/D,AAD6C,kBAAkB;IAC/D,EAAE,CAAC,EAAE;QACH,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,IAAA,gBAAQ,EAAC,IAAI,EAAE,IAAA,cAAO,EAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YACzC,CAAC,CAAC,SAAS,CAAA;IACf,CAAC,CACF,CAAA;AACH,CAAC,CAAA;AAnBY,QAAA,QAAQ,YAmBpB;AAEM,MAAM,YAAY,GAAG,CAC1B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACO,EAAE;IACtB,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAO,SAAS,CAAA;KACjB;IAED,IAAI;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAA;KAC9D;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,IAAA,oBAAY,EAAC,IAAI,EAAE,IAAA,cAAO,EAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YAC7C,CAAC,CAAC,SAAS,CAAA;KACd;AACH,CAAC,CAAA;AAjBY,QAAA,YAAY,gBAiBxB","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptionsResolved } from './opts-arg.js'\n\nexport const findMade = async (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): Promise<undefined | string> => {\n // we never want the 'made' return value to be a root directory\n if (path === parent) {\n return\n }\n\n return opts.statAsync(parent).then(\n st => (st.isDirectory() ? path : undefined), // will fail later\n er => {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? 
findMadeSync(opts, dirname(parent), parent)\n : undefined\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts new file mode 100644 index 00000000000000..fc9e43b3a45de1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts @@ -0,0 +1,39 @@ +import { MkdirpOptions } from './opts-arg.js'; +export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +export { useNative, useNativeSync } from './use-native.js'; +export declare const mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const sync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +}; +export declare const manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +export declare const native: ((path: string, options?: MkdirpOptions | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +}; +export declare const nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +export declare const mkdirp: ((path: string, opts?: MkdirpOptions) => Promise<string | void | undefined>) & { + mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; + mkdirpNative: ((path: string, options?: MkdirpOptions | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + mkdirpNativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + mkdirpManual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + mkdirpManualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + sync: (path: string, opts?: MkdirpOptions) => string | void; + native: ((path: string, options?: MkdirpOptions | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise<string | void | undefined>) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + useNative: ((opts?: MkdirpOptions | undefined) => boolean) & { + sync: (opts?: MkdirpOptions | undefined) => boolean; + }; + useNativeSync: (opts?: MkdirpOptions | undefined) => boolean; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts.map new file mode 100644 index 
00000000000000..0e915bbc9a0c7a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAItD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAG1D,eAAO,MAAM,UAAU,SAAU,MAAM,SAAS,aAAa,kBAM5D,CAAA;AAED,eAAO,MAAM,IAAI,SARgB,MAAM,SAAS,aAAa,kBAQ/B,CAAA;AAC9B,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,oHAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,kFAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM,UACJ,MAAM,SAAS,aAAa;uBAdV,MAAM,SAAS,aAAa;;;;;;;;;iBAA5B,MAAM,SAAS,aAAa;;;;;;;;;;;;;CAoC5D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js new file mode 100644 index 00000000000000..ab9dc62cddda36 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js @@ -0,0 +1,53 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirp = exports.nativeSync = exports.native = exports.manualSync = exports.manual = exports.sync = exports.mkdirpSync = exports.useNativeSync = exports.useNative = exports.mkdirpNativeSync = exports.mkdirpNative = exports.mkdirpManualSync = exports.mkdirpManual = void 0; +const mkdirp_manual_js_1 = require("./mkdirp-manual.js"); +const mkdirp_native_js_1 = require("./mkdirp-native.js"); +const opts_arg_js_1 = require("./opts-arg.js"); +const path_arg_js_1 = require("./path-arg.js"); +const use_native_js_1 = require("./use-native.js"); +/* c8 ignore start */ +var mkdirp_manual_js_2 = require("./mkdirp-manual.js"); +Object.defineProperty(exports, "mkdirpManual", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManual; } }); +Object.defineProperty(exports, "mkdirpManualSync", { enumerable: true, get: function () { return mkdirp_manual_js_2.mkdirpManualSync; } }); +var mkdirp_native_js_2 = require("./mkdirp-native.js"); +Object.defineProperty(exports, "mkdirpNative", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNative; } }); +Object.defineProperty(exports, "mkdirpNativeSync", { enumerable: true, get: function () { return mkdirp_native_js_2.mkdirpNativeSync; } }); +var use_native_js_2 = require("./use-native.js"); +Object.defineProperty(exports, "useNative", { enumerable: true, get: function () { return use_native_js_2.useNative; } }); +Object.defineProperty(exports, "useNativeSync", { enumerable: true, get: function () { return use_native_js_2.useNativeSync; } }); +/* c8 ignore stop */ +const mkdirpSync = (path, opts) => { + path = (0, path_arg_js_1.pathArg)(path); + const resolved = (0, opts_arg_js_1.optsArg)(opts); + return (0, use_native_js_1.useNativeSync)(resolved) + ? 
(0, mkdirp_native_js_1.mkdirpNativeSync)(path, resolved) + : (0, mkdirp_manual_js_1.mkdirpManualSync)(path, resolved); +}; +exports.mkdirpSync = mkdirpSync; +exports.sync = exports.mkdirpSync; +exports.manual = mkdirp_manual_js_1.mkdirpManual; +exports.manualSync = mkdirp_manual_js_1.mkdirpManualSync; +exports.native = mkdirp_native_js_1.mkdirpNative; +exports.nativeSync = mkdirp_native_js_1.mkdirpNativeSync; +exports.mkdirp = Object.assign(async (path, opts) => { + path = (0, path_arg_js_1.pathArg)(path); + const resolved = (0, opts_arg_js_1.optsArg)(opts); + return (0, use_native_js_1.useNative)(resolved) + ? (0, mkdirp_native_js_1.mkdirpNative)(path, resolved) + : (0, mkdirp_manual_js_1.mkdirpManual)(path, resolved); +}, { + mkdirpSync: exports.mkdirpSync, + mkdirpNative: mkdirp_native_js_1.mkdirpNative, + mkdirpNativeSync: mkdirp_native_js_1.mkdirpNativeSync, + mkdirpManual: mkdirp_manual_js_1.mkdirpManual, + mkdirpManualSync: mkdirp_manual_js_1.mkdirpManualSync, + sync: exports.mkdirpSync, + native: mkdirp_native_js_1.mkdirpNative, + nativeSync: mkdirp_native_js_1.mkdirpNativeSync, + manual: mkdirp_manual_js_1.mkdirpManual, + manualSync: mkdirp_manual_js_1.mkdirpManualSync, + useNative: use_native_js_1.useNative, + useNativeSync: use_native_js_1.useNativeSync, +}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js.map new file mode 100644 index 00000000000000..fdb572677a98ef --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../../src/index.ts"],"names":[],"mappings":";;;AAAA,yDAAmE;AACnE,yDAAmE;AACnE,+CAAsD;AACtD,+CAAuC;AACvC,mDAA0D;AAC1D,qBAAqB;AACrB,uDAAmE;AAA1D,gHAAA,YAAY,OAAA;AAAE,oHAAA,gBAAgB,OAAA;AACvC,uDAAmE;AAA1D,gHAAA,YAAY,OAAA;AAAE,oHAAA,gBAAgB,OAAA;AACvC,iDAA0D;AAAjD,0GAAA,SAAS,OAAA;AAAE,8GAAA,aAAa,OAAA;AACjC,oBAAoB;AAEb,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC/D,IAAI,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,IAAA,6BAAa,EAAC,QAAQ,CAAC;QAC5B,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,QAAQ,CAAC;QAClC,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AACtC,CAAC,CAAA;AANY,QAAA,UAAU,cAMtB;AAEY,QAAA,IAAI,GAAG,kBAAU,CAAA;AACjB,QAAA,MAAM,GAAG,+BAAY,CAAA;AACrB,QAAA,UAAU,GAAG,mCAAgB,CAAA;AAC7B,QAAA,MAAM,GAAG,+BAAY,CAAA;AACrB,QAAA,UAAU,GAAG,mCAAgB,CAAA;AAC7B,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CACjC,KAAK,EAAE,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC3C,IAAI,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,IAAA,yBAAS,EAAC,QAAQ,CAAC;QACxB,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,QAAQ,CAAC;QAC9B,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AAClC,CAAC,EACD;IACE,UAAU,EAAV,kBAAU;IACV,YAAY,EAAZ,+BAAY;IACZ,gBAAgB,EAAhB,mCAAgB;IAChB,YAAY,EAAZ,+BAAY;IACZ,gBAAgB,EAAhB,mCAAgB;IAEhB,IAAI,EAAE,kBAAU;IAChB,MAAM,EAAE,+BAAY;IACpB,UAAU,EAAE,mCAAgB;IAC5B,MAAM,EAAE,+BAAY;IACpB,UAAU,EAAE,mCAAgB;IAC5B,SAAS,EAAT,yBAAS;IACT,aAAa,EAAb,6BAAa;CACd,CACF,CAAA","sourcesContent":["import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\nimport { pathArg } from './path-arg.js'\nimport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore start */\nexport { mkdirpManual, 
mkdirpManualSync } from './mkdirp-manual.js'\nexport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nexport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore stop */\n\nexport const mkdirpSync = (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNativeSync(resolved)\n ? mkdirpNativeSync(path, resolved)\n : mkdirpManualSync(path, resolved)\n}\n\nexport const sync = mkdirpSync\nexport const manual = mkdirpManual\nexport const manualSync = mkdirpManualSync\nexport const native = mkdirpNative\nexport const nativeSync = mkdirpNativeSync\nexport const mkdirp = Object.assign(\n async (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNative(resolved)\n ? mkdirpNative(path, resolved)\n : mkdirpManual(path, resolved)\n },\n {\n mkdirpSync,\n mkdirpNative,\n mkdirpNativeSync,\n mkdirpManual,\n mkdirpManualSync,\n\n sync: mkdirpSync,\n native: mkdirpNative,\n nativeSync: mkdirpNativeSync,\n manual: mkdirpManual,\n manualSync: mkdirpManualSync,\n useNative,\n useNativeSync,\n }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts new file mode 100644 index 00000000000000..e49cdf9f1bd122 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpManualSync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +export declare const mkdirpManual: ((path: string, options?: MkdirpOptions, made?: string | undefined | void) => Promise) & { + sync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +}; +//# sourceMappingURL=mkdirp-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map new file mode 100644 index 00000000000000..9301bab1ffb35b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-manual.d.ts","sourceRoot":"","sources":["../../../src/mkdirp-manual.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAmCvB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,QAAQ,MAAM,GAAG,SAAS,GAAG,IAAI,CAAC;iBA7C/B,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAAI;CAqF3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js new file mode 100644 index 00000000000000..d9bd1d8bb5a49b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js @@ -0,0 +1,79 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirpManual = exports.mkdirpManualSync = void 0; +const path_1 = require("path"); +const opts_arg_js_1 = require("./opts-arg.js"); +const mkdirpManualSync = (path, options, made) => { + const parent = (0, 
path_1.dirname)(path);
+    const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: false };
+    if (parent === path) {
+        try {
+            return opts.mkdirSync(path, opts);
+        }
+        catch (er) {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+            return;
+        }
+    }
+    try {
+        opts.mkdirSync(path, opts);
+        return made || path;
+    }
+    catch (er) {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, exports.mkdirpManualSync)(path, opts, (0, exports.mkdirpManualSync)(parent, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {
+            throw er;
+        }
+        try {
+            if (!opts.statSync(path).isDirectory())
+                throw er;
+        }
+        catch (_) {
+            throw er;
+        }
+    }
+};
+exports.mkdirpManualSync = mkdirpManualSync;
+exports.mkdirpManual = Object.assign(async (path, options, made) => {
+    const opts = (0, opts_arg_js_1.optsArg)(options);
+    opts.recursive = false;
+    const parent = (0, path_1.dirname)(path);
+    if (parent === path) {
+        return opts.mkdirAsync(path, opts).catch(er => {
+            // swallowed by recursive implementation on posix systems
+            // any other error is a failure
+            const fer = er;
+            if (fer && fer.code !== 'EISDIR') {
+                throw er;
+            }
+        });
+    }
+    return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => {
+        const fer = er;
+        if (fer && fer.code === 'ENOENT') {
+            return (0, exports.mkdirpManual)(parent, opts).then((made) => (0, exports.mkdirpManual)(path, opts, made));
+        }
+        if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {
+            throw er;
+        }
+        return opts.statAsync(path).then(st => {
+            if (st.isDirectory()) {
+                return made;
+            }
+            else {
+                throw er;
+            }
+        }, () => {
+            throw er;
+        });
+    });
+}, { sync: exports.mkdirpManualSync });
+//# sourceMappingURL=mkdirp-manual.js.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map
new file mode 100644
index 00000000000000..ff7ba24dca32ad
--- /dev/null
+++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-manual.js.map
@@ -0,0 +1 @@
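The manual implementation above is the portable fallback: it calls `mkdir` on the target and, on `ENOENT`, first creates the missing parent, threading the `made` value through the recursion so the outermost call reports the shallowest directory it actually created (`EEXIST` and `EROFS` are tolerated when the path already exists as a directory). A minimal usage sketch of that contract, assuming this vendored copy resolves as the `mkdirp` package:

    const { manual, manualSync } = require('mkdirp')

    // Resolves to the shallowest directory this call created,
    // or undefined when the whole chain already existed.
    manual('/tmp/demo/a/b/c').then(made => console.log(made))

    // Synchronous variant with the same return contract.
    console.log(manualSync('/tmp/demo/x/y'))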
+{"version":3,"file":"mkdirp-manual.js","sourceRoot":"","sources":["../../../src/mkdirp-manual.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAC9B,+CAAsD;AAE/C,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACL,EAAE;IAC7B,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,IAAI,GAAG,EAAE,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,CAAA;IAEtD,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,IAAI;YACF,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAClC;QAAC,OAAO,EAAE,EAAE;YACX,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;YACD,OAAM;SACP;KACF;IAED,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,IAAI,IAAI,CAAA;KACpB;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,wBAAgB,EAAC,IAAI,EAAE,IAAI,EAAE,IAAA,wBAAgB,EAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;SAC1E;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YAC/D,MAAM,EAAE,CAAA;SACT;QACD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;gBAAE,MAAM,EAAE,CAAA;SACjD;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAvCY,QAAA,gBAAgB,oBAuC5B;AAEY,QAAA,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACI,EAAE;IACtC,MAAM,IAAI,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,KAAK,CAAA;IACtB,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;YAC5C,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAC,CAAA;KACH;IAED,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CACrC,GAAG,EAAE,CAAC,IAAI,IAAI,IAAI,EAClB,KAAK,EAAC,EAAE,EAAC,EAAE;QACT,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,oBAAY,EAAC,MAAM,EAAE,IAAI,CAAC,CAAC,IAAI,CACpC,CAAC,IAAgC,EAAE,EAAE,CAAC,IAAA,oBAAY,EAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CACrE,CAAA;SACF;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YACxD,MAAM,EAAE,CAAA;SACT;QACD,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,EAAE,CAAC,EAAE;YACH,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE;gBACpB,OAAO,IAAI,CAAA;aACZ;iBAAM;gBACL,MAAM,EAAE,CAAA;aACT;QACH,CAAC,EACD,GAAG,EAAE;YACH,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC,CACF,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,wBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpManualSync = (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n): string | undefined | void => {\n const parent = dirname(path)\n const opts = { ...optsArg(options), recursive: false }\n\n if (parent === path) {\n try {\n return opts.mkdirSync(path, opts)\n } catch (er) {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n return\n }\n }\n\n try {\n opts.mkdirSync(path, opts)\n return made || path\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))\n }\n if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {\n throw er\n }\n try {\n if (!opts.statSync(path).isDirectory()) throw er\n } catch (_) 
{\n throw er\n }\n }\n}\n\nexport const mkdirpManual = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n ): Promise => {\n const opts = optsArg(options)\n opts.recursive = false\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirAsync(path, opts).catch(er => {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n })\n }\n\n return opts.mkdirAsync(path, opts).then(\n () => made || path,\n async er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(parent, opts).then(\n (made?: string | undefined | void) => mkdirpManual(path, opts, made)\n )\n }\n if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {\n throw er\n }\n return opts.statAsync(path).then(\n st => {\n if (st.isDirectory()) {\n return made\n } else {\n throw er\n }\n },\n () => {\n throw er\n }\n )\n }\n )\n },\n { sync: mkdirpManualSync }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts new file mode 100644 index 00000000000000..28b64814b2545a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpNativeSync: (path: string, options?: MkdirpOptions) => string | void | undefined; +export declare const mkdirpNative: ((path: string, options?: MkdirpOptions) => Promise) & { + sync: (path: string, options?: MkdirpOptions) => string | void | undefined; +}; +//# sourceMappingURL=mkdirp-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map new file mode 100644 index 00000000000000..379c0f6591c686 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-native.d.ts","sourceRoot":"","sources":["../../../src/mkdirp-native.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAoBlB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,KACtB,QAAQ,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;iBA5B/B,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAAS;CAgD3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js new file mode 100644 index 00000000000000..9f00567d7cc200 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirpNative = exports.mkdirpNativeSync = void 0; +const path_1 = require("path"); +const find_made_js_1 = require("./find-made.js"); +const mkdirp_manual_js_1 = require("./mkdirp-manual.js"); +const opts_arg_js_1 = require("./opts-arg.js"); +const mkdirpNativeSync = (path, options) => { + const opts = (0, opts_arg_js_1.optsArg)(options); + opts.recursive = true; + const parent = (0, path_1.dirname)(path); + if (parent === path) { + return 
opts.mkdirSync(path, opts); + } + const made = (0, find_made_js_1.findMadeSync)(opts, path); + try { + opts.mkdirSync(path, opts); + return made; + } + catch (er) { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return (0, mkdirp_manual_js_1.mkdirpManualSync)(path, opts); + } + else { + throw er; + } + } +}; +exports.mkdirpNativeSync = mkdirpNativeSync; +exports.mkdirpNative = Object.assign(async (path, options) => { + const opts = { ...(0, opts_arg_js_1.optsArg)(options), recursive: true }; + const parent = (0, path_1.dirname)(path); + if (parent === path) { + return await opts.mkdirAsync(path, opts); + } + return (0, find_made_js_1.findMade)(opts, path).then((made) => opts + .mkdirAsync(path, opts) + .then(m => made || m) + .catch(er => { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return (0, mkdirp_manual_js_1.mkdirpManual)(path, opts); + } + else { + throw er; + } + })); +}, { sync: exports.mkdirpNativeSync }); +//# sourceMappingURL=mkdirp-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map new file mode 100644 index 00000000000000..1f889ee98876cc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/mkdirp-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-native.js","sourceRoot":"","sources":["../../../src/mkdirp-native.ts"],"names":[],"mappings":";;;AAAA,+BAA8B;AAC9B,iDAAuD;AACvD,yDAAmE;AACnE,+CAAsD;AAE/C,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACI,EAAE;IAC7B,MAAM,IAAI,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACrB,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KAClC;IAED,MAAM,IAAI,GAAG,IAAA,2BAAY,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;IACrC,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,CAAA;KACZ;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,mCAAgB,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SACpC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAvBY,QAAA,gBAAgB,oBAuB5B;AAEY,QAAA,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACa,EAAE;IACtC,MAAM,IAAI,GAAG,EAAE,GAAG,IAAA,qBAAO,EAAC,OAAO,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAA;IACrD,MAAM,MAAM,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KACzC;IAED,OAAO,IAAA,uBAAQ,EAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,IAAyB,EAAE,EAAE,CAC7D,IAAI;SACD,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC;SACtB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC;SACpB,KAAK,CAAC,EAAE,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,IAAA,+BAAY,EAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAChC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;IACH,CAAC,CAAC,CACL,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,wBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { findMade, findMadeSync } from './find-made.js'\nimport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpNativeSync = (\n path: string,\n options?: MkdirpOptions\n): string | void | undefined => {\n const opts = optsArg(options)\n opts.recursive = true\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirSync(path, opts)\n }\n\n const made = findMadeSync(opts, path)\n 
try {\n opts.mkdirSync(path, opts)\n return made\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts)\n } else {\n throw er\n }\n }\n}\n\nexport const mkdirpNative = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions\n ): Promise => {\n const opts = { ...optsArg(options), recursive: true }\n const parent = dirname(path)\n if (parent === path) {\n return await opts.mkdirAsync(path, opts)\n }\n\n return findMade(opts, path).then((made?: string | undefined) =>\n opts\n .mkdirAsync(path, opts)\n .then(m => made || m)\n .catch(er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(path, opts)\n } else {\n throw er\n }\n })\n )\n },\n { sync: mkdirpNativeSync }\n)\n"]}
\ No newline at end of file
diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts
new file mode 100644
index 00000000000000..73d076b3b6923c
--- /dev/null
+++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts
@@ -0,0 +1,42 @@
+/// <reference types="node" />
+/// <reference types="node" />
+import { MakeDirectoryOptions, Stats } from 'fs';
+export interface FsProvider {
+    stat?: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any;
+    mkdir?: (path: string, opts: MakeDirectoryOptions & {
+        recursive?: boolean;
+    }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any;
+    statSync?: (path: string) => Stats;
+    mkdirSync?: (path: string, opts: MakeDirectoryOptions & {
+        recursive?: boolean;
+    }) => string | undefined;
+}
+interface Options extends FsProvider {
+    mode?: number | string;
+    fs?: FsProvider;
+    mkdirAsync?: (path: string, opts: MakeDirectoryOptions & {
+        recursive?: boolean;
+    }) => Promise<string | undefined>;
+    statAsync?: (path: string) => Promise<Stats>;
+}
+export type MkdirpOptions = Options | number | string;
+export interface MkdirpOptionsResolved {
+    mode: number;
+    fs: FsProvider;
+    mkdirAsync: (path: string, opts: MakeDirectoryOptions & {
+        recursive?: boolean;
+    }) => Promise<string | undefined>;
+    statAsync: (path: string) => Promise<Stats>;
+    stat: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any;
+    mkdir: (path: string, opts: MakeDirectoryOptions & {
+        recursive?: boolean;
+    }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any;
+    statSync: (path: string) => Stats;
+    mkdirSync: (path: string, opts: MakeDirectoryOptions & {
+        recursive?: boolean;
+    }) => string | undefined;
+    recursive?: boolean;
+}
+export declare const optsArg: (opts?: MkdirpOptions) => MkdirpOptionsResolved;
+export {};
+//# sourceMappingURL=opts-arg.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map
new file mode 100644
index 00000000000000..e575161714f651
--- /dev/null
+++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.d.ts.map
@@ -0,0 +1 @@
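The native variant takes the opposite approach: it sets `recursive: true` and lets `fs.mkdir` do the walking, first using `findMade`/`findMadeSync` to locate the shallowest missing ancestor so it can still report what got created, and falling back to the manual implementation if the native call fails with `ENOENT`. The options argument is normalized by `optsArg` (declared above): a bare number or octal string is shorthand for `{ mode }`, and an object may inject a custom `fs` provider per the `FsProvider` interface. A short sketch of the accepted spellings, again assuming the package resolves as `mkdirp`:

    const { mkdirp } = require('mkdirp')

    // A number and an octal string both normalize to { mode }.
    mkdirp('/tmp/demo/private', 0o700)
      .then(() => mkdirp('/tmp/demo/private2', '0700'))
      // An options object can carry the mode plus a custom fs provider
      // (stat/mkdir and their *Sync forms), e.g. an in-memory fs in tests.
      .then(() => mkdirp('/tmp/demo/plain', { mode: 0o775 }))
      .then(() => console.log('done'))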
+{"version":3,"file":"opts-arg.d.ts","sourceRoot":"","sources":["../../../src/opts-arg.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,oBAAoB,EAIpB,KAAK,EAEN,MAAM,IAAI,CAAA;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,CAAC,EAAE,CACN,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IAClC,SAAS,CAAC,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;CACxB;AAED,UAAU,OAAQ,SAAQ,UAAU;IAClC,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,UAAU,CAAA;IACf,UAAU,CAAC,EAAE,CACX,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;CAC7C;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,CAAA;AAErD,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAA;IACZ,EAAE,EAAE,UAAU,CAAA;IACd,UAAU,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;IAC3C,IAAI,EAAE,CACJ,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IACjC,SAAS,EAAE,CACT,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;IACvB,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB;AAED,eAAO,MAAM,OAAO,UAAW,aAAa,KAAG,qBA2C9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js new file mode 100644 index 00000000000000..e8f486c0905957 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js @@ -0,0 +1,38 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.optsArg = void 0; +const fs_1 = require("fs"); +const optsArg = (opts) => { + if (!opts) { + opts = { mode: 0o777 }; + } + else if (typeof opts === 'object') { + opts = { mode: 0o777, ...opts }; + } + else if (typeof opts === 'number') { + opts = { mode: opts }; + } + else if (typeof opts === 'string') { + opts = { mode: parseInt(opts, 8) }; + } + else { + throw new TypeError('invalid options argument'); + } + const resolved = opts; + const optsFs = opts.fs || {}; + opts.mkdir = opts.mkdir || optsFs.mkdir || fs_1.mkdir; + opts.mkdirAsync = opts.mkdirAsync + ? opts.mkdirAsync + : async (path, options) => { + return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made))); + }; + opts.stat = opts.stat || optsFs.stat || fs_1.stat; + opts.statAsync = opts.statAsync + ? opts.statAsync + : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? 
rej(err) : res(stats)))); + opts.statSync = opts.statSync || optsFs.statSync || fs_1.statSync; + opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || fs_1.mkdirSync; + return resolved; +}; +exports.optsArg = optsArg; +//# sourceMappingURL=opts-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map new file mode 100644 index 00000000000000..fd5590f40f54cd --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/opts-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"opts-arg.js","sourceRoot":"","sources":["../../../src/opts-arg.ts"],"names":[],"mappings":";;;AAAA,2BAOW;AAwDJ,MAAM,OAAO,GAAG,CAAC,IAAoB,EAAyB,EAAE;IACrE,IAAI,CAAC,IAAI,EAAE;QACT,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,CAAA;KACvB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,CAAA;KAChC;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,CAAA;KACtB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAA;KACnC;SAAM;QACL,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAA;KAChD;IAED,MAAM,QAAQ,GAAG,IAA6B,CAAA;IAC9C,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;IAE5B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,UAAK,CAAA;IAEhD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU;QAC/B,CAAC,CAAC,IAAI,CAAC,UAAU;QACjB,CAAC,CAAC,KAAK,EACH,IAAY,EACZ,OAAuD,EAC1B,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAqB,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAClD,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACzC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;QACH,CAAC,CAAA;IAEL,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,IAAI,SAAI,CAAA;IAC5C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS;QAC7B,CAAC,CAAC,IAAI,CAAC,SAAS;QAChB,CAAC,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE,CACrB,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,CAAA;IAEP,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,QAAQ,IAAI,aAAQ,CAAA;IAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,SAAS,IAAI,cAAS,CAAA;IAEhE,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA;AA3CY,QAAA,OAAO,WA2CnB","sourcesContent":["import {\n MakeDirectoryOptions,\n mkdir,\n mkdirSync,\n stat,\n Stats,\n statSync,\n} from 'fs'\n\nexport interface FsProvider {\n stat?: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => any\n mkdir?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync?: (path: string) => Stats\n mkdirSync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n}\n\ninterface Options extends FsProvider {\n mode?: number | string\n fs?: FsProvider\n mkdirAsync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync?: (path: string) => Promise\n}\n\nexport type MkdirpOptions = Options | number | string\n\nexport interface MkdirpOptionsResolved {\n mode: number\n fs: FsProvider\n mkdirAsync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync: (path: string) => Promise\n stat: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => 
any\n ) => any\n mkdir: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync: (path: string) => Stats\n mkdirSync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n recursive?: boolean\n}\n\nexport const optsArg = (opts?: MkdirpOptions): MkdirpOptionsResolved => {\n if (!opts) {\n opts = { mode: 0o777 }\n } else if (typeof opts === 'object') {\n opts = { mode: 0o777, ...opts }\n } else if (typeof opts === 'number') {\n opts = { mode: opts }\n } else if (typeof opts === 'string') {\n opts = { mode: parseInt(opts, 8) }\n } else {\n throw new TypeError('invalid options argument')\n }\n\n const resolved = opts as MkdirpOptionsResolved\n const optsFs = opts.fs || {}\n\n opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir\n\n opts.mkdirAsync = opts.mkdirAsync\n ? opts.mkdirAsync\n : async (\n path: string,\n options: MakeDirectoryOptions & { recursive?: boolean }\n ): Promise => {\n return new Promise((res, rej) =>\n resolved.mkdir(path, options, (er, made) =>\n er ? rej(er) : res(made)\n )\n )\n }\n\n opts.stat = opts.stat || optsFs.stat || stat\n opts.statAsync = opts.statAsync\n ? opts.statAsync\n : async (path: string) =>\n new Promise((res, rej) =>\n resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats)))\n )\n\n opts.statSync = opts.statSync || optsFs.statSync || statSync\n opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync\n\n return resolved\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts new file mode 100644 index 00000000000000..ad0ccfc482a485 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts @@ -0,0 +1,2 @@ +export declare const pathArg: (path: string) => string; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map new file mode 100644 index 00000000000000..3b52b077c6c05c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../../src/path-arg.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,SAAU,MAAM,WAyBnC,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js new file mode 100644 index 00000000000000..a6b457f6e23d58 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js @@ -0,0 +1,28 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.pathArg = void 0; +const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform; +const path_1 = require("path"); +const pathArg = (path) => { + if (/\0/.test(path)) { + // simulate same failure that node raises + throw Object.assign(new TypeError('path must be a string without null bytes'), { + path, + code: 'ERR_INVALID_ARG_VALUE', + }); + } + path = (0, path_1.resolve)(path); + if (platform === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = (0, path_1.parse)(path); + if 
(badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +exports.pathArg = pathArg; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js.map new file mode 100644 index 00000000000000..ad3b5d38cad3cd --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../../src/path-arg.ts"],"names":[],"mappings":";;;AAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA;AAC5E,+BAAqC;AAC9B,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,EAAE;IACtC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnB,yCAAyC;QACzC,MAAM,MAAM,CAAC,MAAM,CACjB,IAAI,SAAS,CAAC,0CAA0C,CAAC,EACzD;YACE,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CACF,CAAA;KACF;IAED,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IACpB,IAAI,QAAQ,KAAK,OAAO,EAAE;QACxB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;YACjD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;SACH;KACF;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAzBY,QAAA,OAAO,WAyBnB","sourcesContent":["const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform\nimport { parse, resolve } from 'path'\nexport const pathArg = (path: string) => {\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n throw Object.assign(\n new TypeError('path must be a string without null bytes'),\n {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n }\n )\n }\n\n path = resolve(path)\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts new file mode 100644 index 00000000000000..1c6cb619e30405 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const useNativeSync: (opts?: MkdirpOptions) => boolean; +export declare const useNative: ((opts?: MkdirpOptions) => boolean) & { + sync: (opts?: MkdirpOptions) => boolean; +}; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map new file mode 100644 index 00000000000000..7dc275e322ea3b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAMtD,eAAO,MAAM,aAAa,UAEd,aAAa,YAA0C,CAAA;AAEnE,eAAO,MAAM,SAAS,WAGR,aAAa;kBALf,aAAa;CASxB,CAAA"} \ No newline at end of file diff --git 
a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js new file mode 100644 index 00000000000000..550b3452688ee5 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.useNative = exports.useNativeSync = void 0; +const fs_1 = require("fs"); +const opts_arg_js_1 = require("./opts-arg.js"); +const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12); +exports.useNativeSync = !hasNative + ? () => false + : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdirSync === fs_1.mkdirSync; +exports.useNative = Object.assign(!hasNative + ? () => false + : (opts) => (0, opts_arg_js_1.optsArg)(opts).mkdir === fs_1.mkdir, { + sync: exports.useNativeSync, +}); +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js.map new file mode 100644 index 00000000000000..9a15efebb9ec28 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/cjs/src/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../../src/use-native.ts"],"names":[],"mappings":";;;AAAA,2BAAqC;AACrC,+CAAsD;AAEtD,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AACpD,MAAM,SAAS,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;AAElE,QAAA,aAAa,GAAG,CAAC,SAAS;IACrC,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAC,SAAS,KAAK,cAAS,CAAA;AAEtD,QAAA,SAAS,GAAG,MAAM,CAAC,MAAM,CACpC,CAAC,SAAS;IACR,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,CAAC,CAAC,KAAK,KAAK,UAAK,EAC3D;IACE,IAAI,EAAE,qBAAa;CACpB,CACF,CAAA","sourcesContent":["import { mkdir, mkdirSync } from 'fs'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nconst version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\nconst hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12)\n\nexport const useNativeSync = !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdirSync === mkdirSync\n\nexport const useNative = Object.assign(\n !hasNative\n ? 
() => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdir === mkdir,\n {\n sync: useNativeSync,\n }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts new file mode 100644 index 00000000000000..e47794b3bb72a3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts @@ -0,0 +1,4 @@ +import { MkdirpOptionsResolved } from './opts-arg.js'; +export declare const findMade: (opts: MkdirpOptionsResolved, parent: string, path?: string) => Promise; +export declare const findMadeSync: (opts: MkdirpOptionsResolved, parent: string, path?: string) => undefined | string; +//# sourceMappingURL=find-made.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts.map new file mode 100644 index 00000000000000..411aad1410eb7a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.d.ts","sourceRoot":"","sources":["../../src/find-made.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,qBAAqB,EAAE,MAAM,eAAe,CAAA;AAErD,eAAO,MAAM,QAAQ,SACb,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,QAAQ,SAAS,GAAG,MAAM,CAe5B,CAAA;AAED,eAAO,MAAM,YAAY,SACjB,qBAAqB,UACnB,MAAM,SACP,MAAM,KACZ,SAAS,GAAG,MAad,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js new file mode 100644 index 00000000000000..3e72fd59a2c1fb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js @@ -0,0 +1,30 @@ +import { dirname } from 'path'; +export const findMade = async (opts, parent, path) => { + // we never want the 'made' return value to be a root directory + if (path === parent) { + return; + } + return opts.statAsync(parent).then(st => (st.isDirectory() ? path : undefined), // will fail later + // will fail later + er => { + const fer = er; + return fer && fer.code === 'ENOENT' + ? findMade(opts, dirname(parent), parent) + : undefined; + }); +}; +export const findMadeSync = (opts, parent, path) => { + if (path === parent) { + return undefined; + } + try { + return opts.statSync(parent).isDirectory() ? path : undefined; + } + catch (er) { + const fer = er; + return fer && fer.code === 'ENOENT' + ? 
findMadeSync(opts, dirname(parent), parent) + : undefined; + } +}; +//# sourceMappingURL=find-made.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js.map new file mode 100644 index 00000000000000..7b58089c6266c1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/find-made.js.map @@ -0,0 +1 @@ +{"version":3,"file":"find-made.js","sourceRoot":"","sources":["../../src/find-made.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAG9B,MAAM,CAAC,MAAM,QAAQ,GAAG,KAAK,EAC3B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACgB,EAAE;IAC/B,+DAA+D;IAC/D,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAM;KACP;IAED,OAAO,IAAI,CAAC,SAAS,CAAC,MAAM,CAAC,CAAC,IAAI,CAChC,EAAE,CAAC,EAAE,CAAC,CAAC,EAAE,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC,EAAE,kBAAkB;IAC/D,AAD6C,kBAAkB;IAC/D,EAAE,CAAC,EAAE;QACH,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YACzC,CAAC,CAAC,SAAS,CAAA;IACf,CAAC,CACF,CAAA;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,CAC1B,IAA2B,EAC3B,MAAc,EACd,IAAa,EACO,EAAE;IACtB,IAAI,IAAI,KAAK,MAAM,EAAE;QACnB,OAAO,SAAS,CAAA;KACjB;IAED,IAAI;QACF,OAAO,IAAI,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAA;KAC9D;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,OAAO,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ;YACjC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,OAAO,CAAC,MAAM,CAAC,EAAE,MAAM,CAAC;YAC7C,CAAC,CAAC,SAAS,CAAA;KACd;AACH,CAAC,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptionsResolved } from './opts-arg.js'\n\nexport const findMade = async (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): Promise => {\n // we never want the 'made' return value to be a root directory\n if (path === parent) {\n return\n }\n\n return opts.statAsync(parent).then(\n st => (st.isDirectory() ? path : undefined), // will fail later\n er => {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? findMade(opts, dirname(parent), parent)\n : undefined\n }\n )\n}\n\nexport const findMadeSync = (\n opts: MkdirpOptionsResolved,\n parent: string,\n path?: string\n): undefined | string => {\n if (path === parent) {\n return undefined\n }\n\n try {\n return opts.statSync(parent).isDirectory() ? path : undefined\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n return fer && fer.code === 'ENOENT'\n ? 
findMadeSync(opts, dirname(parent), parent)\n : undefined\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts new file mode 100644 index 00000000000000..fc9e43b3a45de1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts @@ -0,0 +1,39 @@ +import { MkdirpOptions } from './opts-arg.js'; +export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +export { useNative, useNativeSync } from './use-native.js'; +export declare const mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const sync: (path: string, opts?: MkdirpOptions) => string | void; +export declare const manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +}; +export declare const manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; +export declare const native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +}; +export declare const nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; +export declare const mkdirp: ((path: string, opts?: MkdirpOptions) => Promise) & { + mkdirpSync: (path: string, opts?: MkdirpOptions) => string | void; + mkdirpNative: ((path: string, options?: MkdirpOptions | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + mkdirpNativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + mkdirpManual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + mkdirpManualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + sync: (path: string, opts?: MkdirpOptions) => string | void; + native: ((path: string, options?: MkdirpOptions | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + }; + nativeSync: (path: string, options?: MkdirpOptions | undefined) => string | void | undefined; + manual: ((path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => Promise) & { + sync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + }; + manualSync: (path: string, options?: MkdirpOptions | undefined, made?: string | void | undefined) => string | void | undefined; + useNative: ((opts?: MkdirpOptions | undefined) => boolean) & { + sync: (opts?: MkdirpOptions | undefined) => boolean; + }; + useNativeSync: (opts?: MkdirpOptions | undefined) => boolean; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts.map new file mode 100644 index 
00000000000000..cfcc78083857b1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAItD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAG1D,eAAO,MAAM,UAAU,SAAU,MAAM,SAAS,aAAa,kBAM5D,CAAA;AAED,eAAO,MAAM,IAAI,SARgB,MAAM,SAAS,aAAa,kBAQ/B,CAAA;AAC9B,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,oHAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM;;CAAe,CAAA;AAClC,eAAO,MAAM,UAAU,kFAAmB,CAAA;AAC1C,eAAO,MAAM,MAAM,UACJ,MAAM,SAAS,aAAa;uBAdV,MAAM,SAAS,aAAa;;;;;;;;;iBAA5B,MAAM,SAAS,aAAa;;;;;;;;;;;;;CAoC5D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js new file mode 100644 index 00000000000000..0217ecc8cdd83d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js @@ -0,0 +1,43 @@ +import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +import { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +import { optsArg } from './opts-arg.js'; +import { pathArg } from './path-arg.js'; +import { useNative, useNativeSync } from './use-native.js'; +/* c8 ignore start */ +export { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +export { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'; +export { useNative, useNativeSync } from './use-native.js'; +/* c8 ignore stop */ +export const mkdirpSync = (path, opts) => { + path = pathArg(path); + const resolved = optsArg(opts); + return useNativeSync(resolved) + ? mkdirpNativeSync(path, resolved) + : mkdirpManualSync(path, resolved); +}; +export const sync = mkdirpSync; +export const manual = mkdirpManual; +export const manualSync = mkdirpManualSync; +export const native = mkdirpNative; +export const nativeSync = mkdirpNativeSync; +export const mkdirp = Object.assign(async (path, opts) => { + path = pathArg(path); + const resolved = optsArg(opts); + return useNative(resolved) + ? 
mkdirpNative(path, resolved) + : mkdirpManual(path, resolved); +}, { + mkdirpSync, + mkdirpNative, + mkdirpNativeSync, + mkdirpManual, + mkdirpManualSync, + sync: mkdirpSync, + native: mkdirpNative, + nativeSync: mkdirpNativeSync, + manual: mkdirpManual, + manualSync: mkdirpManualSync, + useNative, + useNativeSync, +}); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js.map new file mode 100644 index 00000000000000..47a8133a070c8f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AACtD,OAAO,EAAE,OAAO,EAAE,MAAM,eAAe,CAAA;AACvC,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,qBAAqB;AACrB,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,oBAAoB;AAEpB,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC/D,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,aAAa,CAAC,QAAQ,CAAC;QAC5B,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,QAAQ,CAAC;QAClC,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AACtC,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,IAAI,GAAG,UAAU,CAAA;AAC9B,MAAM,CAAC,MAAM,MAAM,GAAG,YAAY,CAAA;AAClC,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAA;AAC1C,MAAM,CAAC,MAAM,MAAM,GAAG,YAAY,CAAA;AAClC,MAAM,CAAC,MAAM,UAAU,GAAG,gBAAgB,CAAA;AAC1C,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CACjC,KAAK,EAAE,IAAY,EAAE,IAAoB,EAAE,EAAE;IAC3C,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC9B,OAAO,SAAS,CAAC,QAAQ,CAAC;QACxB,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC;QAC9B,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,QAAQ,CAAC,CAAA;AAClC,CAAC,EACD;IACE,UAAU;IACV,YAAY;IACZ,gBAAgB;IAChB,YAAY;IACZ,gBAAgB;IAEhB,IAAI,EAAE,UAAU;IAChB,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,gBAAgB;IAC5B,MAAM,EAAE,YAAY;IACpB,UAAU,EAAE,gBAAgB;IAC5B,SAAS;IACT,aAAa;CACd,CACF,CAAA","sourcesContent":["import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\nimport { pathArg } from './path-arg.js'\nimport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore start */\nexport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nexport { mkdirpNative, mkdirpNativeSync } from './mkdirp-native.js'\nexport { useNative, useNativeSync } from './use-native.js'\n/* c8 ignore stop */\n\nexport const mkdirpSync = (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNativeSync(resolved)\n ? mkdirpNativeSync(path, resolved)\n : mkdirpManualSync(path, resolved)\n}\n\nexport const sync = mkdirpSync\nexport const manual = mkdirpManual\nexport const manualSync = mkdirpManualSync\nexport const native = mkdirpNative\nexport const nativeSync = mkdirpNativeSync\nexport const mkdirp = Object.assign(\n async (path: string, opts?: MkdirpOptions) => {\n path = pathArg(path)\n const resolved = optsArg(opts)\n return useNative(resolved)\n ? 
mkdirpNative(path, resolved)\n : mkdirpManual(path, resolved)\n },\n {\n mkdirpSync,\n mkdirpNative,\n mkdirpNativeSync,\n mkdirpManual,\n mkdirpManualSync,\n\n sync: mkdirpSync,\n native: mkdirpNative,\n nativeSync: mkdirpNativeSync,\n manual: mkdirpManual,\n manualSync: mkdirpManualSync,\n useNative,\n useNativeSync,\n }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts new file mode 100644 index 00000000000000..e49cdf9f1bd122 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpManualSync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +export declare const mkdirpManual: ((path: string, options?: MkdirpOptions, made?: string | undefined | void) => Promise) & { + sync: (path: string, options?: MkdirpOptions, made?: string | undefined | void) => string | undefined | void; +}; +//# sourceMappingURL=mkdirp-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map new file mode 100644 index 00000000000000..ae7f243d3ca78b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-manual.d.ts","sourceRoot":"","sources":["../../src/mkdirp-manual.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAmCvB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,QAAQ,MAAM,GAAG,SAAS,GAAG,IAAI,CAAC;iBA7C/B,MAAM,YACF,aAAa,SAChB,MAAM,GAAG,SAAS,GAAG,IAAI,KAC/B,MAAM,GAAG,SAAS,GAAG,IAAI;CAqF3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js new file mode 100644 index 00000000000000..a4d044e02d3bfc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js @@ -0,0 +1,75 @@ +import { dirname } from 'path'; +import { optsArg } from './opts-arg.js'; +export const mkdirpManualSync = (path, options, made) => { + const parent = dirname(path); + const opts = { ...optsArg(options), recursive: false }; + if (parent === path) { + try { + return opts.mkdirSync(path, opts); + } + catch (er) { + // swallowed by recursive implementation on posix systems + // any other error is a failure + const fer = er; + if (fer && fer.code !== 'EISDIR') { + throw er; + } + return; + } + } + try { + opts.mkdirSync(path, opts); + return made || path; + } + catch (er) { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made)); + } + if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') { + throw er; + } + try { + if (!opts.statSync(path).isDirectory()) + throw er; + } + catch (_) { + throw er; + } + } +}; +export const mkdirpManual = Object.assign(async (path, options, made) => { + const opts = optsArg(options); + opts.recursive = false; + const parent = dirname(path); + if (parent === path) { + return opts.mkdirAsync(path, 
opts).catch(er => { + // swallowed by recursive implementation on posix systems + // any other error is a failure + const fer = er; + if (fer && fer.code !== 'EISDIR') { + throw er; + } + }); + } + return opts.mkdirAsync(path, opts).then(() => made || path, async (er) => { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManual(parent, opts).then((made) => mkdirpManual(path, opts, made)); + } + if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') { + throw er; + } + return opts.statAsync(path).then(st => { + if (st.isDirectory()) { + return made; + } + else { + throw er; + } + }, () => { + throw er; + }); + }); +}, { sync: mkdirpManualSync }); +//# sourceMappingURL=mkdirp-manual.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map new file mode 100644 index 00000000000000..29eab250e126c8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-manual.js.map @@ -0,0 +1 @@ +{"version":3,"file":"mkdirp-manual.js","sourceRoot":"","sources":["../../src/mkdirp-manual.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC9B,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACL,EAAE;IAC7B,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,EAAE,SAAS,EAAE,KAAK,EAAE,CAAA;IAEtD,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,IAAI;YACF,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAClC;QAAC,OAAO,EAAE,EAAE;YACX,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;YACD,OAAM;SACP;KACF;IAED,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,IAAI,IAAI,CAAA;KACpB;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,gBAAgB,CAAC,IAAI,EAAE,IAAI,EAAE,gBAAgB,CAAC,MAAM,EAAE,IAAI,EAAE,IAAI,CAAC,CAAC,CAAA;SAC1E;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YAC/D,MAAM,EAAE,CAAA;SACT;QACD,IAAI;YACF,IAAI,CAAC,IAAI,CAAC,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE;gBAAE,MAAM,EAAE,CAAA;SACjD;QAAC,OAAO,CAAC,EAAE;YACV,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACvB,IAAgC,EACI,EAAE;IACtC,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,KAAK,CAAA;IACtB,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;YAC5C,yDAAyD;YACzD,+BAA+B;YAC/B,MAAM,GAAG,GAAG,EAA2B,CAAA;YACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;gBAChC,MAAM,EAAE,CAAA;aACT;QACH,CAAC,CAAC,CAAA;KACH;IAED,OAAO,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CACrC,GAAG,EAAE,CAAC,IAAI,IAAI,IAAI,EAClB,KAAK,EAAC,EAAE,EAAC,EAAE;QACT,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,YAAY,CAAC,MAAM,EAAE,IAAI,CAAC,CAAC,IAAI,CACpC,CAAC,IAAgC,EAAE,EAAE,CAAC,YAAY,CAAC,IAAI,EAAE,IAAI,EAAE,IAAI,CAAC,CACrE,CAAA;SACF;QACD,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,IAAI,GAAG,CAAC,IAAI,KAAK,OAAO,EAAE;YACxD,MAAM,EAAE,CAAA;SACT;QACD,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,IAAI,CAC9B,EAAE,CAAC,EAAE;YACH,IAAI,EAAE,CAAC,WAAW,EAAE,EAAE;gBACpB,OAAO,IAAI,CAAA;aACZ;iBAAM;gBACL,MAAM,EAAE,CAAA;aACT;QACH,CAAC,EACD,GAAG,EAAE;YACH,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC,CA
CF,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,gBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpManualSync = (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n): string | undefined | void => {\n const parent = dirname(path)\n const opts = { ...optsArg(options), recursive: false }\n\n if (parent === path) {\n try {\n return opts.mkdirSync(path, opts)\n } catch (er) {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n return\n }\n }\n\n try {\n opts.mkdirSync(path, opts)\n return made || path\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts, mkdirpManualSync(parent, opts, made))\n }\n if (fer && fer.code !== 'EEXIST' && fer && fer.code !== 'EROFS') {\n throw er\n }\n try {\n if (!opts.statSync(path).isDirectory()) throw er\n } catch (_) {\n throw er\n }\n }\n}\n\nexport const mkdirpManual = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions,\n made?: string | undefined | void\n ): Promise => {\n const opts = optsArg(options)\n opts.recursive = false\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirAsync(path, opts).catch(er => {\n // swallowed by recursive implementation on posix systems\n // any other error is a failure\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code !== 'EISDIR') {\n throw er\n }\n })\n }\n\n return opts.mkdirAsync(path, opts).then(\n () => made || path,\n async er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(parent, opts).then(\n (made?: string | undefined | void) => mkdirpManual(path, opts, made)\n )\n }\n if (fer && fer.code !== 'EEXIST' && fer.code !== 'EROFS') {\n throw er\n }\n return opts.statAsync(path).then(\n st => {\n if (st.isDirectory()) {\n return made\n } else {\n throw er\n }\n },\n () => {\n throw er\n }\n )\n }\n )\n },\n { sync: mkdirpManualSync }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts new file mode 100644 index 00000000000000..28b64814b2545a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const mkdirpNativeSync: (path: string, options?: MkdirpOptions) => string | void | undefined; +export declare const mkdirpNative: ((path: string, options?: MkdirpOptions) => Promise) & { + sync: (path: string, options?: MkdirpOptions) => string | void | undefined; +}; +//# sourceMappingURL=mkdirp-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map new file mode 100644 index 00000000000000..517dfabe7d1213 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mkdirp-native.d.ts","sourceRoot":"","sources":["../../src/mkdirp-native.ts"],"names":[],"mappings":"AAGA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAEtD,eAAO,MAAM,gBAAgB,SACrB,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAoBlB,CAAA;AAED,eAAO,MAAM,YAAY,UAEf,MAAM,YACF,aAAa,KACtB,QAAQ,MAAM,GAAG,IAAI,GAAG,SAAS,CAAC;iBA5B/B,MAAM,YACF,aAAa,KACtB,MAAM,GAAG,IAAI,GAAG,SAAS;CAgD3B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js new file mode 100644 index 00000000000000..99d10a5425dade --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js @@ -0,0 +1,46 @@ +import { dirname } from 'path'; +import { findMade, findMadeSync } from './find-made.js'; +import { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'; +import { optsArg } from './opts-arg.js'; +export const mkdirpNativeSync = (path, options) => { + const opts = optsArg(options); + opts.recursive = true; + const parent = dirname(path); + if (parent === path) { + return opts.mkdirSync(path, opts); + } + const made = findMadeSync(opts, path); + try { + opts.mkdirSync(path, opts); + return made; + } + catch (er) { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManualSync(path, opts); + } + else { + throw er; + } + } +}; +export const mkdirpNative = Object.assign(async (path, options) => { + const opts = { ...optsArg(options), recursive: true }; + const parent = dirname(path); + if (parent === path) { + return await opts.mkdirAsync(path, opts); + } + return findMade(opts, path).then((made) => opts + .mkdirAsync(path, opts) + .then(m => made || m) + .catch(er => { + const fer = er; + if (fer && fer.code === 'ENOENT') { + return mkdirpManual(path, opts); + } + else { + throw er; + } + })); +}, { sync: mkdirpNativeSync }); +//# sourceMappingURL=mkdirp-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map new file mode 100644 index 00000000000000..27de32d9436d67 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/mkdirp-native.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"mkdirp-native.js","sourceRoot":"","sources":["../../src/mkdirp-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC9B,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AACvD,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,OAAuB,EACI,EAAE;IAC7B,MAAM,IAAI,GAAG,OAAO,CAAC,OAAO,CAAC,CAAA;IAC7B,IAAI,CAAC,SAAS,GAAG,IAAI,CAAA;IACrB,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KAClC;IAED,MAAM,IAAI,GAAG,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;IACrC,IAAI;QACF,IAAI,CAAC,SAAS,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;QAC1B,OAAO,IAAI,CAAA;KACZ;IAAC,OAAO,EAAE,EAAE;QACX,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,gBAAgB,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SACpC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;KACF;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,YAAY,GAAG,MAAM,CAAC,MAAM,CACvC,KAAK,EACH,IAAY,EACZ,OAAuB,EACa,EAAE;IACtC,MAAM,IAAI,GAAG,EAAE,GAAG,OAAO,CAAC,OAAO,CAAC,EAAE,SAAS,EAAE,IAAI,EAAE,CAAA;IACrD,MAAM,MAAM,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IAC5B,IAAI,MAAM,KAAK,IAAI,EAAE;QACnB,OAAO,MAAM,IAAI,CAAC,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;KACzC;IAED,OAAO,QAAQ,CAAC,IAAI,EAAE,IAAI,CAAC,CAAC,IAAI,CAAC,CAAC,IAAyB,EAAE,EAAE,CAC7D,IAAI;SACD,UAAU,CAAC,IAAI,EAAE,IAAI,CAAC;SACtB,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC,IAAI,IAAI,CAAC,CAAC;SACpB,KAAK,CAAC,EAAE,CAAC,EAAE;QACV,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE;YAChC,OAAO,YAAY,CAAC,IAAI,EAAE,IAAI,CAAC,CAAA;SAChC;aAAM;YACL,MAAM,EAAE,CAAA;SACT;IACH,CAAC,CAAC,CACL,CAAA;AACH,CAAC,EACD,EAAE,IAAI,EAAE,gBAAgB,EAAE,CAC3B,CAAA","sourcesContent":["import { dirname } from 'path'\nimport { findMade, findMadeSync } from './find-made.js'\nimport { mkdirpManual, mkdirpManualSync } from './mkdirp-manual.js'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nexport const mkdirpNativeSync = (\n path: string,\n options?: MkdirpOptions\n): string | void | undefined => {\n const opts = optsArg(options)\n opts.recursive = true\n const parent = dirname(path)\n if (parent === path) {\n return opts.mkdirSync(path, opts)\n }\n\n const made = findMadeSync(opts, path)\n try {\n opts.mkdirSync(path, opts)\n return made\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManualSync(path, opts)\n } else {\n throw er\n }\n }\n}\n\nexport const mkdirpNative = Object.assign(\n async (\n path: string,\n options?: MkdirpOptions\n ): Promise => {\n const opts = { ...optsArg(options), recursive: true }\n const parent = dirname(path)\n if (parent === path) {\n return await opts.mkdirAsync(path, opts)\n }\n\n return findMade(opts, path).then((made?: string | undefined) =>\n opts\n .mkdirAsync(path, opts)\n .then(m => made || m)\n .catch(er => {\n const fer = er as NodeJS.ErrnoException\n if (fer && fer.code === 'ENOENT') {\n return mkdirpManual(path, opts)\n } else {\n throw er\n }\n })\n )\n },\n { sync: mkdirpNativeSync }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts new file mode 100644 index 00000000000000..73d076b3b6923c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts @@ -0,0 +1,42 @@ +/// +/// +import { MakeDirectoryOptions, Stats } from 'fs'; +export interface FsProvider { + stat?: (path: string, 
callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any; + mkdir?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any; + statSync?: (path: string) => Stats; + mkdirSync?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => string | undefined; +} +interface Options extends FsProvider { + mode?: number | string; + fs?: FsProvider; + mkdirAsync?: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => Promise; + statAsync?: (path: string) => Promise; +} +export type MkdirpOptions = Options | number | string; +export interface MkdirpOptionsResolved { + mode: number; + fs: FsProvider; + mkdirAsync: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => Promise; + statAsync: (path: string) => Promise; + stat: (path: string, callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any) => any; + mkdir: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }, callback: (err: NodeJS.ErrnoException | null, made?: string) => any) => any; + statSync: (path: string) => Stats; + mkdirSync: (path: string, opts: MakeDirectoryOptions & { + recursive?: boolean; + }) => string | undefined; + recursive?: boolean; +} +export declare const optsArg: (opts?: MkdirpOptions) => MkdirpOptionsResolved; +export {}; +//# sourceMappingURL=opts-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map new file mode 100644 index 00000000000000..717deb5f9cb0c6 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"opts-arg.d.ts","sourceRoot":"","sources":["../../src/opts-arg.ts"],"names":[],"mappings":";;AAAA,OAAO,EACL,oBAAoB,EAIpB,KAAK,EAEN,MAAM,IAAI,CAAA;AAEX,MAAM,WAAW,UAAU;IACzB,IAAI,CAAC,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,CAAC,EAAE,CACN,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IAClC,SAAS,CAAC,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;CACxB;AAED,UAAU,OAAQ,SAAQ,UAAU;IAClC,IAAI,CAAC,EAAE,MAAM,GAAG,MAAM,CAAA;IACtB,EAAE,CAAC,EAAE,UAAU,CAAA;IACf,UAAU,CAAC,EAAE,CACX,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;CAC7C;AAED,MAAM,MAAM,aAAa,GAAG,OAAO,GAAG,MAAM,GAAG,MAAM,CAAA;AAErD,MAAM,WAAW,qBAAqB;IACpC,IAAI,EAAE,MAAM,CAAA;IACZ,EAAE,EAAE,UAAU,CAAA;IACd,UAAU,EAAE,CACV,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC,CAAA;IAChC,SAAS,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,KAAK,CAAC,CAAA;IAC3C,IAAI,EAAE,CACJ,IAAI,EAAE,MAAM,EACZ,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,KAAK,EAAE,KAAK,KAAK,GAAG,KAC/D,GAAG,CAAA;IACR,KAAK,EAAE,CACL,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,EACpD,QAAQ,EAAE,CAAC,GAAG,EAAE,MAAM,CAAC,cAAc,GAAG,IAAI,EAAE,IAAI,CAAC,EAAE,MAAM,KAAK,GAAG,KAChE,GAAG,CAAA;IACR,QAAQ,EAAE,CAAC,IAAI,EAAE,MAAM,KAAK,KAAK,CAAA;IACjC,SAAS,EAAE,CACT,IAAI,EAAE,MAAM,EACZ,IAAI,EAAE,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,CAAA;KAAE,KACjD,MAAM,GAAG,SAAS,CAAA;IACvB,SAAS,CAAC,EAAE,OAAO,CAAA;CACpB;AAED,eAAO,MAAM,OAAO,UAAW,aAAa,KAAG,qBA2C9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js new file mode 100644 index 00000000000000..d47e2927fee4c0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js @@ -0,0 +1,34 @@ +import { mkdir, mkdirSync, stat, statSync, } from 'fs'; +export const optsArg = (opts) => { + if (!opts) { + opts = { mode: 0o777 }; + } + else if (typeof opts === 'object') { + opts = { mode: 0o777, ...opts }; + } + else if (typeof opts === 'number') { + opts = { mode: opts }; + } + else if (typeof opts === 'string') { + opts = { mode: parseInt(opts, 8) }; + } + else { + throw new TypeError('invalid options argument'); + } + const resolved = opts; + const optsFs = opts.fs || {}; + opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir; + opts.mkdirAsync = opts.mkdirAsync + ? opts.mkdirAsync + : async (path, options) => { + return new Promise((res, rej) => resolved.mkdir(path, options, (er, made) => er ? rej(er) : res(made))); + }; + opts.stat = opts.stat || optsFs.stat || stat; + opts.statAsync = opts.statAsync + ? opts.statAsync + : async (path) => new Promise((res, rej) => resolved.stat(path, (err, stats) => (err ? 
rej(err) : res(stats)))); + opts.statSync = opts.statSync || optsFs.statSync || statSync; + opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync; + return resolved; +}; +//# sourceMappingURL=opts-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js.map new file mode 100644 index 00000000000000..663286dc7212ed --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/opts-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"opts-arg.js","sourceRoot":"","sources":["../../src/opts-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAEL,KAAK,EACL,SAAS,EACT,IAAI,EAEJ,QAAQ,GACT,MAAM,IAAI,CAAA;AAwDX,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,IAAoB,EAAyB,EAAE;IACrE,IAAI,CAAC,IAAI,EAAE;QACT,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,CAAA;KACvB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,KAAK,EAAE,GAAG,IAAI,EAAE,CAAA;KAChC;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,IAAI,EAAE,CAAA;KACtB;SAAM,IAAI,OAAO,IAAI,KAAK,QAAQ,EAAE;QACnC,IAAI,GAAG,EAAE,IAAI,EAAE,QAAQ,CAAC,IAAI,EAAE,CAAC,CAAC,EAAE,CAAA;KACnC;SAAM;QACL,MAAM,IAAI,SAAS,CAAC,0BAA0B,CAAC,CAAA;KAChD;IAED,MAAM,QAAQ,GAAG,IAA6B,CAAA;IAC9C,MAAM,MAAM,GAAG,IAAI,CAAC,EAAE,IAAI,EAAE,CAAA;IAE5B,IAAI,CAAC,KAAK,GAAG,IAAI,CAAC,KAAK,IAAI,MAAM,CAAC,KAAK,IAAI,KAAK,CAAA;IAEhD,IAAI,CAAC,UAAU,GAAG,IAAI,CAAC,UAAU;QAC/B,CAAC,CAAC,IAAI,CAAC,UAAU;QACjB,CAAC,CAAC,KAAK,EACH,IAAY,EACZ,OAAuD,EAC1B,EAAE;YAC/B,OAAO,IAAI,OAAO,CAAqB,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CAClD,QAAQ,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACzC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;QACH,CAAC,CAAA;IAEL,IAAI,CAAC,IAAI,GAAG,IAAI,CAAC,IAAI,IAAI,MAAM,CAAC,IAAI,IAAI,IAAI,CAAA;IAC5C,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS;QAC7B,CAAC,CAAC,IAAI,CAAC,SAAS;QAChB,CAAC,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE,CACrB,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,QAAQ,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,GAAG,EAAE,KAAK,EAAE,EAAE,CAAC,CAAC,GAAG,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,KAAK,CAAC,CAAC,CAAC,CACnE,CAAA;IAEP,IAAI,CAAC,QAAQ,GAAG,IAAI,CAAC,QAAQ,IAAI,MAAM,CAAC,QAAQ,IAAI,QAAQ,CAAA;IAC5D,IAAI,CAAC,SAAS,GAAG,IAAI,CAAC,SAAS,IAAI,MAAM,CAAC,SAAS,IAAI,SAAS,CAAA;IAEhE,OAAO,QAAQ,CAAA;AACjB,CAAC,CAAA","sourcesContent":["import {\n MakeDirectoryOptions,\n mkdir,\n mkdirSync,\n stat,\n Stats,\n statSync,\n} from 'fs'\n\nexport interface FsProvider {\n stat?: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => any\n mkdir?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync?: (path: string) => Stats\n mkdirSync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n}\n\ninterface Options extends FsProvider {\n mode?: number | string\n fs?: FsProvider\n mkdirAsync?: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync?: (path: string) => Promise\n}\n\nexport type MkdirpOptions = Options | number | string\n\nexport interface MkdirpOptionsResolved {\n mode: number\n fs: FsProvider\n mkdirAsync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => Promise\n statAsync: (path: string) => Promise\n stat: (\n path: string,\n callback: (err: NodeJS.ErrnoException | null, stats: Stats) => any\n ) => 
any\n mkdir: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean },\n callback: (err: NodeJS.ErrnoException | null, made?: string) => any\n ) => any\n statSync: (path: string) => Stats\n mkdirSync: (\n path: string,\n opts: MakeDirectoryOptions & { recursive?: boolean }\n ) => string | undefined\n recursive?: boolean\n}\n\nexport const optsArg = (opts?: MkdirpOptions): MkdirpOptionsResolved => {\n if (!opts) {\n opts = { mode: 0o777 }\n } else if (typeof opts === 'object') {\n opts = { mode: 0o777, ...opts }\n } else if (typeof opts === 'number') {\n opts = { mode: opts }\n } else if (typeof opts === 'string') {\n opts = { mode: parseInt(opts, 8) }\n } else {\n throw new TypeError('invalid options argument')\n }\n\n const resolved = opts as MkdirpOptionsResolved\n const optsFs = opts.fs || {}\n\n opts.mkdir = opts.mkdir || optsFs.mkdir || mkdir\n\n opts.mkdirAsync = opts.mkdirAsync\n ? opts.mkdirAsync\n : async (\n path: string,\n options: MakeDirectoryOptions & { recursive?: boolean }\n ): Promise => {\n return new Promise((res, rej) =>\n resolved.mkdir(path, options, (er, made) =>\n er ? rej(er) : res(made)\n )\n )\n }\n\n opts.stat = opts.stat || optsFs.stat || stat\n opts.statAsync = opts.statAsync\n ? opts.statAsync\n : async (path: string) =>\n new Promise((res, rej) =>\n resolved.stat(path, (err, stats) => (err ? rej(err) : res(stats)))\n )\n\n opts.statSync = opts.statSync || optsFs.statSync || statSync\n opts.mkdirSync = opts.mkdirSync || optsFs.mkdirSync || mkdirSync\n\n return resolved\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts new file mode 100644 index 00000000000000..ad0ccfc482a485 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts @@ -0,0 +1,2 @@ +export declare const pathArg: (path: string) => string; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map new file mode 100644 index 00000000000000..801799e766fabc --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,eAAO,MAAM,OAAO,SAAU,MAAM,WAyBnC,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js new file mode 100644 index 00000000000000..03539cc5a94f98 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js @@ -0,0 +1,24 @@ +const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform; +import { parse, resolve } from 'path'; +export const pathArg = (path) => { + if (/\0/.test(path)) { + // simulate same failure that node raises + throw Object.assign(new TypeError('path must be a string without null bytes'), { + path, + code: 
'ERR_INVALID_ARG_VALUE', + }); + } + path = resolve(path); + if (platform === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = parse(path); + if (badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js.map new file mode 100644 index 00000000000000..43efe1e3a9976f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAAA,MAAM,QAAQ,GAAG,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA;AAC5E,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,MAAM,CAAC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,EAAE;IACtC,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE;QACnB,yCAAyC;QACzC,MAAM,MAAM,CAAC,MAAM,CACjB,IAAI,SAAS,CAAC,0CAA0C,CAAC,EACzD;YACE,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CACF,CAAA;KACF;IAED,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,IAAI,QAAQ,KAAK,OAAO,EAAE;QACxB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE;YACjD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;SACH;KACF;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["const platform = process.env.__TESTING_MKDIRP_PLATFORM__ || process.platform\nimport { parse, resolve } from 'path'\nexport const pathArg = (path: string) => {\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n throw Object.assign(\n new TypeError('path must be a string without null bytes'),\n {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n }\n )\n }\n\n path = resolve(path)\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts new file mode 100644 index 00000000000000..1c6cb619e30405 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts @@ -0,0 +1,6 @@ +import { MkdirpOptions } from './opts-arg.js'; +export declare const useNativeSync: (opts?: MkdirpOptions) => boolean; +export declare const useNative: ((opts?: MkdirpOptions) => boolean) & { + sync: (opts?: MkdirpOptions) => boolean; +}; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts.map new file mode 100644 index 00000000000000..e2484228a04472 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,aAAa,EAAW,MAAM,eAAe,CAAA;AAMtD,eAAO,MAAM,aAAa,UAEd,aAAa,YAA0C,CAAA;AAEnE,eAAO,MAAM,SAAS,WAGR,aAAa;kBALf,aAAa;CASxB,CAAA"} 
\ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js new file mode 100644 index 00000000000000..ad2093867eb74e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js @@ -0,0 +1,14 @@ +import { mkdir, mkdirSync } from 'fs'; +import { optsArg } from './opts-arg.js'; +const version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +const hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12); +export const useNativeSync = !hasNative + ? () => false + : (opts) => optsArg(opts).mkdirSync === mkdirSync; +export const useNative = Object.assign(!hasNative + ? () => false + : (opts) => optsArg(opts).mkdir === mkdir, { + sync: useNativeSync, +}); +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js.map b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js.map new file mode 100644 index 00000000000000..08c616d365510f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/dist/mjs/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,SAAS,EAAE,MAAM,IAAI,CAAA;AACrC,OAAO,EAAiB,OAAO,EAAE,MAAM,eAAe,CAAA;AAEtD,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AACpD,MAAM,SAAS,GAAG,CAAC,OAAO,CAAC,CAAC,CAAC,GAAG,EAAE,IAAI,CAAC,CAAC,OAAO,CAAC,CAAC,CAAC,KAAK,EAAE,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,IAAI,EAAE,CAAC,CAAA;AAE/E,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,SAAS;IACrC,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,SAAS,KAAK,SAAS,CAAA;AAEnE,MAAM,CAAC,MAAM,SAAS,GAAG,MAAM,CAAC,MAAM,CACpC,CAAC,SAAS;IACR,CAAC,CAAC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,CAAC,IAAoB,EAAE,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,KAAK,KAAK,EAC3D;IACE,IAAI,EAAE,aAAa;CACpB,CACF,CAAA","sourcesContent":["import { mkdir, mkdirSync } from 'fs'\nimport { MkdirpOptions, optsArg } from './opts-arg.js'\n\nconst version = process.env.__TESTING_MKDIRP_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\nconst hasNative = +versArr[0] > 10 || (+versArr[0] === 10 && +versArr[1] >= 12)\n\nexport const useNativeSync = !hasNative\n ? () => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdirSync === mkdirSync\n\nexport const useNative = Object.assign(\n !hasNative\n ? 
() => false\n : (opts?: MkdirpOptions) => optsArg(opts).mkdir === mkdir,\n {\n sync: useNativeSync,\n }\n)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/package.json b/deps/npm/node_modules/cacache/node_modules/mkdirp/package.json new file mode 100644 index 00000000000000..f31ac3314d6f6a --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/package.json @@ -0,0 +1,91 @@ +{ + "name": "mkdirp", + "description": "Recursively mkdir, like `mkdir -p`", + "version": "3.0.1", + "keywords": [ + "mkdir", + "directory", + "make dir", + "make", + "dir", + "recursive", + "native" + ], + "bin": "./dist/cjs/src/bin.js", + "main": "./dist/cjs/src/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/mjs/index.d.ts", + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/src/index.d.ts", + "default": "./dist/cjs/src/index.js" + } + } + }, + "files": [ + "dist" + ], + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "preprepare": "rm -rf dist", + "prepare": "tsc -p tsconfig.json && tsc -p tsconfig-esm.json", + "postprepare": "bash fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig tsconfig-esm.json ./src/*.ts" + }, + "prettier": { + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/brace-expansion": "^1.1.0", + "@types/node": "^18.11.9", + "@types/tap": "^15.0.7", + "c8": "^7.12.0", + "eslint-config-prettier": "^8.6.0", + "prettier": "^2.8.2", + "tap": "^16.3.3", + "ts-node": "^10.9.1", + "typedoc": "^0.23.21", + "typescript": "^4.9.3" + }, + "tap": { + "coverage": false, + "node-arg": [ + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-mkdirp.git" + }, + "license": "MIT", + "engines": { + "node": ">=10" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/mkdirp/readme.markdown b/deps/npm/node_modules/cacache/node_modules/mkdirp/readme.markdown new file mode 100644 index 00000000000000..df654b808755f5 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/mkdirp/readme.markdown @@ -0,0 +1,281 @@ +# mkdirp + +Like `mkdir -p`, but in Node.js! + +Now with a modern API and no\* bugs! 
+ +\* may contain some bugs + +# example + +## pow.js + +```js +// hybrid module, import or require() both work +import { mkdirp } from 'mkdirp' +// or: +const { mkdirp } = require('mkdirp') + +// return value is a Promise resolving to the first directory created +mkdirp('/tmp/foo/bar/baz').then(made => + console.log(`made directories, starting with ${made}`) +) +``` + +Output (where `/tmp/foo` already exists) + +``` +made directories, starting with /tmp/foo/bar +``` + +Or, if you don't have time to wait around for promises: + +```js +import { mkdirp } from 'mkdirp' + +// return value is the first directory created +const made = mkdirp.sync('/tmp/foo/bar/baz') +console.log(`made directories, starting with ${made}`) +``` + +And now /tmp/foo/bar/baz exists, huzzah! + +# methods + +```js +import { mkdirp } from 'mkdirp' +``` + +## `mkdirp(dir: string, opts?: MkdirpOptions) => Promise` + +Create a new directory and any necessary subdirectories at `dir` +with octal permission string `opts.mode`. If `opts` is a string +or number, it will be treated as the `opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0o777`. + +Promise resolves to first directory `made` that had to be +created, or `undefined` if everything already exists. Promise +rejects if any errors are encountered. Note that, in the case of +promise rejection, some directories _may_ have been created, as +recursive directory creation is not an atomic operation. + +You can optionally pass in an alternate `fs` implementation by +passing in `opts.fs`. Your implementation should have +`opts.fs.mkdir(path, opts, cb)` and `opts.fs.stat(path, cb)`. + +You can also override just one or the other of `mkdir` and `stat` +by passing in `opts.stat` or `opts.mkdir`, or providing an `fs` +option that only overrides one of these. + +## `mkdirp.sync(dir: string, opts: MkdirpOptions) => string|undefined` + +Synchronously create a new directory and any necessary +subdirectories at `dir` with octal permission string `opts.mode`. +If `opts` is a string or number, it will be treated as the +`opts.mode`. + +If `opts.mode` isn't specified, it defaults to `0o777`. + +Returns the first directory that had to be created, or undefined +if everything already exists. + +You can optionally pass in an alternate `fs` implementation by +passing in `opts.fs`. Your implementation should have +`opts.fs.mkdirSync(path, mode)` and `opts.fs.statSync(path)`. + +You can also override just one or the other of `mkdirSync` and +`statSync` by passing in `opts.statSync` or `opts.mkdirSync`, or +providing an `fs` option that only overrides one of these. + +## `mkdirp.manual`, `mkdirp.manualSync` + +Use the manual implementation (not the native one). This is the +default when the native implementation is not available or the +stat/mkdir implementation is overridden. + +## `mkdirp.native`, `mkdirp.nativeSync` + +Use the native implementation (not the manual one). This is the +default when the native implementation is available and +stat/mkdir are not overridden. + +# implementation + +On Node.js v10.12.0 and above, use the native `fs.mkdir(p, +{recursive:true})` option, unless `fs.mkdir`/`fs.mkdirSync` has +been overridden by an option. + +## native implementation + +- If the path is a root directory, then pass it to the underlying + implementation and return the result/error. (In this case, + it'll either succeed or fail, but we aren't actually creating + any dirs.) +- Walk up the path statting each directory, to find the first + path that will be created, `made`. 
+- Call `fs.mkdir(path, { recursive: true })` (or `fs.mkdirSync`) +- If error, raise it to the caller. +- Return `made`. + +## manual implementation + +- Call underlying `fs.mkdir` implementation, with `recursive: +false` +- If error: + - If path is a root directory, raise to the caller and do not + handle it + - If ENOENT, mkdirp parent dir, store result as `made` + - stat(path) + - If error, raise original `mkdir` error + - If directory, return `made` + - Else, raise original `mkdir` error +- else + - return `undefined` if a root dir, or `made` if set, or `path` + +## windows vs unix caveat + +On Windows file systems, attempts to create a root directory (ie, +a drive letter or root UNC path) will fail. If the root +directory exists, then it will fail with `EPERM`. If the root +directory does not exist, then it will fail with `ENOENT`. + +On posix file systems, attempts to create a root directory (in +recursive mode) will succeed silently, as it is treated like just +another directory that already exists. (In non-recursive mode, +of course, it fails with `EEXIST`.) + +In order to preserve this system-specific behavior (and because +it's not as if we can create the parent of a root directory +anyway), attempts to create a root directory are passed directly +to the `fs` implementation, and any errors encountered are not +handled. + +## native error caveat + +The native implementation (as of at least Node.js v13.4.0) does +not provide appropriate errors in some cases (see +[nodejs/node#31481](https://github.com/nodejs/node/issues/31481) +and +[nodejs/node#28015](https://github.com/nodejs/node/issues/28015)). + +In order to work around this issue, the native implementation +will fall back to the manual implementation if an `ENOENT` error +is encountered. + +# choosing a recursive mkdir implementation + +There are a few to choose from! Use the one that suits your +needs best :D + +## use `fs.mkdir(path, {recursive: true}, cb)` if: + +- You wish to optimize performance even at the expense of other + factors. +- You don't need to know the first dir created. +- You are ok with getting `ENOENT` as the error when some other + problem is the actual cause. +- You can limit your platforms to Node.js v10.12 and above. +- You're ok with using callbacks instead of promises. +- You don't need/want a CLI. +- You don't need to override the `fs` methods in use. + +## use this module (mkdirp 1.x or 2.x) if: + +- You need to know the first directory that was created. +- You wish to use the native implementation if available, but + fall back when it's not. +- You prefer promise-returning APIs to callback-taking APIs. +- You want more useful error messages than the native recursive + mkdir provides (at least as of Node.js v13.4), and are ok with + re-trying on `ENOENT` to achieve this. +- You need (or at least, are ok with) a CLI. +- You need to override the `fs` methods in use. + +## use [`make-dir`](http://npm.im/make-dir) if: + +- You do not need to know the first dir created (and wish to save + a few `stat` calls when using the native implementation for + this reason). +- You wish to use the native implementation if available, but + fall back when it's not. +- You prefer promise-returning APIs to callback-taking APIs. +- You are ok with occasionally getting `ENOENT` errors for + failures that are actually related to something other than a + missing file system entry. +- You don't need/want a CLI. +- You need to override the `fs` methods in use. 
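Several of the bullets above turn on whether you need to override the `fs` methods in use. As a rough sketch of the `opts.fs` override described in the methods section (the logging wrapper is hypothetical, not from the upstream docs), note that overriding `mkdir`/`stat` also forces the manual implementation, since `useNative` only returns true when the built-in `fs.mkdir` is in play:

```js
// Hypothetical illustration: route mkdirp's mkdir/stat calls through a
// wrapper that logs each directory it touches. Because the overridden
// mkdir is no longer the built-in fs.mkdir, useNative() returns false
// and the manual (ENOENT-walking) implementation is used.
import fs from 'fs'
import { mkdirp } from 'mkdirp'

const loggingFs = {
  mkdir: (path, opts, cb) => {
    console.log('mkdir', path)
    fs.mkdir(path, opts, cb)
  },
  stat: (path, cb) => fs.stat(path, cb),
}

const made = await mkdirp('/tmp/foo/bar/baz', { fs: loggingFs })
console.log(`first directory created: ${made}`)
```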
+ +## use mkdirp 0.x if: + +- You need to know the first directory that was created. +- You need (or at least, are ok with) a CLI. +- You need to override the `fs` methods in use. +- You're ok with using callbacks instead of promises. +- You are not running on Windows, where the root-level ENOENT + errors can lead to infinite regress. +- You think vinyl just sounds warmer and richer for some weird + reason. +- You are supporting truly ancient Node.js versions, before even + the advent of a `Promise` language primitive. (Please don't. + You deserve better.) + +# cli + +This package also ships with a `mkdirp` command. + +``` +$ mkdirp -h + +usage: mkdirp [DIR1,DIR2..] {OPTIONS} + + Create each supplied directory including any necessary parent directories + that don't yet exist. + + If the directory already exists, do nothing. + +OPTIONS are: + + -m If a directory needs to be created, set the mode as an octal + --mode= permission string. + + -v --version Print the mkdirp version number + + -h --help Print this helpful banner + + -p --print Print the first directories created for each path provided + + --manual Use manual implementation, even if native is available +``` + +# install + +With [npm](http://npmjs.org) do: + +``` +npm install mkdirp +``` + +to get the library locally, or + +``` +npm install -g mkdirp +``` + +to get the command everywhere, or + +``` +npx mkdirp ... +``` + +to run the command without installing it globally. + +# platform support + +This module works on node v8, but only v10 and above are officially +supported, as Node v8 reached its LTS end of life 2020-01-01, which is in +the past, as of this writing. + +# license + +MIT diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/index.js b/deps/npm/node_modules/cacache/node_modules/p-map/index.js new file mode 100644 index 00000000000000..2f7d91ccca4eda --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/p-map/index.js @@ -0,0 +1,269 @@ +export default async function pMap( + iterable, + mapper, + { + concurrency = Number.POSITIVE_INFINITY, + stopOnError = true, + signal, + } = {}, +) { + return new Promise((resolve, reject_) => { + if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) { + throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`); + } + + if (typeof mapper !== 'function') { + throw new TypeError('Mapper function is required'); + } + + if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) { + throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`); + } + + const result = []; + const errors = []; + const skippedIndexesMap = new Map(); + let isRejected = false; + let isResolved = false; + let isIterableDone = false; + let resolvingCount = 0; + let currentIndex = 0; + const iterator = iterable[Symbol.iterator] === undefined ? iterable[Symbol.asyncIterator]() : iterable[Symbol.iterator](); + + const reject = reason => { + isRejected = true; + isResolved = true; + reject_(reason); + }; + + if (signal) { + if (signal.aborted) { + reject(signal.reason); + } + + signal.addEventListener('abort', () => { + reject(signal.reason); + }); + } + + const next = async () => { + if (isResolved) { + return; + } + + const nextItem = await iterator.next(); + + const index = currentIndex; + currentIndex++; + + // Note: `iterator.next()` can be called many times in parallel. 
+ // This can cause multiple calls to this `next()` function to + // receive a `nextItem` with `done === true`. + // The shutdown logic that rejects/resolves must be protected + // so it runs only one time as the `skippedIndex` logic is + // non-idempotent. + if (nextItem.done) { + isIterableDone = true; + + if (resolvingCount === 0 && !isResolved) { + if (!stopOnError && errors.length > 0) { + reject(new AggregateError(errors)); // eslint-disable-line unicorn/error-message + return; + } + + isResolved = true; + + if (skippedIndexesMap.size === 0) { + resolve(result); + return; + } + + const pureResult = []; + + // Support multiple `pMapSkip`'s. + for (const [index, value] of result.entries()) { + if (skippedIndexesMap.get(index) === pMapSkip) { + continue; + } + + pureResult.push(value); + } + + resolve(pureResult); + } + + return; + } + + resolvingCount++; + + // Intentionally detached + (async () => { + try { + const element = await nextItem.value; + + if (isResolved) { + return; + } + + const value = await mapper(element, index); + + // Use Map to stage the index of the element. + if (value === pMapSkip) { + skippedIndexesMap.set(index, value); + } + + result[index] = value; + + resolvingCount--; + await next(); + } catch (error) { + if (stopOnError) { + reject(error); + } else { + errors.push(error); + resolvingCount--; + + // In that case we can't really continue regardless of `stopOnError` state + // since an iterable is likely to continue throwing after it throws once. + // If we continue calling `next()` indefinitely we will likely end up + // in an infinite loop of failed iteration. + try { + await next(); + } catch (error) { + reject(error); + } + } + } + })(); + }; + + // Create the concurrent runners in a detached (non-awaited) + // promise. We need this so we can await the `next()` calls + // to stop creating runners before hitting the concurrency limit + // if the iterable has already been marked as done. + // NOTE: We *must* do this for async iterators otherwise we'll spin up + // infinite `next()` calls by default and never start the event loop. + (async () => { + for (let index = 0; index < concurrency; index++) { + try { + // eslint-disable-next-line no-await-in-loop + await next(); + } catch (error) { + reject(error); + break; + } + + if (isIterableDone || isRejected) { + break; + } + } + })(); + }); +} + +export function pMapIterable( + iterable, + mapper, + { + concurrency = Number.POSITIVE_INFINITY, + backpressure = concurrency, + } = {}, +) { + if (iterable[Symbol.iterator] === undefined && iterable[Symbol.asyncIterator] === undefined) { + throw new TypeError(`Expected \`input\` to be either an \`Iterable\` or \`AsyncIterable\`, got (${typeof iterable})`); + } + + if (typeof mapper !== 'function') { + throw new TypeError('Mapper function is required'); + } + + if (!((Number.isSafeInteger(concurrency) && concurrency >= 1) || concurrency === Number.POSITIVE_INFINITY)) { + throw new TypeError(`Expected \`concurrency\` to be an integer from 1 and up or \`Infinity\`, got \`${concurrency}\` (${typeof concurrency})`); + } + + if (!((Number.isSafeInteger(backpressure) && backpressure >= concurrency) || backpressure === Number.POSITIVE_INFINITY)) { + throw new TypeError(`Expected \`backpressure\` to be an integer from \`concurrency\` (${concurrency}) and up or \`Infinity\`, got \`${backpressure}\` (${typeof backpressure})`); + } + + return { + async * [Symbol.asyncIterator]() { + const iterator = iterable[Symbol.asyncIterator] === undefined ? 
iterable[Symbol.iterator]() : iterable[Symbol.asyncIterator](); + + const promises = []; + let runningMappersCount = 0; + let isDone = false; + let index = 0; + + function trySpawn() { + if (isDone || !(runningMappersCount < concurrency && promises.length < backpressure)) { + return; + } + + const promise = (async () => { + const {done, value} = await iterator.next(); + + if (done) { + return {done: true}; + } + + runningMappersCount++; + + // Spawn if still below concurrency and backpressure limit + trySpawn(); + + try { + const returnValue = await mapper(await value, index++); + + runningMappersCount--; + + if (returnValue === pMapSkip) { + const index = promises.indexOf(promise); + + if (index > 0) { + promises.splice(index, 1); + } + } + + // Spawn if still below backpressure limit and just dropped below concurrency limit + trySpawn(); + + return {done: false, value: returnValue}; + } catch (error) { + isDone = true; + return {error}; + } + })(); + + promises.push(promise); + } + + trySpawn(); + + while (promises.length > 0) { + const {error, done, value} = await promises[0]; // eslint-disable-line no-await-in-loop + + promises.shift(); + + if (error) { + throw error; + } + + if (done) { + return; + } + + // Spawn if just dropped below backpressure limit and below the concurrency limit + trySpawn(); + + if (value === pMapSkip) { + continue; + } + + yield value; + } + }, + }; +} + +export const pMapSkip = Symbol('skip'); diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/license b/deps/npm/node_modules/cacache/node_modules/p-map/license new file mode 100644 index 00000000000000..fa7ceba3eb4a96 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/p-map/license @@ -0,0 +1,9 @@ +MIT License + +Copyright (c) Sindre Sorhus (https://sindresorhus.com) + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
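For orientation, a minimal usage sketch of the `p-map` API implemented above (hypothetical example values, not part of the vendored files): `concurrency` bounds how many mapper calls run at once, `stopOnError: false` collects failures into an `AggregateError` instead of rejecting on the first one, and returning `pMapSkip` drops that element from the result array.

```js
import pMap, {pMapSkip} from 'p-map';

const inputs = ['a', 'b', 'c', 'd'];

const results = await pMap(
	inputs,
	async (value, index) => {
		// Returning pMapSkip omits this element from the result array.
		if (value === 'c') {
			return pMapSkip;
		}

		return `${index}:${value}`;
	},
	// At most two mappers in flight; errors (none here) would be
	// aggregated rather than rejecting on the first failure.
	{concurrency: 2, stopOnError: false},
);

console.log(results); //=> ['0:a', '1:b', '3:d']
```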
diff --git a/deps/npm/node_modules/cacache/node_modules/p-map/package.json b/deps/npm/node_modules/cacache/node_modules/p-map/package.json new file mode 100644 index 00000000000000..ea58f599f3a035 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/p-map/package.json @@ -0,0 +1,57 @@ +{ + "name": "p-map", + "version": "7.0.2", + "description": "Map over promises concurrently", + "license": "MIT", + "repository": "sindresorhus/p-map", + "funding": "https://github.com/sponsors/sindresorhus", + "author": { + "name": "Sindre Sorhus", + "email": "sindresorhus@gmail.com", + "url": "https://sindresorhus.com" + }, + "type": "module", + "exports": { + "types": "./index.d.ts", + "default": "./index.js" + }, + "sideEffects": false, + "engines": { + "node": ">=18" + }, + "scripts": { + "test": "xo && ava && tsd" + }, + "files": [ + "index.js", + "index.d.ts" + ], + "keywords": [ + "promise", + "map", + "resolved", + "wait", + "collection", + "iterable", + "iterator", + "race", + "fulfilled", + "async", + "await", + "promises", + "concurrently", + "concurrency", + "parallel", + "bluebird" + ], + "devDependencies": { + "ava": "^5.2.0", + "chalk": "^5.3.0", + "delay": "^6.0.0", + "in-range": "^3.0.0", + "random-int": "^3.0.0", + "time-span": "^5.1.0", + "tsd": "^0.29.0", + "xo": "^0.56.0" + } +} diff --git a/deps/npm/node_modules/cacache/node_modules/tar/LICENSE b/deps/npm/node_modules/cacache/node_modules/tar/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js new file mode 100644 index 00000000000000..3190afc48318f9 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/create.js @@ -0,0 +1,83 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.create = void 0; +const fs_minipass_1 = require("@isaacs/fs-minipass"); +const node_path_1 = __importDefault(require("node:path")); +const list_js_1 = require("./list.js"); +const make_command_js_1 = require("./make-command.js"); +const pack_js_1 = require("./pack.js"); +const createFileSync = (opt, files) => { + const p = new pack_js_1.PackSync(opt); + const stream = new fs_minipass_1.WriteStreamSync(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const createFile = (opt, files) => { + const p = new pack_js_1.Pack(opt); + const stream = new fs_minipass_1.WriteStream(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + const promise = new Promise((res, rej) => { + stream.on('error', rej); + stream.on('close', res); + p.on('error', rej); + }); + addFilesAsync(p, files); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + (0, list_js_1.list)({ + file: node_path_1.default.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await (0, list_js_1.list)({ + file: node_path_1.default.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => { + p.add(entry); + }, + }); + } + else { + p.add(file); + } + } + p.end(); +}; +const createSync = (opt, files) => { + const p = new pack_js_1.PackSync(opt); + addFilesSync(p, files); + return p; +}; +const createAsync = (opt, files) => { + const p = new pack_js_1.Pack(opt); + addFilesAsync(p, files); + return p; +}; +exports.create = (0, make_command_js_1.makeCommand)(createFileSync, createFile, createSync, createAsync, (_opt, files) => { + if (!files?.length) { + throw new TypeError('no paths specified to add to archive'); + } +}); +//# sourceMappingURL=create.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js new file mode 100644 index 00000000000000..d703a7772be3a5 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/cwd-error.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.CwdError = void 0; +class CwdError extends Error { + path; + code; + syscall = 'chdir'; + constructor(path, code) { + super(`${code}: Cannot cd into '${path}'`); + this.path = path; + this.code = code; + } + get name() { + return 'CwdError'; + } +} +exports.CwdError = CwdError; +//# sourceMappingURL=cwd-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js new file mode 100644 index 00000000000000..f848cbcbf779e8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/extract.js @@ -0,0 +1,78 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? 
!m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.extract = void 0; +// tar -x +const fsm = __importStar(require("@isaacs/fs-minipass")); +const node_fs_1 = __importDefault(require("node:fs")); +const list_js_1 = require("./list.js"); +const make_command_js_1 = require("./make-command.js"); +const unpack_js_1 = require("./unpack.js"); +const extractFileSync = (opt) => { + const u = new unpack_js_1.UnpackSync(opt); + const file = opt.file; + const stat = node_fs_1.default.statSync(file); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size, + }); + stream.pipe(u); +}; +const extractFile = (opt, _) => { + const u = new unpack_js_1.Unpack(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + u.on('error', reject); + u.on('close', resolve); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + node_fs_1.default.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(u); + } + }); + }); + return p; +}; +exports.extract = (0, make_command_js_1.makeCommand)(extractFileSync, extractFile, opt => new unpack_js_1.UnpackSync(opt), opt => new unpack_js_1.Unpack(opt), (opt, files) => { + if (files?.length) + (0, list_js_1.filesFilter)(opt, files); +}); +//# sourceMappingURL=extract.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js new file mode 100644 index 00000000000000..94add8f6b2231c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/get-write-flag.js @@ -0,0 +1,29 @@ +"use strict"; +// Get the appropriate flag to use for creating files +// We use fmap on Windows platforms for files less than +// 512kb. This is a fairly low limit, but avoids making +// things slower in some cases. Since most of what this +// library is used for is extracting tarballs of many +// relatively small files in npm packages and the like, +// it can be a big boost on Windows platforms. +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.getWriteFlag = void 0; +const fs_1 = __importDefault(require("fs")); +const platform = process.env.__FAKE_PLATFORM__ || process.platform; +const isWindows = platform === 'win32'; +/* c8 ignore start */ +const { O_CREAT, O_TRUNC, O_WRONLY } = fs_1.default.constants; +const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) || + fs_1.default.constants.UV_FS_O_FILEMAP || + 0; +/* c8 ignore stop */ +const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP; +const fMapLimit = 512 * 1024; +const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY; +exports.getWriteFlag = !fMapEnabled ? + () => 'w' + : (size) => (size < fMapLimit ? fMapFlag : 'w'); +//# sourceMappingURL=get-write-flag.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/header.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/header.js new file mode 100644 index 00000000000000..b3a48037b849ab --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/header.js @@ -0,0 +1,306 @@ +"use strict"; +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Header = void 0; +const node_path_1 = require("node:path"); +const large = __importStar(require("./large-numbers.js")); +const types = __importStar(require("./types.js")); +class Header { + cksumValid = false; + needPax = false; + nullBlock = false; + block; + path; + mode; + uid; + gid; + size; + cksum; + #type = 'Unsupported'; + linkpath; + uname; + gname; + devmaj = 0; + devmin = 0; + atime; + ctime; + mtime; + charset; + comment; + constructor(data, off = 0, ex, gex) { + if (Buffer.isBuffer(data)) { + this.decode(data, off || 0, ex, gex); + } + else if (data) { + this.#slurp(data); + } + } + decode(buf, off, ex, gex) { + if (!off) { + off = 0; + } + if (!buf || !(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + this.path = decString(buf, off, 100); + this.mode = decNumber(buf, off + 100, 8); + this.uid = decNumber(buf, off + 108, 8); + this.gid = decNumber(buf, off + 116, 8); + this.size = decNumber(buf, off + 124, 12); + this.mtime = decDate(buf, off + 136, 12); + this.cksum = decNumber(buf, off + 148, 12); + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + // Apply global before local, so it overrides + if (gex) + this.#slurp(gex, true); + if (ex) + this.#slurp(ex); + // old tar versions marked dirs as a file with a trailing / + const t = decString(buf, off + 156, 1); + if (types.isCode(t)) { + this.#type = t || '0'; + } + if (this.#type === '0' && this.path.slice(-1) === '/') { + this.#type = '5'; + } + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this.#type === '5') { + this.size = 0; + } + this.linkpath = decString(buf, off + 157, 100); + if (buf.subarray(off + 257, off + 265).toString() === + 'ustar\u000000') { + this.uname = decString(buf, off + 265, 32); + this.gname = decString(buf, off + 297, 32); + /* c8 ignore start */ + this.devmaj = decNumber(buf, off + 329, 8) ?? 0; + this.devmin = decNumber(buf, off + 337, 8) ?? 0; + /* c8 ignore stop */ + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. 
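+ // Layout note: the prefix field spans bytes 345-499 of the block.
+ // A non-zero byte at offset 475 can only mean the full 155-byte
+ // prefix form; the else branch below reads the shorter 130-byte
+ // form, which leaves bytes 476-499 for the atime/ctime extensions.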
+ const prefix = decString(buf, off + 345, 155); + this.path = prefix + '/' + this.path; + } + else { + const prefix = decString(buf, off + 345, 130); + if (prefix) { + this.path = prefix + '/' + this.path; + } + this.atime = decDate(buf, off + 476, 12); + this.ctime = decDate(buf, off + 488, 12); + } + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksumValid = sum === this.cksum; + if (this.cksum === undefined && sum === 8 * 0x20) { + this.nullBlock = true; + } + } + #slurp(ex, gex = false) { + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !(v === null || + v === undefined || + (k === 'path' && gex) || + (k === 'linkpath' && gex) || + k === 'global'); + }))); + } + encode(buf, off = 0) { + if (!buf) { + buf = this.block = Buffer.alloc(512); + } + if (this.#type === 'Unsupported') { + this.#type = '0'; + } + if (!(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + const prefixSize = this.ctime || this.atime ? 130 : 155; + const split = splitPrefix(this.path || '', prefixSize); + const path = split[0]; + const prefix = split[1]; + this.needPax = !!split[2]; + this.needPax = encString(buf, off, 100, path) || this.needPax; + this.needPax = + encNumber(buf, off + 100, 8, this.mode) || this.needPax; + this.needPax = + encNumber(buf, off + 108, 8, this.uid) || this.needPax; + this.needPax = + encNumber(buf, off + 116, 8, this.gid) || this.needPax; + this.needPax = + encNumber(buf, off + 124, 12, this.size) || this.needPax; + this.needPax = + encDate(buf, off + 136, 12, this.mtime) || this.needPax; + buf[off + 156] = this.#type.charCodeAt(0); + this.needPax = + encString(buf, off + 157, 100, this.linkpath) || this.needPax; + buf.write('ustar\u000000', off + 257, 8); + this.needPax = + encString(buf, off + 265, 32, this.uname) || this.needPax; + this.needPax = + encString(buf, off + 297, 32, this.gname) || this.needPax; + this.needPax = + encNumber(buf, off + 329, 8, this.devmaj) || this.needPax; + this.needPax = + encNumber(buf, off + 337, 8, this.devmin) || this.needPax; + this.needPax = + encString(buf, off + 345, prefixSize, prefix) || this.needPax; + if (buf[off + 475] !== 0) { + this.needPax = + encString(buf, off + 345, 155, prefix) || this.needPax; + } + else { + this.needPax = + encString(buf, off + 345, 130, prefix) || this.needPax; + this.needPax = + encDate(buf, off + 476, 12, this.atime) || this.needPax; + this.needPax = + encDate(buf, off + 488, 12, this.ctime) || this.needPax; + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksum = sum; + encNumber(buf, off + 148, 8, this.cksum); + this.cksumValid = true; + return this.needPax; + } + get type() { + return (this.#type === 'Unsupported' ? 
+ this.#type + : types.name.get(this.#type)); + } + get typeKey() { + return this.#type; + } + set type(type) { + const c = String(types.code.get(type)); + if (types.isCode(c) || c === 'Unsupported') { + this.#type = c; + } + else if (types.isCode(type)) { + this.#type = type; + } + else { + throw new TypeError('invalid entry type: ' + type); + } + } +} +exports.Header = Header; +const splitPrefix = (p, prefixSize) => { + const pathSize = 100; + let pp = p; + let prefix = ''; + let ret = undefined; + const root = node_path_1.posix.parse(p).root || '.'; + if (Buffer.byteLength(pp) < pathSize) { + ret = [pp, prefix, false]; + } + else { + // first set prefix to the dir, and path to the base + prefix = node_path_1.posix.dirname(pp); + pp = node_path_1.posix.basename(pp); + do { + if (Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // both fit! + ret = [pp, prefix, false]; + } + else if (Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true]; + } + else { + // make path take a bit from prefix + pp = node_path_1.posix.join(node_path_1.posix.basename(prefix), pp); + prefix = node_path_1.posix.dirname(prefix); + } + } while (prefix !== root && ret === undefined); + // at this point, found no resolution, just truncate + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true]; + } + } + return ret; +}; +const decString = (buf, off, size) => buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*/, ''); +const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size)); +const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000); +const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ? + large.parse(buf.subarray(off, off + size)) + : decSmallNumber(buf, off, size); +const nanUndef = (value) => (isNaN(value) ? undefined : value); +const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*$/, '') + .trim(), 8)); +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8: 0o7777777, +}; +const encNumber = (buf, off, size, num) => num === undefined ? false + : num > MAXNUM[size] || num < 0 ? + (large.encode(num, buf.subarray(off, off + size)), true) + : (encSmallNumber(buf, off, size, num), false); +const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii'); +const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size); +const padOctal = (str, size) => (str.length === size - 1 ? + str + : new Array(size - str.length - 1).join('0') + str + ' ') + '\0'; +const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000)); +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0'); +// pad with nulls, return true if it's longer or non-ascii +const encString = (buf, off, size, str) => str === undefined ? 
false : ((buf.write(str + NULLS, off, size, 'utf8'), + str.length !== Buffer.byteLength(str) || str.length > size)); +//# sourceMappingURL=header.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/index.js new file mode 100644 index 00000000000000..e93ed5ad54aa6e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/index.js @@ -0,0 +1,54 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.u = exports.types = exports.r = exports.t = exports.x = exports.c = void 0; +__exportStar(require("./create.js"), exports); +var create_js_1 = require("./create.js"); +Object.defineProperty(exports, "c", { enumerable: true, get: function () { return create_js_1.create; } }); +__exportStar(require("./extract.js"), exports); +var extract_js_1 = require("./extract.js"); +Object.defineProperty(exports, "x", { enumerable: true, get: function () { return extract_js_1.extract; } }); +__exportStar(require("./header.js"), exports); +__exportStar(require("./list.js"), exports); +var list_js_1 = require("./list.js"); +Object.defineProperty(exports, "t", { enumerable: true, get: function () { return list_js_1.list; } }); +// classes +__exportStar(require("./pack.js"), exports); +__exportStar(require("./parse.js"), exports); +__exportStar(require("./pax.js"), exports); +__exportStar(require("./read-entry.js"), exports); +__exportStar(require("./replace.js"), exports); +var replace_js_1 = require("./replace.js"); +Object.defineProperty(exports, "r", { enumerable: true, get: function () { return replace_js_1.replace; } }); +exports.types = __importStar(require("./types.js")); +__exportStar(require("./unpack.js"), exports); +__exportStar(require("./update.js"), exports); +var update_js_1 = require("./update.js"); +Object.defineProperty(exports, "u", { enumerable: true, get: function () { return update_js_1.update; } }); +__exportStar(require("./write-entry.js"), exports); +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js new file mode 100644 index 
00000000000000..5b07aa7f71b48d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/large-numbers.js @@ -0,0 +1,99 @@ +"use strict"; +// Tar can encode large and negative numbers using a leading byte of +// 0xff for negative, and 0x80 for positive. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.parse = exports.encode = void 0; +const encode = (num, buf) => { + if (!Number.isSafeInteger(num)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('cannot encode number outside of javascript safe integer range'); + } + else if (num < 0) { + encodeNegative(num, buf); + } + else { + encodePositive(num, buf); + } + return buf; +}; +exports.encode = encode; +const encodePositive = (num, buf) => { + buf[0] = 0x80; + for (var i = buf.length; i > 1; i--) { + buf[i - 1] = num & 0xff; + num = Math.floor(num / 0x100); + } +}; +const encodeNegative = (num, buf) => { + buf[0] = 0xff; + var flipped = false; + num = num * -1; + for (var i = buf.length; i > 1; i--) { + var byte = num & 0xff; + num = Math.floor(num / 0x100); + if (flipped) { + buf[i - 1] = onesComp(byte); + } + else if (byte === 0) { + buf[i - 1] = 0; + } + else { + flipped = true; + buf[i - 1] = twosComp(byte); + } + } +}; +const parse = (buf) => { + const pre = buf[0]; + const value = pre === 0x80 ? pos(buf.subarray(1, buf.length)) + : pre === 0xff ? twos(buf) + : null; + if (value === null) { + throw Error('invalid base256 encoding'); + } + if (!Number.isSafeInteger(value)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('parsed number outside of javascript safe integer range'); + } + return value; +}; +exports.parse = parse; +const twos = (buf) => { + var len = buf.length; + var sum = 0; + var flipped = false; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + var f; + if (flipped) { + f = onesComp(byte); + } + else if (byte === 0) { + f = byte; + } + else { + flipped = true; + f = twosComp(byte); + } + if (f !== 0) { + sum -= f * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const pos = (buf) => { + var len = buf.length; + var sum = 0; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + if (byte !== 0) { + sum += byte * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const onesComp = (byte) => (0xff ^ byte) & 0xff; +const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff; +//# sourceMappingURL=large-numbers.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/list.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/list.js new file mode 100644 index 00000000000000..3cd34bb4bad481 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/list.js @@ -0,0 +1,136 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.list = exports.filesFilter = void 0; +// tar -t +const fsm = __importStar(require("@isaacs/fs-minipass")); +const node_fs_1 = __importDefault(require("node:fs")); +const path_1 = require("path"); +const make_command_js_1 = require("./make-command.js"); +const parse_js_1 = require("./parse.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const onReadEntryFunction = (opt) => { + const onReadEntry = opt.onReadEntry; + opt.onReadEntry = + onReadEntry ? + e => { + onReadEntry(e); + e.resume(); + } + : e => e.resume(); +}; +// construct a filter that limits the file entries listed +// include child entries if a dir is included +const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [(0, strip_trailing_slashes_js_1.stripTrailingSlashes)(f), true])); + const filter = opt.filter; + const mapHas = (file, r = '') => { + const root = r || (0, path_1.parse)(file).root || '.'; + let ret; + if (file === root) + ret = false; + else { + const m = map.get(file); + if (m !== undefined) { + ret = m; + } + else { + ret = mapHas((0, path_1.dirname)(file), root); + } + } + map.set(file, ret); + return ret; + }; + opt.filter = + filter ? 
+ (file, entry) => filter(file, entry) && mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file)) + : file => mapHas((0, strip_trailing_slashes_js_1.stripTrailingSlashes)(file)); +}; +exports.filesFilter = filesFilter; +const listFileSync = (opt) => { + const p = new parse_js_1.Parser(opt); + const file = opt.file; + let fd; + try { + const stat = node_fs_1.default.statSync(file); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + if (stat.size < readSize) { + p.end(node_fs_1.default.readFileSync(file)); + } + else { + let pos = 0; + const buf = Buffer.allocUnsafe(readSize); + fd = node_fs_1.default.openSync(file, 'r'); + while (pos < stat.size) { + const bytesRead = node_fs_1.default.readSync(fd, buf, 0, readSize, pos); + pos += bytesRead; + p.write(buf.subarray(0, bytesRead)); + } + p.end(); + } + } + finally { + if (typeof fd === 'number') { + try { + node_fs_1.default.closeSync(fd); + /* c8 ignore next */ + } + catch (er) { } + } + } +}; +const listFile = (opt, _files) => { + const parse = new parse_js_1.Parser(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + parse.on('error', reject); + parse.on('end', resolve); + node_fs_1.default.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(parse); + } + }); + }); + return p; +}; +exports.list = (0, make_command_js_1.makeCommand)(listFileSync, listFile, opt => new parse_js_1.Parser(opt), opt => new parse_js_1.Parser(opt), (opt, files) => { + if (files?.length) + (0, exports.filesFilter)(opt, files); + if (!opt.noResume) + onReadEntryFunction(opt); +}); +//# sourceMappingURL=list.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js new file mode 100644 index 00000000000000..1814319e78bc62 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/make-command.js @@ -0,0 +1,61 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.makeCommand = void 0; +const options_js_1 = require("./options.js"); +const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => { + return Object.assign((opt_ = [], entries, cb) => { + if (Array.isArray(opt_)) { + entries = opt_; + opt_ = {}; + } + if (typeof entries === 'function') { + cb = entries; + entries = undefined; + } + if (!entries) { + entries = []; + } + else { + entries = Array.from(entries); + } + const opt = (0, options_js_1.dealias)(opt_); + validate?.(opt, entries); + if ((0, options_js_1.isSyncFile)(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncFile(opt, entries); + } + else if ((0, options_js_1.isAsyncFile)(opt)) { + const p = asyncFile(opt, entries); + // weirdness to make TS happy + const c = cb ? cb : undefined; + return c ? 
p.then(() => c(), c) : p; + } + else if ((0, options_js_1.isSyncNoFile)(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncNoFile(opt, entries); + } + else if ((0, options_js_1.isAsyncNoFile)(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback only supported with file option'); + } + return asyncNoFile(opt, entries); + /* c8 ignore start */ + } + else { + throw new Error('impossible options??'); + } + /* c8 ignore stop */ + }, { + syncFile, + asyncFile, + syncNoFile, + asyncNoFile, + validate, + }); +}; +exports.makeCommand = makeCommand; +//# sourceMappingURL=make-command.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js new file mode 100644 index 00000000000000..2b13ecbab6723e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mkdir.js @@ -0,0 +1,209 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.mkdirSync = exports.mkdir = void 0; +const chownr_1 = require("chownr"); +const fs_1 = __importDefault(require("fs")); +const mkdirp_1 = require("mkdirp"); +const node_path_1 = __importDefault(require("node:path")); +const cwd_error_js_1 = require("./cwd-error.js"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +const symlink_error_js_1 = require("./symlink-error.js"); +const cGet = (cache, key) => cache.get((0, normalize_windows_path_js_1.normalizeWindowsPath)(key)); +const cSet = (cache, key, val) => cache.set((0, normalize_windows_path_js_1.normalizeWindowsPath)(key), val); +const checkCwd = (dir, cb) => { + fs_1.default.stat(dir, (er, st) => { + if (er || !st.isDirectory()) { + er = new cwd_error_js_1.CwdError(dir, er?.code || 'ENOTDIR'); + } + cb(er); + }); +}; +/** + * Wrapper around mkdirp for tar's needs. + * + * The main purpose is to avoid creating directories if we know that + * they already exist (and track which ones exist for this purpose), + * and prevent entries from being extracted into symlinked folders, + * if `preservePaths` is not set. + */ +const mkdir = (dir, opt, cb) => { + dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o0700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd); + const done = (er, created) => { + if (er) { + cb(er); + } + else { + cSet(cache, dir, true); + if (created && doChown) { + (0, chownr_1.chownr)(created, uid, gid, er => done(er)); + } + else if (needChmod) { + fs_1.default.chmod(dir, mode, cb); + } + else { + cb(); + } + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + return checkCwd(dir, done); + } + if (preserve) { + return (0, mkdirp_1.mkdirp)(dir, { mode }).then(made => done(null, made ?? 
undefined), // oh, ts + done); + } + const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir)); + const parts = sub.split('/'); + mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done); +}; +exports.mkdir = mkdir; +const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { + if (!parts.length) { + return cb(null, created); + } + const p = parts.shift(); + const part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(base + '/' + p)); + if (cGet(cache, part)) { + return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); +}; +const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => { + if (er) { + fs_1.default.lstat(part, (statEr, st) => { + if (statEr) { + statEr.path = + statEr.path && (0, normalize_windows_path_js_1.normalizeWindowsPath)(statEr.path); + cb(statEr); + } + else if (st.isDirectory()) { + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + else if (unlink) { + fs_1.default.unlink(part, er => { + if (er) { + return cb(er); + } + fs_1.default.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); + }); + } + else if (st.isSymbolicLink()) { + return cb(new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/'))); + } + else { + cb(er); + } + }); + } + else { + created = created || part; + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } +}; +const checkCwdSync = (dir) => { + let ok = false; + let code = undefined; + try { + ok = fs_1.default.statSync(dir).isDirectory(); + } + catch (er) { + code = er?.code; + } + finally { + if (!ok) { + throw new cwd_error_js_1.CwdError(dir, code ?? 'ENOTDIR'); + } + } +}; +const mkdirSync = (dir, opt) => { + dir = (0, normalize_windows_path_js_1.normalizeWindowsPath)(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd); + const done = (created) => { + cSet(cache, dir, true); + if (created && doChown) { + (0, chownr_1.chownrSync)(created, uid, gid); + } + if (needChmod) { + fs_1.default.chmodSync(dir, mode); + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + checkCwdSync(cwd); + return done(); + } + if (preserve) { + return done((0, mkdirp_1.mkdirpSync)(dir, mode) ?? 
undefined); + } + const sub = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.relative(cwd, dir)); + const parts = sub.split('/'); + let created = undefined; + for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) { + part = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(part)); + if (cGet(cache, part)) { + continue; + } + try { + fs_1.default.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + } + catch (er) { + const st = fs_1.default.lstatSync(part); + if (st.isDirectory()) { + cSet(cache, part, true); + continue; + } + else if (unlink) { + fs_1.default.unlinkSync(part); + fs_1.default.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + continue; + } + else if (st.isSymbolicLink()) { + return new symlink_error_js_1.SymlinkError(part, part + '/' + parts.join('/')); + } + } + } + return done(created); +}; +exports.mkdirSync = mkdirSync; +//# sourceMappingURL=mkdir.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js new file mode 100644 index 00000000000000..49dd727961d290 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/mode-fix.js @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.modeFix = void 0; +const modeFix = (mode, isDir, portable) => { + mode &= 0o7777; + // in portable mode, use the minimum reasonable umask + // if this system creates files with 0o664 by default + // (as some linux distros do), then we'll write the + // archive with 0o644 instead. Also, don't ever create + // a file that is not readable/writable by the owner. + if (portable) { + mode = (mode | 0o600) & ~0o22; + } + // if dirs are readable, then they should be listable + if (isDir) { + if (mode & 0o400) { + mode |= 0o100; + } + if (mode & 0o40) { + mode |= 0o10; + } + if (mode & 0o4) { + mode |= 0o1; + } + } + return mode; +}; +exports.modeFix = modeFix; +//# sourceMappingURL=mode-fix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js new file mode 100644 index 00000000000000..2f08ce46d98c4c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-unicode.js @@ -0,0 +1,17 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.normalizeUnicode = void 0; +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. 
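+// The cache is a null-prototype object, so path strings such as
+// "__proto__" or "constructor" are safe keys; hasOwnProperty is
+// borrowed from Object.prototype below because the cache itself
+// inherits no methods. NFD results are memoized because the same
+// path strings recur many times during a large extraction.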
+const normalizeCache = Object.create(null); +const { hasOwnProperty } = Object.prototype; +const normalizeUnicode = (s) => { + if (!hasOwnProperty.call(normalizeCache, s)) { + normalizeCache[s] = s.normalize('NFD'); + } + return normalizeCache[s]; +}; +exports.normalizeUnicode = normalizeUnicode; +//# sourceMappingURL=normalize-unicode.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js new file mode 100644 index 00000000000000..b0c7aaa9f2d175 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/normalize-windows-path.js @@ -0,0 +1,12 @@ +"use strict"; +// on windows, either \ or / are valid directory separators. +// on unix, \ is a valid character in filenames. +// so, on windows, and only on windows, we replace all \ chars with /, +// so that we can use / as our one and only directory separator char. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.normalizeWindowsPath = void 0; +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +exports.normalizeWindowsPath = platform !== 'win32' ? + (p) => p + : (p) => p && p.replace(/\\/g, '/'); +//# sourceMappingURL=normalize-windows-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/options.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/options.js new file mode 100644 index 00000000000000..4cd06505bc72b2 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/options.js @@ -0,0 +1,66 @@ +"use strict"; +// turn tar(1) style args like `C` into the more verbose things like `cwd` +Object.defineProperty(exports, "__esModule", { value: true }); +exports.dealias = exports.isNoFile = exports.isFile = exports.isAsync = exports.isSync = exports.isAsyncNoFile = exports.isSyncNoFile = exports.isAsyncFile = exports.isSyncFile = void 0; +const argmap = new Map([ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'], + ['onentry', 'onReadEntry'], +]); +const isSyncFile = (o) => !!o.sync && !!o.file; +exports.isSyncFile = isSyncFile; +const isAsyncFile = (o) => !o.sync && !!o.file; +exports.isAsyncFile = isAsyncFile; +const isSyncNoFile = (o) => !!o.sync && !o.file; +exports.isSyncNoFile = isSyncNoFile; +const isAsyncNoFile = (o) => !o.sync && !o.file; +exports.isAsyncNoFile = isAsyncNoFile; +const isSync = (o) => !!o.sync; +exports.isSync = isSync; +const isAsync = (o) => !o.sync; +exports.isAsync = isAsync; +const isFile = (o) => !!o.file; +exports.isFile = isFile; +const isNoFile = (o) => !o.file; +exports.isNoFile = isNoFile; +const dealiasKey = (k) => { + const d = argmap.get(k); + if (d) + return d; + return k; +}; +const dealias = (opt = {}) => { + if (!opt) + return {}; + const result = {}; + for (const [key, v] of Object.entries(opt)) { + // TS doesn't know that aliases are going to always be the same type + const k = dealiasKey(key); + result[k] = v; + } + // affordance for deprecated noChmod -> chmod + 
if (result.chmod === undefined && result.noChmod === false) { + result.chmod = true; + } + delete result.noChmod; + return result; +}; +exports.dealias = dealias; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js new file mode 100644 index 00000000000000..303e93063c2db4 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pack.js @@ -0,0 +1,477 @@ +"use strict"; +// A readable tar stream creator +// Technically, this is a transform stream that you write paths into, +// and tar format comes out of. +// The `add()` method is like `write()` but returns this, +// and end() return `this` as well, so you can +// do `new Pack(opt).add('files').add('dir').end().pipe(output) +// You could also do something like: +// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PackSync = exports.Pack = exports.PackJob = void 0; +const fs_1 = __importDefault(require("fs")); +const write_entry_js_1 = require("./write-entry.js"); +class PackJob { + path; + absolute; + entry; + stat; + readdir; + pending = false; + ignore = false; + piped = false; + constructor(path, absolute) { + this.path = path || './'; + this.absolute = absolute; + } +} +exports.PackJob = PackJob; +const minipass_1 = require("minipass"); +const zlib = __importStar(require("minizlib")); +const yallist_1 = require("yallist"); +const read_entry_js_1 = require("./read-entry.js"); +const warn_method_js_1 = require("./warn-method.js"); +const EOF = Buffer.alloc(1024); +const ONSTAT = Symbol('onStat'); +const ENDED = Symbol('ended'); +const QUEUE = Symbol('queue'); +const CURRENT = Symbol('current'); +const PROCESS = Symbol('process'); +const PROCESSING = Symbol('processing'); +const PROCESSJOB = Symbol('processJob'); +const JOBS = Symbol('jobs'); +const JOBDONE = Symbol('jobDone'); +const ADDFSENTRY = Symbol('addFSEntry'); +const ADDTARENTRY = Symbol('addTarEntry'); +const STAT = Symbol('stat'); +const READDIR = Symbol('readdir'); +const ONREADDIR = Symbol('onreaddir'); +const PIPE = Symbol('pipe'); +const ENTRY = Symbol('entry'); +const ENTRYOPT = Symbol('entryOpt'); +const WRITEENTRYCLASS = Symbol('writeEntryClass'); +const WRITE = Symbol('write'); +const ONDRAIN = Symbol('ondrain'); +const path_1 = __importDefault(require("path")); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +class Pack extends minipass_1.Minipass { + opt; + cwd; + maxReadSize; + preservePaths; + strict; + noPax; + prefix; + linkCache; + statCache; + file; + portable; + zip; + readdirCache; + noDirRecurse; + follow; + noMtime; + mtime; + filter; + jobs; + [WRITEENTRYCLASS]; + onWriteEntry; + [QUEUE]; + [JOBS] = 0; + [PROCESSING] = false; + [ENDED] = false; + constructor(opt = {}) { + //@ts-ignore + super(); + this.opt = opt; + this.file = opt.file || ''; + this.cwd = opt.cwd || process.cwd(); + this.maxReadSize = opt.maxReadSize; + this.preservePaths = !!opt.preservePaths; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.prefix = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix || ''); + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.readdirCache = opt.readdirCache || new Map(); + this.onWriteEntry = opt.onWriteEntry; + this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + this.portable = !!opt.portable; + if (opt.gzip || opt.brotli) { + if (opt.gzip && opt.brotli) { + throw new TypeError('gzip and brotli are mutually exclusive'); + } + if (opt.gzip) { + if (typeof opt.gzip !== 'object') { + opt.gzip = {}; + } + if (this.portable) { + opt.gzip.portable = true; + } + this.zip = new zlib.Gzip(opt.gzip); + } + if (opt.brotli) { + if (typeof opt.brotli !== 'object') { + opt.brotli = {}; + } + this.zip = new zlib.BrotliCompress(opt.brotli); + } + /* c8 ignore next */ + if (!this.zip) + throw new Error('impossible'); + const zip = this.zip; + zip.on('data', chunk => super.write(chunk)); + zip.on('end', () => super.end()); + zip.on('drain', () => this[ONDRAIN]()); + this.on('resume', () => zip.resume()); + } + else { + this.on('drain', this[ONDRAIN]); + } + this.noDirRecurse = !!opt.noDirRecurse; + this.follow = !!opt.follow; + this.noMtime = 
!!opt.noMtime; + if (opt.mtime) + this.mtime = opt.mtime; + this.filter = + typeof opt.filter === 'function' ? opt.filter : () => true; + this[QUEUE] = new yallist_1.Yallist(); + this[JOBS] = 0; + this.jobs = Number(opt.jobs) || 4; + this[PROCESSING] = false; + this[ENDED] = false; + } + [WRITE](chunk) { + return super.write(chunk); + } + add(path) { + this.write(path); + return this; + } + end(path, encoding, cb) { + /* c8 ignore start */ + if (typeof path === 'function') { + cb = path; + path = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (path) { + this.add(path); + } + this[ENDED] = true; + this[PROCESS](); + /* c8 ignore next */ + if (cb) + cb(); + return this; + } + write(path) { + if (this[ENDED]) { + throw new Error('write after end'); + } + if (path instanceof read_entry_js_1.ReadEntry) { + this[ADDTARENTRY](path); + } + else { + this[ADDFSENTRY](path); + } + return this.flowing; + } + [ADDTARENTRY](p) { + const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p.path)); + // in this case, we don't have to wait for the stat + if (!this.filter(p.path, p)) { + p.resume(); + } + else { + const job = new PackJob(p.path, absolute); + job.entry = new write_entry_js_1.WriteEntryTar(p, this[ENTRYOPT](job)); + job.entry.on('end', () => this[JOBDONE](job)); + this[JOBS] += 1; + this[QUEUE].push(job); + } + this[PROCESS](); + } + [ADDFSENTRY](p) { + const absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.resolve(this.cwd, p)); + this[QUEUE].push(new PackJob(p, absolute)); + this[PROCESS](); + } + [STAT](job) { + job.pending = true; + this[JOBS] += 1; + const stat = this.follow ? 'stat' : 'lstat'; + fs_1.default[stat](job.absolute, (er, stat) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + this.emit('error', er); + } + else { + this[ONSTAT](job, stat); + } + }); + } + [ONSTAT](job, stat) { + this.statCache.set(job.absolute, stat); + job.stat = stat; + // now we have the stat, we can filter it. 
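+ // Note the two filter call shapes: ReadEntry passthroughs in
+ // ADDTARENTRY are filtered as (path, entry) before any stat exists,
+ // while filesystem jobs here get (path, stat), so a filter may
+ // inspect size or isDirectory() for on-disk entries.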
+ if (!this.filter(job.path, stat)) { + job.ignore = true; + } + this[PROCESS](); + } + [READDIR](job) { + job.pending = true; + this[JOBS] += 1; + fs_1.default.readdir(job.absolute, (er, entries) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + return this.emit('error', er); + } + this[ONREADDIR](job, entries); + }); + } + [ONREADDIR](job, entries) { + this.readdirCache.set(job.absolute, entries); + job.readdir = entries; + this[PROCESS](); + } + [PROCESS]() { + if (this[PROCESSING]) { + return; + } + this[PROCESSING] = true; + for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) { + this[PROCESSJOB](w.value); + if (w.value.ignore) { + const p = w.next; + this[QUEUE].removeNode(w); + w.next = p; + } + } + this[PROCESSING] = false; + if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { + if (this.zip) { + this.zip.end(EOF); + } + else { + super.write(EOF); + super.end(); + } + } + } + get [CURRENT]() { + return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value; + } + [JOBDONE](_job) { + this[QUEUE].shift(); + this[JOBS] -= 1; + this[PROCESS](); + } + [PROCESSJOB](job) { + if (job.pending) { + return; + } + if (job.entry) { + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + return; + } + if (!job.stat) { + const sc = this.statCache.get(job.absolute); + if (sc) { + this[ONSTAT](job, sc); + } + else { + this[STAT](job); + } + } + if (!job.stat) { + return; + } + // filtered out! + if (job.ignore) { + return; + } + if (!this.noDirRecurse && + job.stat.isDirectory() && + !job.readdir) { + const rc = this.readdirCache.get(job.absolute); + if (rc) { + this[ONREADDIR](job, rc); + } + else { + this[READDIR](job); + } + if (!job.readdir) { + return; + } + } + // we know it doesn't have an entry, because that got checked above + job.entry = this[ENTRY](job); + if (!job.entry) { + job.ignore = true; + return; + } + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + } + [ENTRYOPT](job) { + return { + onwarn: (code, msg, data) => this.warn(code, msg, data), + noPax: this.noPax, + cwd: this.cwd, + absolute: job.absolute, + preservePaths: this.preservePaths, + maxReadSize: this.maxReadSize, + strict: this.strict, + portable: this.portable, + linkCache: this.linkCache, + statCache: this.statCache, + noMtime: this.noMtime, + mtime: this.mtime, + prefix: this.prefix, + onWriteEntry: this.onWriteEntry, + }; + } + [ENTRY](job) { + this[JOBS] += 1; + try { + const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)); + return e + .on('end', () => this[JOBDONE](job)) + .on('error', er => this.emit('error', er)); + } + catch (er) { + this.emit('error', er); + } + } + [ONDRAIN]() { + if (this[CURRENT] && this[CURRENT].entry) { + this[CURRENT].entry.resume(); + } + } + // like .pipe() but using super, because our write() is special + [PIPE](job) { + job.piped = true; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? 
'' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + const source = job.entry; + const zip = this.zip; + /* c8 ignore start */ + if (!source) + throw new Error('cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + if (!zip.write(chunk)) { + source.pause(); + } + }); + } + else { + source.on('data', chunk => { + if (!super.write(chunk)) { + source.pause(); + } + }); + } + } + pause() { + if (this.zip) { + this.zip.pause(); + } + return super.pause(); + } + warn(code, message, data = {}) { + (0, warn_method_js_1.warnMethod)(this, code, message, data); + } +} +exports.Pack = Pack; +class PackSync extends Pack { + sync = true; + constructor(opt) { + super(opt); + this[WRITEENTRYCLASS] = write_entry_js_1.WriteEntrySync; + } + // pause/resume are no-ops in sync streams. + pause() { } + resume() { } + [STAT](job) { + const stat = this.follow ? 'statSync' : 'lstatSync'; + this[ONSTAT](job, fs_1.default[stat](job.absolute)); + } + [READDIR](job) { + this[ONREADDIR](job, fs_1.default.readdirSync(job.absolute)); + } + // gotta get it all in this tick + [PIPE](job) { + const source = job.entry; + const zip = this.zip; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? '' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + /* c8 ignore start */ + if (!source) + throw new Error('Cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + zip.write(chunk); + }); + } + else { + source.on('data', chunk => { + super[WRITE](chunk); + }); + } + } +} +exports.PackSync = PackSync; +//# sourceMappingURL=pack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js new file mode 100644 index 00000000000000..1f7e5fd65e869f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/parse.js @@ -0,0 +1,599 @@ +"use strict"; +// this[BUFFER] is the remainder of a chunk if we're waiting for +// the full 512 bytes of a header to come in. We will Buffer.concat() +// it to the next write(), which is a mem copy, but a small one. +// +// this[QUEUE] is a Yallist of entries that haven't been emitted +// yet this can only get filled up if the user keeps write()ing after +// a write() returns false, or does a write() with more than one entry +// +// We don't buffer chunks, we always parse them and either create an +// entry, or push it into the active entry. The ReadEntry class knows +// to throw data away if .ignore=true +// +// Shift entry off the buffer when it emits 'end', and emit 'entry' for +// the next one in the list. 
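+//
+// Note that this[QUEUE] holds two shapes of item: ReadEntry objects
+// waiting to be emitted as 'entry' events, and [event, data, extra]
+// tuples buffered by EMIT while a consumer is still mid-entry;
+// PROCESSENTRY distinguishes the two with Array.isArray.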
+// +// At any time, we're pushing body chunks into the entry at WRITEENTRY, +// and waiting for 'end' on the entry at READENTRY +// +// ignored entries get .resume() called on them straight away +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Parser = void 0; +const events_1 = require("events"); +const minizlib_1 = require("minizlib"); +const yallist_1 = require("yallist"); +const header_js_1 = require("./header.js"); +const pax_js_1 = require("./pax.js"); +const read_entry_js_1 = require("./read-entry.js"); +const warn_method_js_1 = require("./warn-method.js"); +const maxMetaEntrySize = 1024 * 1024; +const gzipHeader = Buffer.from([0x1f, 0x8b]); +const STATE = Symbol('state'); +const WRITEENTRY = Symbol('writeEntry'); +const READENTRY = Symbol('readEntry'); +const NEXTENTRY = Symbol('nextEntry'); +const PROCESSENTRY = Symbol('processEntry'); +const EX = Symbol('extendedHeader'); +const GEX = Symbol('globalExtendedHeader'); +const META = Symbol('meta'); +const EMITMETA = Symbol('emitMeta'); +const BUFFER = Symbol('buffer'); +const QUEUE = Symbol('queue'); +const ENDED = Symbol('ended'); +const EMITTEDEND = Symbol('emittedEnd'); +const EMIT = Symbol('emit'); +const UNZIP = Symbol('unzip'); +const CONSUMECHUNK = Symbol('consumeChunk'); +const CONSUMECHUNKSUB = Symbol('consumeChunkSub'); +const CONSUMEBODY = Symbol('consumeBody'); +const CONSUMEMETA = Symbol('consumeMeta'); +const CONSUMEHEADER = Symbol('consumeHeader'); +const CONSUMING = Symbol('consuming'); +const BUFFERCONCAT = Symbol('bufferConcat'); +const MAYBEEND = Symbol('maybeEnd'); +const WRITING = Symbol('writing'); +const ABORTED = Symbol('aborted'); +const DONE = Symbol('onDone'); +const SAW_VALID_ENTRY = Symbol('sawValidEntry'); +const SAW_NULL_BLOCK = Symbol('sawNullBlock'); +const SAW_EOF = Symbol('sawEOF'); +const CLOSESTREAM = Symbol('closeStream'); +const noop = () => true; +class Parser extends events_1.EventEmitter { + file; + strict; + maxMetaEntrySize; + filter; + brotli; + writable = true; + readable = false; + [QUEUE] = new yallist_1.Yallist(); + [BUFFER]; + [READENTRY]; + [WRITEENTRY]; + [STATE] = 'begin'; + [META] = ''; + [EX]; + [GEX]; + [ENDED] = false; + [UNZIP]; + [ABORTED] = false; + [SAW_VALID_ENTRY]; + [SAW_NULL_BLOCK] = false; + [SAW_EOF] = false; + [WRITING] = false; + [CONSUMING] = false; + [EMITTEDEND] = false; + constructor(opt = {}) { + super(); + this.file = opt.file || ''; + // these BADARCHIVE errors can't be detected early. listen on DONE. + this.on(DONE, () => { + if (this[STATE] === 'begin' || + this[SAW_VALID_ENTRY] === false) { + // either less than 1 block of data, or all entries were invalid. + // Either way, probably not even a tarball. + this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format'); + } + }); + if (opt.ondone) { + this.on(DONE, opt.ondone); + } + else { + this.on(DONE, () => { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + }); + } + this.strict = !!opt.strict; + this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize; + this.filter = typeof opt.filter === 'function' ? opt.filter : noop; + // Unlike gzip, brotli doesn't have any magic bytes to identify it + // Users need to explicitly tell us they're extracting a brotli file + // Or we infer from the file extension + const isTBR = opt.file && + (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')); + // if it's a tbr file it MIGHT be brotli, but we don't know until + // we look at it and verify it's not a valid tar file. 
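+ // Three-valued flag: true and false are final decisions; undefined
+ // (the .tbr/.tar.br case with no explicit option) defers to the
+ // first write(), which assumes brotli only if the initial bytes do
+ // not parse as a valid tar header.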
+ this.brotli = + !opt.gzip && opt.brotli !== undefined ? opt.brotli + : isTBR ? undefined + : false; + // have to set this so that streams are ok piping into it + this.on('end', () => this[CLOSESTREAM]()); + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + if (typeof opt.onReadEntry === 'function') { + this.on('entry', opt.onReadEntry); + } + } + warn(code, message, data = {}) { + (0, warn_method_js_1.warnMethod)(this, code, message, data); + } + [CONSUMEHEADER](chunk, position) { + if (this[SAW_VALID_ENTRY] === undefined) { + this[SAW_VALID_ENTRY] = false; + } + let header; + try { + header = new header_js_1.Header(chunk, position, this[EX], this[GEX]); + } + catch (er) { + return this.warn('TAR_ENTRY_INVALID', er); + } + if (header.nullBlock) { + if (this[SAW_NULL_BLOCK]) { + this[SAW_EOF] = true; + // ending an archive with no entries. pointless, but legal. + if (this[STATE] === 'begin') { + this[STATE] = 'header'; + } + this[EMIT]('eof'); + } + else { + this[SAW_NULL_BLOCK] = true; + this[EMIT]('nullBlock'); + } + } + else { + this[SAW_NULL_BLOCK] = false; + if (!header.cksumValid) { + this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }); + } + else if (!header.path) { + this.warn('TAR_ENTRY_INVALID', 'path is required', { header }); + } + else { + const type = header.type; + if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath required', { + header, + }); + } + else if (!/^(Symbolic)?Link$/.test(type) && + !/^(Global)?ExtendedHeader$/.test(type) && + header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { + header, + }); + } + else { + const entry = (this[WRITEENTRY] = new read_entry_js_1.ReadEntry(header, this[EX], this[GEX])); + // we do this for meta & ignored entries as well, because they + // are still valid tar, or else we wouldn't know to ignore them + if (!this[SAW_VALID_ENTRY]) { + if (entry.remain) { + // this might be the one! + const onend = () => { + if (!entry.invalid) { + this[SAW_VALID_ENTRY] = true; + } + }; + entry.on('end', onend); + } + else { + this[SAW_VALID_ENTRY] = true; + } + } + if (entry.meta) { + if (entry.size > this.maxMetaEntrySize) { + entry.ignore = true; + this[EMIT]('ignoredEntry', entry); + this[STATE] = 'ignore'; + entry.resume(); + } + else if (entry.size > 0) { + this[META] = ''; + entry.on('data', c => (this[META] += c)); + this[STATE] = 'meta'; + } + } + else { + this[EX] = undefined; + entry.ignore = + entry.ignore || !this.filter(entry.path, entry); + if (entry.ignore) { + // probably valid, just not something we care about + this[EMIT]('ignoredEntry', entry); + this[STATE] = entry.remain ? 
'ignore' : 'header'; + entry.resume(); + } + else { + if (entry.remain) { + this[STATE] = 'body'; + } + else { + this[STATE] = 'header'; + entry.end(); + } + if (!this[READENTRY]) { + this[QUEUE].push(entry); + this[NEXTENTRY](); + } + else { + this[QUEUE].push(entry); + } + } + } + } + } + } + } + [CLOSESTREAM]() { + queueMicrotask(() => this.emit('close')); + } + [PROCESSENTRY](entry) { + let go = true; + if (!entry) { + this[READENTRY] = undefined; + go = false; + } + else if (Array.isArray(entry)) { + const [ev, ...args] = entry; + this.emit(ev, ...args); + } + else { + this[READENTRY] = entry; + this.emit('entry', entry); + if (!entry.emittedEnd) { + entry.on('end', () => this[NEXTENTRY]()); + go = false; + } + } + return go; + } + [NEXTENTRY]() { + do { } while (this[PROCESSENTRY](this[QUEUE].shift())); + if (!this[QUEUE].length) { + // At this point, there's nothing in the queue, but we may have an + // entry which is being consumed (readEntry). + // If we don't, then we definitely can handle more data. + // If we do, and either it's flowing, or it has never had any data + // written to it, then it needs more. + // The only other possibility is that it has returned false from a + // write() call, so we wait for the next drain to continue. + const re = this[READENTRY]; + const drainNow = !re || re.flowing || re.size === re.remain; + if (drainNow) { + if (!this[WRITING]) { + this.emit('drain'); + } + } + else { + re.once('drain', () => this.emit('drain')); + } + } + } + [CONSUMEBODY](chunk, position) { + // write up to but no more than writeEntry.blockRemain + const entry = this[WRITEENTRY]; + /* c8 ignore start */ + if (!entry) { + throw new Error('attempt to consume body without entry??'); + } + const br = entry.blockRemain ?? 0; + /* c8 ignore stop */ + const c = br >= chunk.length && position === 0 ? + chunk + : chunk.subarray(position, position + br); + entry.write(c); + if (!entry.blockRemain) { + this[STATE] = 'header'; + this[WRITEENTRY] = undefined; + entry.end(); + } + return c.length; + } + [CONSUMEMETA](chunk, position) { + const entry = this[WRITEENTRY]; + const ret = this[CONSUMEBODY](chunk, position); + // if we finished, then the entry is reset + if (!this[WRITEENTRY] && entry) { + this[EMITMETA](entry); + } + return ret; + } + [EMIT](ev, data, extra) { + if (!this[QUEUE].length && !this[READENTRY]) { + this.emit(ev, data, extra); + } + else { + this[QUEUE].push([ev, data, extra]); + } + } + [EMITMETA](entry) { + this[EMIT]('meta', this[META]); + switch (entry.type) { + case 'ExtendedHeader': + case 'OldExtendedHeader': + this[EX] = pax_js_1.Pax.parse(this[META], this[EX], false); + break; + case 'GlobalExtendedHeader': + this[GEX] = pax_js_1.Pax.parse(this[META], this[GEX], true); + break; + case 'NextFileHasLongPath': + case 'OldGnuLongPath': { + const ex = this[EX] ?? 
Object.create(null); + this[EX] = ex; + ex.path = this[META].replace(/\0.*/, ''); + break; + } + case 'NextFileHasLongLinkpath': { + const ex = this[EX] || Object.create(null); + this[EX] = ex; + ex.linkpath = this[META].replace(/\0.*/, ''); + break; + } + /* c8 ignore start */ + default: + throw new Error('unknown meta: ' + entry.type); + /* c8 ignore stop */ + } + } + abort(error) { + this[ABORTED] = true; + this.emit('abort', error); + // always throws, even in non-strict mode + this.warn('TAR_ABORT', error, { recoverable: false }); + } + write(chunk, encoding, cb) { + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, + /* c8 ignore next */ + typeof encoding === 'string' ? encoding : 'utf8'); + } + if (this[ABORTED]) { + /* c8 ignore next */ + cb?.(); + return false; + } + // first write, might be gzipped + const needSniff = this[UNZIP] === undefined || + (this.brotli === undefined && this[UNZIP] === false); + if (needSniff && chunk) { + if (this[BUFFER]) { + chunk = Buffer.concat([this[BUFFER], chunk]); + this[BUFFER] = undefined; + } + if (chunk.length < gzipHeader.length) { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + // look for gzip header + for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) { + if (chunk[i] !== gzipHeader[i]) { + this[UNZIP] = false; + } + } + const maybeBrotli = this.brotli === undefined; + if (this[UNZIP] === false && maybeBrotli) { + // read the first header to see if it's a valid tar file. If so, + // we can safely assume that it's not actually brotli, despite the + // .tbr or .tar.br file extension. + // if we ended before getting a full chunk, yes, def brotli + if (chunk.length < 512) { + if (this[ENDED]) { + this.brotli = true; + } + else { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + } + else { + // if it's tar, it's pretty reliably not brotli, chances of + // that happening are astronomical. + try { + new header_js_1.Header(chunk.subarray(0, 512)); + this.brotli = false; + } + catch (_) { + this.brotli = true; + } + } + } + if (this[UNZIP] === undefined || + (this[UNZIP] === false && this.brotli)) { + const ended = this[ENDED]; + this[ENDED] = false; + this[UNZIP] = + this[UNZIP] === undefined ? + new minizlib_1.Unzip({}) + : new minizlib_1.BrotliDecompress({}); + this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)); + this[UNZIP].on('error', er => this.abort(er)); + this[UNZIP].on('end', () => { + this[ENDED] = true; + this[CONSUMECHUNK](); + }); + this[WRITING] = true; + const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk); + this[WRITING] = false; + cb?.(); + return ret; + } + } + this[WRITING] = true; + if (this[UNZIP]) { + this[UNZIP].write(chunk); + } + else { + this[CONSUMECHUNK](chunk); + } + this[WRITING] = false; + // return false if there's a queue, or if the current entry isn't flowing + const ret = this[QUEUE].length ? false + : this[READENTRY] ? this[READENTRY].flowing + : true; + // if we have no queue, then that means a clogged READENTRY + if (!ret && !this[QUEUE].length) { + this[READENTRY]?.once('drain', () => this.emit('drain')); + } + /* c8 ignore next */ + cb?.(); + return ret; + } + [BUFFERCONCAT](c) { + if (c && !this[ABORTED]) { + this[BUFFER] = + this[BUFFER] ? 
Buffer.concat([this[BUFFER], c]) : c; + } + } + [MAYBEEND]() { + if (this[ENDED] && + !this[EMITTEDEND] && + !this[ABORTED] && + !this[CONSUMING]) { + this[EMITTEDEND] = true; + const entry = this[WRITEENTRY]; + if (entry && entry.blockRemain) { + // truncated, likely a damaged file + const have = this[BUFFER] ? this[BUFFER].length : 0; + this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry }); + if (this[BUFFER]) { + entry.write(this[BUFFER]); + } + entry.end(); + } + this[EMIT](DONE); + } + } + [CONSUMECHUNK](chunk) { + if (this[CONSUMING] && chunk) { + this[BUFFERCONCAT](chunk); + } + else if (!chunk && !this[BUFFER]) { + this[MAYBEEND](); + } + else if (chunk) { + this[CONSUMING] = true; + if (this[BUFFER]) { + this[BUFFERCONCAT](chunk); + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + else { + this[CONSUMECHUNKSUB](chunk); + } + while (this[BUFFER] && + this[BUFFER]?.length >= 512 && + !this[ABORTED] && + !this[SAW_EOF]) { + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + this[CONSUMING] = false; + } + if (!this[BUFFER] || this[ENDED]) { + this[MAYBEEND](); + } + } + [CONSUMECHUNKSUB](chunk) { + // we know that we are in CONSUMING mode, so anything written goes into + // the buffer. Advance the position and put any remainder in the buffer. + let position = 0; + const length = chunk.length; + while (position + 512 <= length && + !this[ABORTED] && + !this[SAW_EOF]) { + switch (this[STATE]) { + case 'begin': + case 'header': + this[CONSUMEHEADER](chunk, position); + position += 512; + break; + case 'ignore': + case 'body': + position += this[CONSUMEBODY](chunk, position); + break; + case 'meta': + position += this[CONSUMEMETA](chunk, position); + break; + /* c8 ignore start */ + default: + throw new Error('invalid state: ' + this[STATE]); + /* c8 ignore stop */ + } + } + if (position < length) { + if (this[BUFFER]) { + this[BUFFER] = Buffer.concat([ + chunk.subarray(position), + this[BUFFER], + ]); + } + else { + this[BUFFER] = chunk.subarray(position); + } + } + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + if (cb) + this.once('finish', cb); + if (!this[ABORTED]) { + if (this[UNZIP]) { + /* c8 ignore start */ + if (chunk) + this[UNZIP].write(chunk); + /* c8 ignore stop */ + this[UNZIP].end(); + } + else { + this[ENDED] = true; + if (this.brotli === undefined) + chunk = chunk || Buffer.alloc(0); + if (chunk) + this.write(chunk); + this[MAYBEEND](); + } + } + return this; + } +} +exports.Parser = Parser; +//# sourceMappingURL=parse.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js new file mode 100644 index 00000000000000..9ff391c44092c7 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/path-reservations.js @@ -0,0 +1,170 @@ +"use strict"; +// A path exclusive reservation system +// reserve([list, of, paths], fn) +// When the fn is first in line for all its paths, it +// is called with a cb that clears the reservation. 
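// A sketch of that contract, assuming only the PathReservations class
// exported below; the paths are hypothetical. The second callback cannot
// run until the first calls done(), because their reservations overlap:
const { PathReservations } = require('./path-reservations.js');
const reservations = new PathReservations();
reservations.reserve(['a/b/target'], done => {
    // ... write the file a/b/target ...
    done(); // clears the reservation and unblocks any waiters
});
reservations.reserve(['a/b/target', 'a/b/link'], done => {
    // ... now safe to link a/b/link -> a/b/target ...
    done();
});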
+// +// Used by async unpack to avoid clobbering paths in use, +// while still allowing maximal safe parallelization. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.PathReservations = void 0; +const node_path_1 = require("node:path"); +const normalize_unicode_js_1 = require("./normalize-unicode.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +// return a set of parent dirs for a given path +// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] +const getDirs = (path) => { + const dirs = path + .split('/') + .slice(0, -1) + .reduce((set, path) => { + const s = set[set.length - 1]; + if (s !== undefined) { + path = (0, node_path_1.join)(s, path); + } + set.push(path || '/'); + return set; + }, []); + return dirs; +}; +class PathReservations { + // path => [function or Set] + // A Set object means a directory reservation + // A fn is a direct reservation on that path + #queues = new Map(); + // fn => {paths:[path,...], dirs:[path, ...]} + #reservations = new Map(); + // functions currently running + #running = new Set(); + reserve(paths, fn) { + paths = + isWindows ? + ['win32 parallelization disabled'] + : paths.map(p => { + // don't need normPath, because we skip this entirely for windows + return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, node_path_1.join)((0, normalize_unicode_js_1.normalizeUnicode)(p))).toLowerCase(); + }); + const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))); + this.#reservations.set(fn, { dirs, paths }); + for (const p of paths) { + const q = this.#queues.get(p); + if (!q) { + this.#queues.set(p, [fn]); + } + else { + q.push(fn); + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + if (!q) { + this.#queues.set(dir, [new Set([fn])]); + } + else { + const l = q[q.length - 1]; + if (l instanceof Set) { + l.add(fn); + } + else { + q.push(new Set([fn])); + } + } + } + return this.#run(fn); + } + // return the queues for each path the function cares about + // fn => {paths, dirs} + #getQueues(fn) { + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('function does not have any path reservations'); + } + /* c8 ignore stop */ + return { + paths: res.paths.map((path) => this.#queues.get(path)), + dirs: [...res.dirs].map(path => this.#queues.get(path)), + }; + } + // check if fn is first in line for all its paths, and is + // included in the first set for all its dir queues + check(fn) { + const { paths, dirs } = this.#getQueues(fn); + return (paths.every(q => q && q[0] === fn) && + dirs.every(q => q && q[0] instanceof Set && q[0].has(fn))); + } + // run the function if it's first in line and not already running + #run(fn) { + if (this.#running.has(fn) || !this.check(fn)) { + return false; + } + this.#running.add(fn); + fn(() => this.#clear(fn)); + return true; + } + #clear(fn) { + if (!this.#running.has(fn)) { + return false; + } + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('invalid reservation'); + } + /* c8 ignore stop */ + const { paths, dirs } = res; + const next = new Set(); + for (const path of paths) { + const q = this.#queues.get(path); + /* c8 ignore start */ + if (!q || q?.[0] !== fn) { + continue; + } + /* c8 ignore stop */ + const q0 = q[1]; + if (!q0) { + this.#queues.delete(path); + continue; + } + q.shift(); + if 
(typeof q0 === 'function') { + next.add(q0); + } + else { + for (const f of q0) { + next.add(f); + } + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + const q0 = q?.[0]; + /* c8 ignore next - type safety only */ + if (!q || !(q0 instanceof Set)) + continue; + if (q0.size === 1 && q.length === 1) { + this.#queues.delete(dir); + continue; + } + else if (q0.size === 1) { + q.shift(); + // next one must be a function, + // or else the Set would've been reused + const n = q[0]; + if (typeof n === 'function') { + next.add(n); + } + } + else { + q0.delete(fn); + } + } + this.#running.delete(fn); + next.forEach(fn => this.#run(fn)); + return true; + } +} +exports.PathReservations = PathReservations; +//# sourceMappingURL=path-reservations.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js new file mode 100644 index 00000000000000..d30c0f3efbe9ea --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/pax.js @@ -0,0 +1,158 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Pax = void 0; +const node_path_1 = require("node:path"); +const header_js_1 = require("./header.js"); +class Pax { + atime; + mtime; + ctime; + charset; + comment; + gid; + uid; + gname; + uname; + linkpath; + dev; + ino; + nlink; + path; + size; + mode; + global; + constructor(obj, global = false) { + this.atime = obj.atime; + this.charset = obj.charset; + this.comment = obj.comment; + this.ctime = obj.ctime; + this.dev = obj.dev; + this.gid = obj.gid; + this.global = global; + this.gname = obj.gname; + this.ino = obj.ino; + this.linkpath = obj.linkpath; + this.mtime = obj.mtime; + this.nlink = obj.nlink; + this.path = obj.path; + this.size = obj.size; + this.uid = obj.uid; + this.uname = obj.uname; + } + encode() { + const body = this.encodeBody(); + if (body === '') { + return Buffer.allocUnsafe(0); + } + const bodyLen = Buffer.byteLength(body); + // round up to 512 bytes + // add 512 for header + const bufLen = 512 * Math.ceil(1 + bodyLen / 512); + const buf = Buffer.allocUnsafe(bufLen); + // 0-fill the header section, it might not hit every field + for (let i = 0; i < 512; i++) { + buf[i] = 0; + } + new header_js_1.Header({ + // XXX split the path + // then the path should be PaxHeader + basename, but less than 99, + // prepend with the dirname + /* c8 ignore start */ + path: ('PaxHeader/' + (0, node_path_1.basename)(this.path ?? '')).slice(0, 99), + /* c8 ignore stop */ + mode: this.mode || 0o644, + uid: this.uid, + gid: this.gid, + size: bodyLen, + mtime: this.mtime, + type: this.global ? 
'GlobalExtendedHeader' : 'ExtendedHeader', + linkpath: '', + uname: this.uname || '', + gname: this.gname || '', + devmaj: 0, + devmin: 0, + atime: this.atime, + ctime: this.ctime, + }).encode(buf); + buf.write(body, 512, bodyLen, 'utf8'); + // null pad after the body + for (let i = bodyLen + 512; i < buf.length; i++) { + buf[i] = 0; + } + return buf; + } + encodeBody() { + return (this.encodeField('path') + + this.encodeField('ctime') + + this.encodeField('atime') + + this.encodeField('dev') + + this.encodeField('ino') + + this.encodeField('nlink') + + this.encodeField('charset') + + this.encodeField('comment') + + this.encodeField('gid') + + this.encodeField('gname') + + this.encodeField('linkpath') + + this.encodeField('mtime') + + this.encodeField('size') + + this.encodeField('uid') + + this.encodeField('uname')); + } + encodeField(field) { + if (this[field] === undefined) { + return ''; + } + const r = this[field]; + const v = r instanceof Date ? r.getTime() / 1000 : r; + const s = ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' ? + 'SCHILY.' + : '') + + field + + '=' + + v + + '\n'; + const byteLen = Buffer.byteLength(s); + // the digits includes the length of the digits in ascii base-10 + // so if it's 9 characters, then adding 1 for the 9 makes it 10 + // which makes it 11 chars. + let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1; + if (byteLen + digits >= Math.pow(10, digits)) { + digits += 1; + } + const len = digits + byteLen; + return len + s; + } + static parse(str, ex, g = false) { + return new Pax(merge(parseKV(str), ex), g); + } +} +exports.Pax = Pax; +const merge = (a, b) => b ? Object.assign({}, b, a) : a; +const parseKV = (str) => str + .replace(/\n$/, '') + .split('\n') + .reduce(parseKVLine, Object.create(null)); +const parseKVLine = (set, line) => { + const n = parseInt(line, 10); + // XXX Values with \n in them will fail this. + // Refactor to not be a naive line-by-line parse. + if (n !== Buffer.byteLength(line) + 1) { + return set; + } + line = line.slice((n + ' ').length); + const kv = line.split('='); + const r = kv.shift(); + if (!r) { + return set; + } + const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1'); + const v = kv.join('='); + set[k] = + /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ? + new Date(Number(v) * 1000) + : /^[0-9]+$/.test(v) ? +v + : v; + return set; +}; +//# sourceMappingURL=pax.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js new file mode 100644 index 00000000000000..15e2d55c938a43 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/read-entry.js @@ -0,0 +1,140 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ReadEntry = void 0; +const minipass_1 = require("minipass"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +class ReadEntry extends minipass_1.Minipass { + extended; + globalExtended; + header; + startBlockSize; + blockRemain; + remain; + type; + meta = false; + ignore = false; + path; + mode; + uid; + gid; + uname; + gname; + size = 0; + mtime; + atime; + ctime; + linkpath; + dev; + ino; + nlink; + invalid = false; + absolute; + unsupported = false; + constructor(header, ex, gex) { + super({}); + // read entries always start life paused. 
this is to avoid the + // situation where Minipass's auto-ending empty streams results + // in an entry ending before we're ready for it. + this.pause(); + this.extended = ex; + this.globalExtended = gex; + this.header = header; + /* c8 ignore start */ + this.remain = header.size ?? 0; + /* c8 ignore stop */ + this.startBlockSize = 512 * Math.ceil(this.remain / 512); + this.blockRemain = this.startBlockSize; + this.type = header.type; + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break; + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true; + break; + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true; + } + /* c8 ignore start */ + if (!header.path) { + throw new Error('no path provided for tar.ReadEntry'); + } + /* c8 ignore stop */ + this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.path); + this.mode = header.mode; + if (this.mode) { + this.mode = this.mode & 0o7777; + } + this.uid = header.uid; + this.gid = header.gid; + this.uname = header.uname; + this.gname = header.gname; + this.size = this.remain; + this.mtime = header.mtime; + this.atime = header.atime; + this.ctime = header.ctime; + /* c8 ignore start */ + this.linkpath = + header.linkpath ? + (0, normalize_windows_path_js_1.normalizeWindowsPath)(header.linkpath) + : undefined; + /* c8 ignore stop */ + this.uname = header.uname; + this.gname = header.gname; + if (ex) { + this.#slurp(ex); + } + if (gex) { + this.#slurp(gex, true); + } + } + write(data) { + const writeLen = data.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + const r = this.remain; + const br = this.blockRemain; + this.remain = Math.max(0, r - writeLen); + this.blockRemain = Math.max(0, br - writeLen); + if (this.ignore) { + return true; + } + if (r >= writeLen) { + return super.write(data); + } + // r < writeLen + return super.write(data.subarray(0, r)); + } + #slurp(ex, gex = false) { + if (ex.path) + ex.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.path); + if (ex.linkpath) + ex.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(ex.linkpath); + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !(v === null || + v === undefined || + (k === 'path' && gex)); + }))); + } +} +exports.ReadEntry = ReadEntry; +//# sourceMappingURL=read-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js new file mode 100644 index 00000000000000..22eff246d4d75f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/replace.js @@ -0,0 +1,231 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.replace = void 0; +// tar -r +const fs_minipass_1 = require("@isaacs/fs-minipass"); +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const header_js_1 = require("./header.js"); +const list_js_1 = require("./list.js"); +const make_command_js_1 = require("./make-command.js"); +const options_js_1 = require("./options.js"); +const pack_js_1 = require("./pack.js"); +// starting at the head of the file, read a Header +// If the checksum is invalid, that's our position to start writing +// If it is, jump forward by the specified size (round up to 512) +// and try again. +// Write the new Pack stream starting there. +const replaceSync = (opt, files) => { + const p = new pack_js_1.PackSync(opt); + let threw = true; + let fd; + let position; + try { + try { + fd = node_fs_1.default.openSync(opt.file, 'r+'); + } + catch (er) { + if (er?.code === 'ENOENT') { + fd = node_fs_1.default.openSync(opt.file, 'w+'); + } + else { + throw er; + } + } + const st = node_fs_1.default.fstatSync(fd); + const headBuf = Buffer.alloc(512); + POSITION: for (position = 0; position < st.size; position += 512) { + for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { + bytes = node_fs_1.default.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos); + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + throw new Error('cannot append to compressed archives'); + } + if (!bytes) { + break POSITION; + } + } + const h = new header_js_1.Header(headBuf); + if (!h.cksumValid) { + break; + } + const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512); + if (position + entryBlockSize + 512 > st.size) { + break; + } + // the 512 for the header we just parsed will be added as well + // also jump ahead all the blocks for the body + position += entryBlockSize; + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + } + threw = false; + streamSync(opt, p, position, fd, files); + } + finally { + if (threw) { + try { + node_fs_1.default.closeSync(fd); + } + catch (er) { } + } + } +}; +const streamSync = (opt, p, position, fd, files) => { + const stream = new fs_minipass_1.WriteStreamSync(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const replaceAsync = (opt, files) => { + files = Array.from(files); + const p = new pack_js_1.Pack(opt); + const getPos = (fd, size, cb_) => { + const cb = (er, pos) => { + if (er) { + node_fs_1.default.close(fd, _ => cb_(er)); + } + else { + cb_(null, pos); + } + }; + let position = 0; + if (size === 0) { + return cb(null, 0); + } + let bufPos = 0; + const headBuf = Buffer.alloc(512); + const onread = (er, bytes) => { + if (er || typeof bytes === 'undefined') { + return cb(er); + } + bufPos += bytes; + if (bufPos < 512 && bytes) { + return node_fs_1.default.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread); + } + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + return cb(new Error('cannot append to compressed archives')); + } + // truncated header + if (bufPos < 512) { + return cb(null, position); + } + const h = new header_js_1.Header(headBuf); + if (!h.cksumValid) { + return cb(null, position); + } + /* c8 ignore next */ + const entryBlockSize = 512 * Math.ceil((h.size ?? 
0) / 512); + if (position + entryBlockSize + 512 > size) { + return cb(null, position); + } + position += entryBlockSize + 512; + if (position >= size) { + return cb(null, position); + } + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + bufPos = 0; + node_fs_1.default.read(fd, headBuf, 0, 512, position, onread); + }; + node_fs_1.default.read(fd, headBuf, 0, 512, position, onread); + }; + const promise = new Promise((resolve, reject) => { + p.on('error', reject); + let flag = 'r+'; + const onopen = (er, fd) => { + if (er && er.code === 'ENOENT' && flag === 'r+') { + flag = 'w+'; + return node_fs_1.default.open(opt.file, flag, onopen); + } + if (er || !fd) { + return reject(er); + } + node_fs_1.default.fstat(fd, (er, st) => { + if (er) { + return node_fs_1.default.close(fd, () => reject(er)); + } + getPos(fd, st.size, (er, position) => { + if (er) { + return reject(er); + } + const stream = new fs_minipass_1.WriteStream(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + stream.on('error', reject); + stream.on('close', resolve); + addFilesAsync(p, files); + }); + }); + }; + node_fs_1.default.open(opt.file, flag, onopen); + }); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + (0, list_js_1.list)({ + file: node_path_1.default.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await (0, list_js_1.list)({ + file: node_path_1.default.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + } + p.end(); +}; +exports.replace = (0, make_command_js_1.makeCommand)(replaceSync, replaceAsync, +/* c8 ignore start */ +() => { + throw new TypeError('file is required'); +}, () => { + throw new TypeError('file is required'); +}, +/* c8 ignore stop */ +(opt, entries) => { + if (!(0, options_js_1.isFile)(opt)) { + throw new TypeError('file is required'); + } + if (opt.gzip || + opt.brotli || + opt.file.endsWith('.br') || + opt.file.endsWith('.tbr')) { + throw new TypeError('cannot append to compressed archives'); + } + if (!entries?.length) { + throw new TypeError('no paths specified to add/replace'); + } +}); +//# sourceMappingURL=replace.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js new file mode 100644 index 00000000000000..bb7639c35a1104 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-absolute-path.js @@ -0,0 +1,29 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.stripAbsolutePath = void 0; +// unix absolute paths are also absolute on win32, so we use this for both +const node_path_1 = require("node:path"); +const { isAbsolute, parse } = node_path_1.win32; +// returns [root, stripped] +// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in +// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / +// explicitly if it's the first character. 
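// A quick illustration of the contract described above (win32 parsing is
// used on every platform, so these hold everywhere); paths are examples:
const { stripAbsolutePath } = require('./strip-absolute-path.js');
stripAbsolutePath('/a/b/c');  // ['/', 'a/b/c']
stripAbsolutePath('//x/y/z'); // ['//', 'x/y/z'] -- leading slashes stripped one at a time
stripAbsolutePath('a/b/c');   // ['', 'a/b/c'] -- relative paths pass through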
+// drive-specific relative paths on Windows get their root stripped off even +// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] +const stripAbsolutePath = (path) => { + let r = ''; + let parsed = parse(path); + while (isAbsolute(path) || parsed.root) { + // windows will think that //x/y/z has a "root" of //x/y/ + // but strip the //?/C:/ off of //?/C:/path + const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? + '/' + : parsed.root; + path = path.slice(root.length); + r += root; + parsed = parse(path); + } + return [r, path]; +}; +exports.stripAbsolutePath = stripAbsolutePath; +//# sourceMappingURL=strip-absolute-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js new file mode 100644 index 00000000000000..6fa74ad6a4ac93 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/strip-trailing-slashes.js @@ -0,0 +1,18 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.stripTrailingSlashes = void 0; +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +const stripTrailingSlashes = (str) => { + let i = str.length - 1; + let slashesStart = -1; + while (i > -1 && str.charAt(i) === '/') { + slashesStart = i; + i--; + } + return slashesStart === -1 ? str : str.slice(0, slashesStart); +}; +exports.stripTrailingSlashes = stripTrailingSlashes; +//# sourceMappingURL=strip-trailing-slashes.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js new file mode 100644 index 00000000000000..cc19ac1a2e3c6b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/symlink-error.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.SymlinkError = void 0; +class SymlinkError extends Error { + path; + symlink; + syscall = 'symlink'; + code = 'TAR_SYMLINK_ERROR'; + constructor(symlink, path) { + super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link'); + this.symlink = symlink; + this.path = path; + } + get name() { + return 'SymlinkError'; + } +} +exports.SymlinkError = SymlinkError; +//# sourceMappingURL=symlink-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/types.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/types.js new file mode 100644 index 00000000000000..cb9b684e843b72 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/types.js @@ -0,0 +1,50 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.code = exports.name = exports.isName = exports.isCode = void 0; +const isCode = (c) => exports.name.has(c); +exports.isCode = isCode; +const isName = (c) => exports.code.has(c); +exports.isName = isName; +// map types from key to human-friendly name +exports.name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 
'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'], +]); +// map the other direction +exports.code = new Map(Array.from(exports.name).map(kv => [kv[1], kv[0]])); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js new file mode 100644 index 00000000000000..edf8acbb18c408 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/unpack.js @@ -0,0 +1,919 @@ +"use strict"; +// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. +// but the path reservations are required to avoid race conditions where +// parallelized unpack ops may mess with one another, due to dependencies +// (like a Link depending on its target) or destructive operations (like +// clobbering an fs object to create one of a different type.) +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.UnpackSync = exports.Unpack = void 0; +const fsm = __importStar(require("@isaacs/fs-minipass")); +const node_assert_1 = __importDefault(require("node:assert")); +const node_crypto_1 = require("node:crypto"); +const node_fs_1 = __importDefault(require("node:fs")); +const node_path_1 = __importDefault(require("node:path")); +const get_write_flag_js_1 = require("./get-write-flag.js"); +const mkdir_js_1 = require("./mkdir.js"); +const normalize_unicode_js_1 = require("./normalize-unicode.js"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +const parse_js_1 = require("./parse.js"); +const strip_absolute_path_js_1 = require("./strip-absolute-path.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const wc = __importStar(require("./winchars.js")); +const path_reservations_js_1 = require("./path-reservations.js"); +const ONENTRY = Symbol('onEntry'); +const CHECKFS = Symbol('checkFs'); +const CHECKFS2 = Symbol('checkFs2'); +const PRUNECACHE = Symbol('pruneCache'); +const ISREUSABLE = Symbol('isReusable'); +const MAKEFS = Symbol('makeFs'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const LINK = Symbol('link'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const UNSUPPORTED = Symbol('unsupported'); +const CHECKPATH = Symbol('checkPath'); +const MKDIR = Symbol('mkdir'); +const ONERROR = Symbol('onError'); +const PENDING = Symbol('pending'); +const PEND = Symbol('pend'); +const UNPEND = Symbol('unpend'); +const ENDED = Symbol('ended'); +const MAYBECLOSE = Symbol('maybeClose'); +const SKIP = Symbol('skip'); +const DOCHOWN = Symbol('doChown'); +const UID = Symbol('uid'); +const GID = Symbol('gid'); +const CHECKED_CWD = Symbol('checkedCwd'); +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +const DEFAULT_MAX_DEPTH = 1024; +// Unlinks on Windows are not atomic. +// +// This means that if you have a file entry, followed by another +// file entry with an identical name, and you cannot re-use the file +// (because it's a hardlink, or because unlink:true is set, or it's +// Windows, which does not have useful nlink values), then the unlink +// will be committed to the disk AFTER the new file has been written +// over the old one, deleting the new file. +// +// To work around this, on Windows systems, we rename the file and then +// delete the renamed file. It's a sloppy kludge, but frankly, I do not +// know of a better way to do this, given windows' non-atomic unlink +// semantics. +// +// See: https://github.com/npm/node-tar/issues/183 +/* c8 ignore start */ +const unlinkFile = (path, cb) => { + if (!isWindows) { + return node_fs_1.default.unlink(path, cb); + } + const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex'); + node_fs_1.default.rename(path, name, er => { + if (er) { + return cb(er); + } + node_fs_1.default.unlink(name, cb); + }); +}; +/* c8 ignore stop */ +/* c8 ignore start */ +const unlinkFileSync = (path) => { + if (!isWindows) { + return node_fs_1.default.unlinkSync(path); + } + const name = path + '.DELETE.' + (0, node_crypto_1.randomBytes)(16).toString('hex'); + node_fs_1.default.renameSync(path, name); + node_fs_1.default.unlinkSync(name); +}; +/* c8 ignore stop */ +// this.gid, entry.gid, this.processUid +const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? 
a + : b !== undefined && b === b >>> 0 ? b + : c; +// clear the cache if it's a case-insensitive unicode-squashing match. +// we can't know if the current file system is case-sensitive or supports +// unicode fully, so we check for similarity on the maximally compatible +// representation. Err on the side of pruning, since all it's doing is +// preventing lstats, and it's not the end of the world if we get a false +// positive. +// Note that on windows, we always drop the entire cache whenever a +// symbolic link is encountered, because 8.3 filenames are impossible +// to reason about, and collisions are hazards rather than just failures. +const cacheKeyNormalize = (path) => (0, strip_trailing_slashes_js_1.stripTrailingSlashes)((0, normalize_windows_path_js_1.normalizeWindowsPath)((0, normalize_unicode_js_1.normalizeUnicode)(path))).toLowerCase(); +// remove all cache entries matching ${abs}/** +const pruneCache = (cache, abs) => { + abs = cacheKeyNormalize(abs); + for (const path of cache.keys()) { + const pnorm = cacheKeyNormalize(path); + if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { + cache.delete(path); + } + } +}; +const dropCache = (cache) => { + for (const key of cache.keys()) { + cache.delete(key); + } +}; +class Unpack extends parse_js_1.Parser { + [ENDED] = false; + [CHECKED_CWD] = false; + [PENDING] = 0; + reservations = new path_reservations_js_1.PathReservations(); + transform; + writable = true; + readable = false; + dirCache; + uid; + gid; + setOwner; + preserveOwner; + processGid; + processUid; + maxDepth; + forceChown; + win32; + newer; + keep; + noMtime; + preservePaths; + unlink; + cwd; + strip; + processUmask; + umask; + dmode; + fmode; + chmod; + constructor(opt = {}) { + opt.ondone = () => { + this[ENDED] = true; + this[MAYBECLOSE](); + }; + super(opt); + this.transform = opt.transform; + this.dirCache = opt.dirCache || new Map(); + this.chmod = !!opt.chmod; + if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { + // need both or neither + if (typeof opt.uid !== 'number' || + typeof opt.gid !== 'number') { + throw new TypeError('cannot set owner without number uid and gid'); + } + if (opt.preserveOwner) { + throw new TypeError('cannot preserve owner in archive and also set owner explicitly'); + } + this.uid = opt.uid; + this.gid = opt.gid; + this.setOwner = true; + } + else { + this.uid = undefined; + this.gid = undefined; + this.setOwner = false; + } + // default true for root + if (opt.preserveOwner === undefined && + typeof opt.uid !== 'number') { + this.preserveOwner = !!(process.getuid && process.getuid() === 0); + } + else { + this.preserveOwner = !!opt.preserveOwner; + } + this.processUid = + (this.preserveOwner || this.setOwner) && process.getuid ? + process.getuid() + : undefined; + this.processGid = + (this.preserveOwner || this.setOwner) && process.getgid ? + process.getgid() + : undefined; + // prevent excessively deep nesting of subfolders + // set to `Infinity` to remove this restriction + this.maxDepth = + typeof opt.maxDepth === 'number' ? + opt.maxDepth + : DEFAULT_MAX_DEPTH; + // mostly just for testing, but useful in some cases. + // Forcibly trigger a chown on every entry, no matter what + this.forceChown = opt.forceChown === true; + // turn ><?| in filenames into 0xf000-higher encoded forms + this.win32 = !!opt.win32 || isWindows; + // do not unpack over files that are newer than what's in the archive + this.newer = !!opt.newer; + // do not unpack over ANY files + this.keep = !!opt.keep; + // do not set mtime/atime of extracted entries + this.noMtime = !!opt.noMtime; + // allow .. in path entries, as well as absolute paths + this.preservePaths = !!opt.preservePaths; + // unlink files and links before writing. This breaks existing hard + // links, and removes symlink directories rather than erroring + this.unlink = !!opt.unlink; + this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(opt.cwd || process.cwd())); + this.strip = Number(opt.strip) || 0; + // if we're not chmodding, then we don't need the process umask + this.processUmask = this.chmod ? process.umask() : 0; + this.umask = typeof opt.umask === 'number' ? opt.umask : this.processUmask; + // default mode for dirs created as parents + this.dmode = opt.dmode || 0o0777 & ~this.umask; + this.fmode = opt.fmode || 0o0666 & ~this.umask; + this.on('entry', entry => this[ONENTRY](entry)); + } + // a bad or damaged archive is a warning for Parser, but an error + // when extracting. Mark those errors as unrecoverable, because + // the Unpack contract cannot be met.
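// For a consumer of this class, that contract looks roughly like the
// following sketch (destination and archive paths are hypothetical):
//
//   const { Unpack } = require('./unpack.js');
//   const u = new Unpack({ cwd: '/tmp/dest' });
//   // recoverable problems (bad entries) surface as 'warn' events...
//   u.on('warn', (code, message) => console.warn(code, message));
//   // ...but TAR_BAD_ARCHIVE and TAR_ABORT are forced to recoverable:
//   // false, so they arrive on 'error' even without strict: true
//   u.on('error', er => console.error('unpack failed:', er));
//   require('node:fs').createReadStream('archive.tar').pipe(u);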
+ warn(code, msg, data = {}) { + if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { + data.recoverable = false; + } + return super.warn(code, msg, data); + } + [MAYBECLOSE]() { + if (this[ENDED] && this[PENDING] === 0) { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + } + } + [CHECKPATH](entry) { + const p = (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path); + const parts = p.split('/'); + if (this.strip) { + if (parts.length < this.strip) { + return false; + } + if (entry.type === 'Link') { + const linkparts = (0, normalize_windows_path_js_1.normalizeWindowsPath)(String(entry.linkpath)).split('/'); + if (linkparts.length >= this.strip) { + entry.linkpath = linkparts.slice(this.strip).join('/'); + } + else { + return false; + } + } + parts.splice(0, this.strip); + entry.path = parts.join('/'); + } + if (isFinite(this.maxDepth) && parts.length > this.maxDepth) { + this.warn('TAR_ENTRY_ERROR', 'path excessively deep', { + entry, + path: p, + depth: parts.length, + maxDepth: this.maxDepth, + }); + return false; + } + if (!this.preservePaths) { + if (parts.includes('..') || + /* c8 ignore next */ + (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) { + this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { + entry, + path: p, + }); + return false; + } + // strip off the root + const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(p); + if (root) { + entry.path = String(stripped); + this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { + entry, + path: p, + }); + } + } + if (node_path_1.default.isAbsolute(entry.path)) { + entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(entry.path)); + } + else { + entry.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, entry.path)); + } + // if we somehow ended up with a path that escapes the cwd, and we are + // not in preservePaths mode, then something is fishy! This should have + // been prevented above, so ignore this for coverage. + /* c8 ignore start - defense in depth */ + if (!this.preservePaths && + typeof entry.absolute === 'string' && + entry.absolute.indexOf(this.cwd + '/') !== 0 && + entry.absolute !== this.cwd) { + this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { + entry, + path: (0, normalize_windows_path_js_1.normalizeWindowsPath)(entry.path), + resolvedPath: entry.absolute, + cwd: this.cwd, + }); + return false; + } + /* c8 ignore stop */ + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. 
+ if (entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir') { + return false; + } + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = node_path_1.default.win32.parse(String(entry.absolute)); + entry.absolute = + aRoot + wc.encode(String(entry.absolute).slice(aRoot.length)); + const { root: pRoot } = node_path_1.default.win32.parse(entry.path); + entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)); + } + return true; + } + [ONENTRY](entry) { + if (!this[CHECKPATH](entry)) { + return entry.resume(); + } + node_assert_1.default.equal(typeof entry.absolute, 'string'); + switch (entry.type) { + case 'Directory': + case 'GNUDumpDir': + if (entry.mode) { + entry.mode = entry.mode | 0o700; + } + // eslint-disable-next-line no-fallthrough + case 'File': + case 'OldFile': + case 'ContiguousFile': + case 'Link': + case 'SymbolicLink': + return this[CHECKFS](entry); + case 'CharacterDevice': + case 'BlockDevice': + case 'FIFO': + default: + return this[UNSUPPORTED](entry); + } + } + [ONERROR](er, entry) { + // Cwd has to exist, or else nothing works. That's serious. + // Other errors are warnings, which raise the error in strict + // mode, but otherwise continue on. + if (er.name === 'CwdError') { + this.emit('error', er); + } + else { + this.warn('TAR_ENTRY_ERROR', er, { entry }); + this[UNPEND](); + entry.resume(); + } + } + [MKDIR](dir, mode, cb) { + (0, mkdir_js_1.mkdir)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }, cb); + } + [DOCHOWN](entry) { + // in preserve owner mode, chown if the entry doesn't match process + // in set owner mode, chown if setting doesn't match process + return (this.forceChown || + (this.preserveOwner && + ((typeof entry.uid === 'number' && + entry.uid !== this.processUid) || + (typeof entry.gid === 'number' && + entry.gid !== this.processGid))) || + (typeof this.uid === 'number' && + this.uid !== this.processUid) || + (typeof this.gid === 'number' && this.gid !== this.processGid)); + } + [UID](entry) { + return uint32(this.uid, entry.uid, this.processUid); + } + [GID](entry) { + return uint32(this.gid, entry.gid, this.processGid); + } + [FILE](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const stream = new fsm.WriteStream(String(entry.absolute), { + // slight lie, but it can be numeric flags + flags: (0, get_write_flag_js_1.getWriteFlag)(entry.size), + mode: mode, + autoClose: false, + }); + stream.on('error', (er) => { + if (stream.fd) { + node_fs_1.default.close(stream.fd, () => { }); + } + // flush all the data out so that we aren't left hanging + // if the error wasn't actually fatal. otherwise the parse + // is blocked, and we never proceed. 
+ stream.write = () => true; + this[ONERROR](er, entry); + fullyDone(); + }); + let actions = 1; + const done = (er) => { + if (er) { + /* c8 ignore start - we should always have a fd by now */ + if (stream.fd) { + node_fs_1.default.close(stream.fd, () => { }); + } + /* c8 ignore stop */ + this[ONERROR](er, entry); + fullyDone(); + return; + } + if (--actions === 0) { + if (stream.fd !== undefined) { + node_fs_1.default.close(stream.fd, er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + } + fullyDone(); + }); + } + } + }; + stream.on('finish', () => { + // if futimes fails, try utimes + // if utimes fails, fail with the original error + // same for fchown/chown + const abs = String(entry.absolute); + const fd = stream.fd; + if (typeof fd === 'number' && entry.mtime && !this.noMtime) { + actions++; + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + node_fs_1.default.futimes(fd, atime, mtime, er => er ? + node_fs_1.default.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done()); + } + if (typeof fd === 'number' && this[DOCHOWN](entry)) { + actions++; + const uid = this[UID](entry); + const gid = this[GID](entry); + if (typeof uid === 'number' && typeof gid === 'number') { + node_fs_1.default.fchown(fd, uid, gid, er => er ? + node_fs_1.default.chown(abs, uid, gid, er2 => done(er2 && er)) + : done()); + } + } + done(); + }); + const tx = this.transform ? this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => { + this[ONERROR](er, entry); + fullyDone(); + }); + entry.pipe(tx); + } + tx.pipe(stream); + } + [DIRECTORY](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + this[MKDIR](String(entry.absolute), mode, er => { + if (er) { + this[ONERROR](er, entry); + fullyDone(); + return; + } + let actions = 1; + const done = () => { + if (--actions === 0) { + fullyDone(); + this[UNPEND](); + entry.resume(); + } + }; + if (entry.mtime && !this.noMtime) { + actions++; + node_fs_1.default.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done); + } + if (this[DOCHOWN](entry)) { + actions++; + node_fs_1.default.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done); + } + done(); + }); + } + [UNSUPPORTED](entry) { + entry.unsupported = true; + this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry }); + entry.resume(); + } + [SYMLINK](entry, done) { + this[LINK](entry, String(entry.linkpath), 'symlink', done); + } + [HARDLINK](entry, done) { + const linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.resolve(this.cwd, String(entry.linkpath))); + this[LINK](entry, linkpath, 'link', done); + } + [PEND]() { + this[PENDING]++; + } + [UNPEND]() { + this[PENDING]--; + this[MAYBECLOSE](); + } + [SKIP](entry) { + this[UNPEND](); + entry.resume(); + } + // Check if we can reuse an existing filesystem entry safely and + // overwrite it, rather than unlinking and recreating + // Windows doesn't report a useful nlink, so we just never reuse entries + [ISREUSABLE](entry, st) { + return (entry.type === 'File' && + !this.unlink && + st.isFile() && + st.nlink <= 1 && + !isWindows); + } + // check if a thing is there, and if so, try to clobber it + [CHECKFS](entry) { + this[PEND](); + const paths = [entry.path]; + if (entry.linkpath) { + paths.push(entry.linkpath); + } + this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)); + } + 
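// The life of a single 'File' entry through the methods here, in order:
//
//   [CHECKFS](entry)             reserve entry.path (and linkpath, if any)
//   [CHECKFS2](entry, done)      prune the dirCache, ensure cwd and the
//                                parent dir exist, lstat entry.absolute
//   [MAKEFS](null, entry, done)  dispatch on entry.type once the existing
//                                fs object is skipped, reused, or removed
//   [FILE](entry, done)          stream the body, then futimes/fchown and
//                                close the fd
//
// Calling done() releases the path reservation, letting queued entries
// that touch the same paths proceed.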
[PRUNECACHE](entry) { + // if we are not creating a directory, and the path is in the dirCache, + // then that means we are about to delete the directory we created + // previously, and it is no longer going to be a directory, and neither + // is any of its children. + // If a symbolic link is encountered, all bets are off. There is no + // reasonable way to sanitize the cache in such a way we will be able to + // avoid having filesystem collisions. If this happens with a non-symlink + // entry, it'll just fail to unpack, but a symlink to a directory, using an + // 8.3 shortname or certain unicode attacks, can evade detection and lead + // to arbitrary writes to anywhere on the system. + if (entry.type === 'SymbolicLink') { + dropCache(this.dirCache); + } + else if (entry.type !== 'Directory') { + pruneCache(this.dirCache, String(entry.absolute)); + } + } + [CHECKFS2](entry, fullyDone) { + this[PRUNECACHE](entry); + const done = (er) => { + this[PRUNECACHE](entry); + fullyDone(er); + }; + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + this[CHECKED_CWD] = true; + start(); + }); + }; + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + afterMakeParent(); + }); + } + } + afterMakeParent(); + }; + const afterMakeParent = () => { + node_fs_1.default.lstat(String(entry.absolute), (lstatEr, st) => { + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + this[SKIP](entry); + done(); + return; + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry, done); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done); + if (!needChmod) { + return afterChmod(); + } + return node_fs_1.default.chmod(String(entry.absolute), Number(entry.mode), afterChmod); + } + // Not a dir entry, have to remove it. + // NB: the only way to end up with an entry that is the cwd + // itself, in such a way that == does not detect, is a + // tricky windows absolute path with UNC or 8.3 parts (and + // preservePaths:true, or else it will have been stripped). + // In that case, the user has opted out of path protections + // explicitly, so if they blow away the cwd, c'est la vie. + if (entry.absolute !== this.cwd) { + return node_fs_1.default.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done)); + } + } + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) { + return this[MAKEFS](null, entry, done); + } + unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? 
null, entry, done)); + }); + }; + if (this[CHECKED_CWD]) { + start(); + } + else { + checkCwd(); + } + } + [MAKEFS](er, entry, done) { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + switch (entry.type) { + case 'File': + case 'OldFile': + case 'ContiguousFile': + return this[FILE](entry, done); + case 'Link': + return this[HARDLINK](entry, done); + case 'SymbolicLink': + return this[SYMLINK](entry, done); + case 'Directory': + case 'GNUDumpDir': + return this[DIRECTORY](entry, done); + } + } + [LINK](entry, linkpath, link, done) { + // XXX: get the type ('symlink' or 'junction') for windows + node_fs_1.default[link](linkpath, String(entry.absolute), er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + entry.resume(); + } + done(); + }); + } +} +exports.Unpack = Unpack; +const callSync = (fn) => { + try { + return [null, fn()]; + } + catch (er) { + return [er, null]; + } +}; +class UnpackSync extends Unpack { + sync = true; + [MAKEFS](er, entry) { + return super[MAKEFS](er, entry, () => { }); + } + [CHECKFS](entry) { + this[PRUNECACHE](entry); + if (!this[CHECKED_CWD]) { + const er = this[MKDIR](this.cwd, this.dmode); + if (er) { + return this[ONERROR](er, entry); + } + this[CHECKED_CWD] = true; + } + // don't bother to make the parent if the current entry is the cwd, + // we've already checked it. + if (entry.absolute !== this.cwd) { + const parent = (0, normalize_windows_path_js_1.normalizeWindowsPath)(node_path_1.default.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + const mkParent = this[MKDIR](parent, this.dmode); + if (mkParent) { + return this[ONERROR](mkParent, entry); + } + } + } + const [lstatEr, st] = callSync(() => node_fs_1.default.lstatSync(String(entry.absolute))); + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + return this[SKIP](entry); + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const [er] = needChmod ? + callSync(() => { + node_fs_1.default.chmodSync(String(entry.absolute), Number(entry.mode)); + }) + : []; + return this[MAKEFS](er, entry); + } + // not a dir entry, have to remove it + const [er] = callSync(() => node_fs_1.default.rmdirSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + // not a dir, and not reusable. + // don't remove if it's the cwd, since we want that error. + const [er] = entry.absolute === this.cwd ? + [] + : callSync(() => unlinkFileSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + [FILE](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const oner = (er) => { + let closeError; + try { + node_fs_1.default.closeSync(fd); + } + catch (e) { + closeError = e; + } + if (er || closeError) { + this[ONERROR](er || closeError, entry); + } + done(); + }; + let fd; + try { + fd = node_fs_1.default.openSync(String(entry.absolute), (0, get_write_flag_js_1.getWriteFlag)(entry.size), mode); + } + catch (er) { + return oner(er); + } + const tx = this.transform ? 
this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => this[ONERROR](er, entry)); + entry.pipe(tx); + } + tx.on('data', (chunk) => { + try { + node_fs_1.default.writeSync(fd, chunk, 0, chunk.length); + } + catch (er) { + oner(er); + } + }); + tx.on('end', () => { + let er = null; + // try both, falling futimes back to utimes + // if either fails, handle the first error + if (entry.mtime && !this.noMtime) { + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + try { + node_fs_1.default.futimesSync(fd, atime, mtime); + } + catch (futimeser) { + try { + node_fs_1.default.utimesSync(String(entry.absolute), atime, mtime); + } + catch (utimeser) { + er = futimeser; + } + } + } + if (this[DOCHOWN](entry)) { + const uid = this[UID](entry); + const gid = this[GID](entry); + try { + node_fs_1.default.fchownSync(fd, Number(uid), Number(gid)); + } + catch (fchowner) { + try { + node_fs_1.default.chownSync(String(entry.absolute), Number(uid), Number(gid)); + } + catch (chowner) { + er = er || fchowner; + } + } + } + oner(er); + }); + } + [DIRECTORY](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + const er = this[MKDIR](String(entry.absolute), mode); + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + if (entry.mtime && !this.noMtime) { + try { + node_fs_1.default.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime); + /* c8 ignore next */ + } + catch (er) { } + } + if (this[DOCHOWN](entry)) { + try { + node_fs_1.default.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry))); + } + catch (er) { } + } + done(); + entry.resume(); + } + [MKDIR](dir, mode) { + try { + return (0, mkdir_js_1.mkdirSync)((0, normalize_windows_path_js_1.normalizeWindowsPath)(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }); + } + catch (er) { + return er; + } + } + [LINK](entry, linkpath, link, done) { + const ls = `${link}Sync`; + try { + node_fs_1.default[ls](linkpath, String(entry.absolute)); + done(); + entry.resume(); + } + catch (er) { + return this[ONERROR](er, entry); + } + } +} +exports.UnpackSync = UnpackSync; +//# sourceMappingURL=unpack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/update.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/update.js new file mode 100644 index 00000000000000..7687896f4bfeeb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/update.js @@ -0,0 +1,33 @@ +"use strict"; +// tar -u +Object.defineProperty(exports, "__esModule", { value: true }); +exports.update = void 0; +const make_command_js_1 = require("./make-command.js"); +const replace_js_1 = require("./replace.js"); +// just call tar.r with the filter and mtimeCache +exports.update = (0, make_command_js_1.makeCommand)(replace_js_1.replace.syncFile, replace_js_1.replace.asyncFile, replace_js_1.replace.syncNoFile, replace_js_1.replace.asyncNoFile, (opt, entries = []) => { + replace_js_1.replace.validate?.(opt, entries); + mtimeFilter(opt); +}); +const mtimeFilter = (opt) => { + const filter = opt.filter; + if (!opt.mtimeCache) { + opt.mtimeCache = new Map(); + } + opt.filter = + filter ? 
+ (path, stat) => filter(path, stat) && + !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0)) + /* c8 ignore stop */ + ) + : (path, stat) => !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0)) + /* c8 ignore stop */ + ); +}; +//# sourceMappingURL=update.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js new file mode 100644 index 00000000000000..f25502776e36a3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/warn-method.js @@ -0,0 +1,31 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.warnMethod = void 0; +const warnMethod = (self, code, message, data = {}) => { + if (self.file) { + data.file = self.file; + } + if (self.cwd) { + data.cwd = self.cwd; + } + data.code = + (message instanceof Error && + message.code) || + code; + data.tarCode = code; + if (!self.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data); + message = message.message; + } + self.emit('warn', code, message, data); + } + else if (message instanceof Error) { + self.emit('error', Object.assign(message, data)); + } + else { + self.emit('error', Object.assign(new Error(`${code}: ${message}`), data)); + } +}; +exports.warnMethod = warnMethod; +//# sourceMappingURL=warn-method.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js new file mode 100644 index 00000000000000..c0a4405812929e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/winchars.js @@ -0,0 +1,14 @@ +"use strict"; +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.decode = exports.encode = void 0; +const raw = ['|', '<', '>', '?', ':']; +const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0))); +const toWin = new Map(raw.map((char, i) => [char, win[i]])); +const toRaw = new Map(win.map((char, i) => [char, raw[i]])); +const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s); +exports.encode = encode; +const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s); +exports.decode = decode; +//# sourceMappingURL=winchars.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js new file mode 100644 index 00000000000000..45b7efeb795027 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/commonjs/write-entry.js @@ -0,0 +1,689 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? 
(function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.WriteEntryTar = exports.WriteEntrySync = exports.WriteEntry = void 0; +const fs_1 = __importDefault(require("fs")); +const minipass_1 = require("minipass"); +const path_1 = __importDefault(require("path")); +const header_js_1 = require("./header.js"); +const mode_fix_js_1 = require("./mode-fix.js"); +const normalize_windows_path_js_1 = require("./normalize-windows-path.js"); +const options_js_1 = require("./options.js"); +const pax_js_1 = require("./pax.js"); +const strip_absolute_path_js_1 = require("./strip-absolute-path.js"); +const strip_trailing_slashes_js_1 = require("./strip-trailing-slashes.js"); +const warn_method_js_1 = require("./warn-method.js"); +const winchars = __importStar(require("./winchars.js")); +const prefixPath = (path, prefix) => { + if (!prefix) { + return (0, normalize_windows_path_js_1.normalizeWindowsPath)(path); + } + path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path).replace(/^\.(\/|$)/, ''); + return (0, strip_trailing_slashes_js_1.stripTrailingSlashes)(prefix) + '/' + path; +}; +const maxReadSize = 16 * 1024 * 1024; +const PROCESS = Symbol('process'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const HEADER = Symbol('header'); +const READ = Symbol('read'); +const LSTAT = Symbol('lstat'); +const ONLSTAT = Symbol('onlstat'); +const ONREAD = Symbol('onread'); +const ONREADLINK = Symbol('onreadlink'); +const OPENFILE = Symbol('openfile'); +const ONOPENFILE = Symbol('onopenfile'); +const CLOSE = Symbol('close'); +const MODE = Symbol('mode'); +const AWAITDRAIN = Symbol('awaitDrain'); +const ONDRAIN = Symbol('ondrain'); +const PREFIX = Symbol('prefix'); +class WriteEntry extends minipass_1.Minipass { + path; + portable; + myuid = (process.getuid && process.getuid()) || 0; + // until node has builtin pwnam functions, this'll have to do + myuser = process.env.USER || ''; + maxReadSize; + linkCache; + statCache; + preservePaths; + cwd; + strict; + mtime; + noPax; + noMtime; + prefix; + fd; + blockLen = 0; + blockRemain = 0; + buf; + pos = 0; + remain = 0; + length = 0; + offset = 0; + win32; + absolute; + header; + type; + linkpath; + stat; + onWriteEntry; + #hadError = false; + constructor(p, opt_ = {}) { + const opt = (0, options_js_1.dealias)(opt_); + super(); + this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(p); + // suppress atime, ctime, uid, gid, uname, gname + this.portable = !!opt.portable; + this.maxReadSize = opt.maxReadSize || maxReadSize; + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.preservePaths = !!opt.preservePaths; + this.cwd = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.cwd || process.cwd()); + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + 
this.mtime = opt.mtime; + this.prefix = + opt.prefix ? (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.prefix) : undefined; + this.onWriteEntry = opt.onWriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.win32 = !!opt.win32 || process.platform === 'win32'; + if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. + this.path = winchars.decode(this.path.replace(/\\/g, '/')); + p = p.replace(/\\/g, '/'); + } + this.absolute = (0, normalize_windows_path_js_1.normalizeWindowsPath)(opt.absolute || path_1.default.resolve(this.cwd, p)); + if (this.path === '') { + this.path = './'; + } + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + const cs = this.statCache.get(this.absolute); + if (cs) { + this[ONLSTAT](cs); + } + else { + this[LSTAT](); + } + } + warn(code, message, data = {}) { + return (0, warn_method_js_1.warnMethod)(this, code, message, data); + } + emit(ev, ...data) { + if (ev === 'error') { + this.#hadError = true; + } + return super.emit(ev, ...data); + } + [LSTAT]() { + fs_1.default.lstat(this.absolute, (er, stat) => { + if (er) { + return this.emit('error', er); + } + this[ONLSTAT](stat); + }); + } + [ONLSTAT](stat) { + this.statCache.set(this.absolute, stat); + this.stat = stat; + if (!stat.isFile()) { + stat.size = 0; + } + this.type = getType(stat); + this.emit('stat', stat); + this[PROCESS](); + } + [PROCESS]() { + switch (this.type) { + case 'File': + return this[FILE](); + case 'Directory': + return this[DIRECTORY](); + case 'SymbolicLink': + return this[SYMLINK](); + // unsupported types are ignored. + default: + return this.end(); + } + } + [MODE](mode) { + return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [HEADER]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot write header before stat'); + } + /* c8 ignore stop */ + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.onWriteEntry?.(this); + this.header = new header_js_1.Header({ + path: this[PREFIX](this.path), + // only apply the prefix to hard links. + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? undefined : this.stat.uid, + gid: this.portable ? undefined : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime, + /* c8 ignore next */ + type: this.type === 'Unsupported' ? undefined : this.type, + uname: this.portable ? undefined + : this.stat.uid === this.myuid ? this.myuser + : '', + atime: this.portable ? undefined : this.stat.atime, + ctime: this.portable ? undefined : this.stat.ctime, + }); + if (this.header.encode() && !this.noPax) { + super.write(new pax_js_1.Pax({ + atime: this.portable ? undefined : this.header.atime, + ctime: this.portable ? 
undefined : this.header.ctime, + gid: this.portable ? undefined : this.header.gid, + mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime), + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.header.size, + uid: this.portable ? undefined : this.header.uid, + uname: this.portable ? undefined : this.header.uname, + dev: this.portable ? undefined : this.stat.dev, + ino: this.portable ? undefined : this.stat.ino, + nlink: this.portable ? undefined : this.stat.nlink, + }).encode()); + } + const block = this.header?.block; + /* c8 ignore start */ + if (!block) { + throw new Error('failed to encode header'); + } + /* c8 ignore stop */ + super.write(block); + } + [DIRECTORY]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create directory entry without stat'); + } + /* c8 ignore stop */ + if (this.path.slice(-1) !== '/') { + this.path += '/'; + } + this.stat.size = 0; + this[HEADER](); + this.end(); + } + [SYMLINK]() { + fs_1.default.readlink(this.absolute, (er, linkpath) => { + if (er) { + return this.emit('error', er); + } + this[ONREADLINK](linkpath); + }); + } + [ONREADLINK](linkpath) { + this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(linkpath); + this[HEADER](); + this.end(); + } + [HARDLINK](linkpath) { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create link entry without stat'); + } + /* c8 ignore stop */ + this.type = 'Link'; + this.linkpath = (0, normalize_windows_path_js_1.normalizeWindowsPath)(path_1.default.relative(this.cwd, linkpath)); + this.stat.size = 0; + this[HEADER](); + this.end(); + } + [FILE]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create file entry without stat'); + } + /* c8 ignore stop */ + if (this.stat.nlink > 1) { + const linkKey = `${this.stat.dev}:${this.stat.ino}`; + const linkpath = this.linkCache.get(linkKey); + if (linkpath?.indexOf(this.cwd) === 0) { + return this[HARDLINK](linkpath); + } + this.linkCache.set(linkKey, this.absolute); + } + this[HEADER](); + if (this.stat.size === 0) { + return this.end(); + } + this[OPENFILE](); + } + [OPENFILE]() { + fs_1.default.open(this.absolute, 'r', (er, fd) => { + if (er) { + return this.emit('error', er); + } + this[ONOPENFILE](fd); + }); + } + [ONOPENFILE](fd) { + this.fd = fd; + if (this.#hadError) { + return this[CLOSE](); + } + /* c8 ignore start */ + if (!this.stat) { + throw new Error('should stat before calling onopenfile'); + } + /* c8 ignore start */ + this.blockLen = 512 * Math.ceil(this.stat.size / 512); + this.blockRemain = this.blockLen; + const bufLen = Math.min(this.blockLen, this.maxReadSize); + this.buf = Buffer.allocUnsafe(bufLen); + this.offset = 0; + this.pos = 0; + this.remain = this.stat.size; + this.length = this.buf.length; + this[READ](); + } + [READ]() { + const { fd, buf, offset, length, pos } = this; + if (fd === undefined || buf === undefined) { + throw new Error('cannot read file without first opening'); + } + fs_1.default.read(fd, buf, offset, length, pos, (er, bytesRead) => { + if (er) { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + return this[CLOSE](() => this.emit('error', er)); + } + this[ONREAD](bytesRead); + }); + } + /* c8 ignore start */ + [CLOSE](cb = () => { }) { + /* c8 ignore stop */ + if (this.fd !== undefined) + fs_1.default.close(this.fd, cb); + } + 
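`[ONOPENFILE]` above sizes its read buffer against `blockLen`, the file size rounded up to tar's 512-byte block granularity, rather than against the raw size; whatever the file does not fill is NUL padding. The arithmetic in isolation:

// 512-byte block rounding used by the writer above (illustrative)
const BLOCK = 512;
const blockLen = size => BLOCK * Math.ceil(size / BLOCK);
const padding = size => blockLen(size) - size;

console.log(blockLen(0));   // 0    -> empty files emit no data blocks
console.log(blockLen(1));   // 512
console.log(blockLen(512)); // 512
console.log(blockLen(513)); // 1024
console.log(padding(513));  // 511 NUL bytes appended after the data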
[ONREAD](bytesRead) { + if (bytesRead <= 0 && this.remain > 0) { + const er = Object.assign(new Error('encountered unexpected EOF'), { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }); + return this[CLOSE](() => this.emit('error', er)); + } + if (bytesRead > this.remain) { + const er = Object.assign(new Error('did not encounter expected EOF'), { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }); + return this[CLOSE](() => this.emit('error', er)); + } + /* c8 ignore start */ + if (!this.buf) { + throw new Error('should have created buffer prior to reading'); + } + /* c8 ignore stop */ + // null out the rest of the buffer, if we could fit the block padding + // at the end of this loop, we've incremented bytesRead and this.remain + // to be incremented up to the blockRemain level, as if we had expected + // to get a null-padded file, and read it until the end. then we will + // decrement both remain and blockRemain by bytesRead, and know that we + // reached the expected EOF, without any null buffer to append. + if (bytesRead === this.remain) { + for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { + this.buf[i + this.offset] = 0; + bytesRead++; + this.remain++; + } + } + const chunk = this.offset === 0 && bytesRead === this.buf.length ? + this.buf + : this.buf.subarray(this.offset, this.offset + bytesRead); + const flushed = this.write(chunk); + if (!flushed) { + this[AWAITDRAIN](() => this[ONDRAIN]()); + } + else { + this[ONDRAIN](); + } + } + [AWAITDRAIN](cb) { + this.once('drain', cb); + } + write(chunk, encoding, cb) { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8'); + } + /* c8 ignore stop */ + if (this.blockRemain < chunk.length) { + const er = Object.assign(new Error('writing more data than expected'), { + path: this.absolute, + }); + return this.emit('error', er); + } + this.remain -= chunk.length; + this.blockRemain -= chunk.length; + this.pos += chunk.length; + this.offset += chunk.length; + return super.write(chunk, null, cb); + } + [ONDRAIN]() { + if (!this.remain) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)); + } + return this[CLOSE](er => er ? this.emit('error', er) : this.end()); + } + /* c8 ignore start */ + if (!this.buf) { + throw new Error('buffer lost somehow in ONDRAIN'); + } + /* c8 ignore stop */ + if (this.offset >= this.length) { + // if we only have a smaller bit left to read, alloc a smaller buffer + // otherwise, keep it the same length it was before. 
+ this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)); + this.offset = 0; + } + this.length = this.buf.length - this.offset; + this[READ](); + } +} +exports.WriteEntry = WriteEntry; +class WriteEntrySync extends WriteEntry { + sync = true; + [LSTAT]() { + this[ONLSTAT](fs_1.default.lstatSync(this.absolute)); + } + [SYMLINK]() { + this[ONREADLINK](fs_1.default.readlinkSync(this.absolute)); + } + [OPENFILE]() { + this[ONOPENFILE](fs_1.default.openSync(this.absolute, 'r')); + } + [READ]() { + let threw = true; + try { + const { fd, buf, offset, length, pos } = this; + /* c8 ignore start */ + if (fd === undefined || buf === undefined) { + throw new Error('fd and buf must be set in READ method'); + } + /* c8 ignore stop */ + const bytesRead = fs_1.default.readSync(fd, buf, offset, length, pos); + this[ONREAD](bytesRead); + threw = false; + } + finally { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + if (threw) { + try { + this[CLOSE](() => { }); + } + catch (er) { } + } + } + } + [AWAITDRAIN](cb) { + cb(); + } + /* c8 ignore start */ + [CLOSE](cb = () => { }) { + /* c8 ignore stop */ + if (this.fd !== undefined) + fs_1.default.closeSync(this.fd); + cb(); + } +} +exports.WriteEntrySync = WriteEntrySync; +class WriteEntryTar extends minipass_1.Minipass { + blockLen = 0; + blockRemain = 0; + buf = 0; + pos = 0; + remain = 0; + length = 0; + preservePaths; + portable; + strict; + noPax; + noMtime; + readEntry; + type; + prefix; + path; + mode; + uid; + gid; + uname; + gname; + header; + mtime; + atime; + ctime; + linkpath; + size; + onWriteEntry; + warn(code, message, data = {}) { + return (0, warn_method_js_1.warnMethod)(this, code, message, data); + } + constructor(readEntry, opt_ = {}) { + const opt = (0, options_js_1.dealias)(opt_); + super(); + this.preservePaths = !!opt.preservePaths; + this.portable = !!opt.portable; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + this.onWriteEntry = opt.onWriteEntry; + this.readEntry = readEntry; + const { type } = readEntry; + /* c8 ignore start */ + if (type === 'Unsupported') { + throw new Error('writing entry that should be ignored'); + } + /* c8 ignore stop */ + this.type = type; + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.prefix = opt.prefix; + this.path = (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.path); + this.mode = + readEntry.mode !== undefined ? + this[MODE](readEntry.mode) + : undefined; + this.uid = this.portable ? undefined : readEntry.uid; + this.gid = this.portable ? undefined : readEntry.gid; + this.uname = this.portable ? undefined : readEntry.uname; + this.gname = this.portable ? undefined : readEntry.gname; + this.size = readEntry.size; + this.mtime = + this.noMtime ? undefined : opt.mtime || readEntry.mtime; + this.atime = this.portable ? undefined : readEntry.atime; + this.ctime = this.portable ? undefined : readEntry.ctime; + this.linkpath = + readEntry.linkpath !== undefined ? 
+ (0, normalize_windows_path_js_1.normalizeWindowsPath)(readEntry.linkpath) + : undefined; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = (0, strip_absolute_path_js_1.stripAbsolutePath)(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.remain = readEntry.size; + this.blockRemain = readEntry.startBlockSize; + this.onWriteEntry?.(this); + this.header = new header_js_1.Header({ + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? undefined : this.uid, + gid: this.portable ? undefined : this.gid, + size: this.size, + mtime: this.noMtime ? undefined : this.mtime, + type: this.type, + uname: this.portable ? undefined : this.uname, + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + }); + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + if (this.header.encode() && !this.noPax) { + super.write(new pax_js_1.Pax({ + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + gid: this.portable ? undefined : this.gid, + mtime: this.noMtime ? undefined : this.mtime, + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.size, + uid: this.portable ? undefined : this.uid, + uname: this.portable ? undefined : this.uname, + dev: this.portable ? undefined : this.readEntry.dev, + ino: this.portable ? undefined : this.readEntry.ino, + nlink: this.portable ? undefined : this.readEntry.nlink, + }).encode()); + } + const b = this.header?.block; + /* c8 ignore start */ + if (!b) + throw new Error('failed to encode header'); + /* c8 ignore stop */ + super.write(b); + readEntry.pipe(this); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [MODE](mode) { + return (0, mode_fix_js_1.modeFix)(mode, this.type === 'Directory', this.portable); + } + write(chunk, encoding, cb) { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8'); + } + /* c8 ignore stop */ + const writeLen = chunk.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + this.blockRemain -= writeLen; + return super.write(chunk, cb); + } + end(chunk, encoding, cb) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)); + } + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding ?? 'utf8'); + } + if (cb) + this.once('finish', cb); + chunk ? 
super.end(chunk, cb) : super.end(cb); + /* c8 ignore stop */ + return this; + } +} +exports.WriteEntryTar = WriteEntryTar; +const getType = (stat) => stat.isFile() ? 'File' + : stat.isDirectory() ? 'Directory' + : stat.isSymbolicLink() ? 'SymbolicLink' + : 'Unsupported'; +//# sourceMappingURL=write-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/create.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/create.js new file mode 100644 index 00000000000000..512a9911d70d5b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/create.js @@ -0,0 +1,77 @@ +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass'; +import path from 'node:path'; +import { list } from './list.js'; +import { makeCommand } from './make-command.js'; +import { Pack, PackSync } from './pack.js'; +const createFileSync = (opt, files) => { + const p = new PackSync(opt); + const stream = new WriteStreamSync(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const createFile = (opt, files) => { + const p = new Pack(opt); + const stream = new WriteStream(opt.file, { + mode: opt.mode || 0o666, + }); + p.pipe(stream); + const promise = new Promise((res, rej) => { + stream.on('error', rej); + stream.on('close', res); + p.on('error', rej); + }); + addFilesAsync(p, files); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + list({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await list({ + file: path.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => { + p.add(entry); + }, + }); + } + else { + p.add(file); + } + } + p.end(); +}; +const createSync = (opt, files) => { + const p = new PackSync(opt); + addFilesSync(p, files); + return p; +}; +const createAsync = (opt, files) => { + const p = new Pack(opt); + addFilesAsync(p, files); + return p; +}; +export const create = makeCommand(createFileSync, createFile, createSync, createAsync, (_opt, files) => { + if (!files?.length) { + throw new TypeError('no paths specified to add to archive'); + } +}); +//# sourceMappingURL=create.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js new file mode 100644 index 00000000000000..289a066b8e0317 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/cwd-error.js @@ -0,0 +1,14 @@ +export class CwdError extends Error { + path; + code; + syscall = 'chdir'; + constructor(path, code) { + super(`${code}: Cannot cd into '${path}'`); + this.path = path; + this.code = code; + } + get name() { + return 'CwdError'; + } +} +//# sourceMappingURL=cwd-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/extract.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/extract.js new file mode 100644 index 00000000000000..2274feef26e78f --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/extract.js @@ -0,0 +1,49 @@ +// tar -x +import * as fsm from '@isaacs/fs-minipass'; +import fs from 'node:fs'; +import { 
filesFilter } from './list.js'; +import { makeCommand } from './make-command.js'; +import { Unpack, UnpackSync } from './unpack.js'; +const extractFileSync = (opt) => { + const u = new UnpackSync(opt); + const file = opt.file; + const stat = fs.statSync(file); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const stream = new fsm.ReadStreamSync(file, { + readSize: readSize, + size: stat.size, + }); + stream.pipe(u); +}; +const extractFile = (opt, _) => { + const u = new Unpack(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + u.on('error', reject); + u.on('close', resolve); + // This trades a zero-byte read() syscall for a stat + // However, it will usually result in less memory allocation + fs.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(u); + } + }); + }); + return p; +}; +export const extract = makeCommand(extractFileSync, extractFile, opt => new UnpackSync(opt), opt => new Unpack(opt), (opt, files) => { + if (files?.length) + filesFilter(opt, files); +}); +//# sourceMappingURL=extract.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js new file mode 100644 index 00000000000000..2c7f3e8b28fdaf --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/get-write-flag.js @@ -0,0 +1,23 @@ +// Get the appropriate flag to use for creating files +// We use fmap on Windows platforms for files less than +// 512kb. This is a fairly low limit, but avoids making +// things slower in some cases. Since most of what this +// library is used for is extracting tarballs of many +// relatively small files in npm packages and the like, +// it can be a big boost on Windows platforms. +import fs from 'fs'; +const platform = process.env.__FAKE_PLATFORM__ || process.platform; +const isWindows = platform === 'win32'; +/* c8 ignore start */ +const { O_CREAT, O_TRUNC, O_WRONLY } = fs.constants; +const UV_FS_O_FILEMAP = Number(process.env.__FAKE_FS_O_FILENAME__) || + fs.constants.UV_FS_O_FILEMAP || + 0; +/* c8 ignore stop */ +const fMapEnabled = isWindows && !!UV_FS_O_FILEMAP; +const fMapLimit = 512 * 1024; +const fMapFlag = UV_FS_O_FILEMAP | O_TRUNC | O_CREAT | O_WRONLY; +export const getWriteFlag = !fMapEnabled ? + () => 'w' + : (size) => (size < fMapLimit ? fMapFlag : 'w'); +//# sourceMappingURL=get-write-flag.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/header.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/header.js new file mode 100644 index 00000000000000..e15192b14b16e1 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/header.js @@ -0,0 +1,279 @@ +// parse a 512-byte header block to a data object, or vice-versa +// encode returns `true` if a pax extended header is needed, because +// the data could not be faithfully encoded in a simple header. +// (Also, check header.needPax to see if it needs a pax header.) 
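The checksum rule that `decode()` and `encode()` below share: sum every byte of the 512-byte block, counting the 8-byte cksum field (offsets 148 through 155) as ASCII spaces. A standalone sketch of that rule:

// tar header checksum (illustrative): the cksum field counts as 8 spaces
const checksum = (buf, off = 0) => {
  let sum = 8 * 0x20;
  for (let i = off; i < off + 148; i++) sum += buf[i];
  for (let i = off + 156; i < off + 512; i++) sum += buf[i];
  return sum;
};

// an all-zero block sums to exactly 8 * 0x20, which is how the
// nullBlock detection in decode() recognizes end-of-archive blocks
console.log(checksum(Buffer.alloc(512)) === 8 * 0x20); // true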
+import { posix as pathModule } from 'node:path'; +import * as large from './large-numbers.js'; +import * as types from './types.js'; +export class Header { + cksumValid = false; + needPax = false; + nullBlock = false; + block; + path; + mode; + uid; + gid; + size; + cksum; + #type = 'Unsupported'; + linkpath; + uname; + gname; + devmaj = 0; + devmin = 0; + atime; + ctime; + mtime; + charset; + comment; + constructor(data, off = 0, ex, gex) { + if (Buffer.isBuffer(data)) { + this.decode(data, off || 0, ex, gex); + } + else if (data) { + this.#slurp(data); + } + } + decode(buf, off, ex, gex) { + if (!off) { + off = 0; + } + if (!buf || !(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + this.path = decString(buf, off, 100); + this.mode = decNumber(buf, off + 100, 8); + this.uid = decNumber(buf, off + 108, 8); + this.gid = decNumber(buf, off + 116, 8); + this.size = decNumber(buf, off + 124, 12); + this.mtime = decDate(buf, off + 136, 12); + this.cksum = decNumber(buf, off + 148, 12); + // if we have extended or global extended headers, apply them now + // See https://github.com/npm/node-tar/pull/187 + // Apply global before local, so it overrides + if (gex) + this.#slurp(gex, true); + if (ex) + this.#slurp(ex); + // old tar versions marked dirs as a file with a trailing / + const t = decString(buf, off + 156, 1); + if (types.isCode(t)) { + this.#type = t || '0'; + } + if (this.#type === '0' && this.path.slice(-1) === '/') { + this.#type = '5'; + } + // tar implementations sometimes incorrectly put the stat(dir).size + // as the size in the tarball, even though Directory entries are + // not able to have any body at all. In the very rare chance that + // it actually DOES have a body, we weren't going to do anything with + // it anyway, and it'll just be a warning about an invalid header. + if (this.#type === '5') { + this.size = 0; + } + this.linkpath = decString(buf, off + 157, 100); + if (buf.subarray(off + 257, off + 265).toString() === + 'ustar\u000000') { + this.uname = decString(buf, off + 265, 32); + this.gname = decString(buf, off + 297, 32); + /* c8 ignore start */ + this.devmaj = decNumber(buf, off + 329, 8) ?? 0; + this.devmin = decNumber(buf, off + 337, 8) ?? 0; + /* c8 ignore stop */ + if (buf[off + 475] !== 0) { + // definitely a prefix, definitely >130 chars. + const prefix = decString(buf, off + 345, 155); + this.path = prefix + '/' + this.path; + } + else { + const prefix = decString(buf, off + 345, 130); + if (prefix) { + this.path = prefix + '/' + this.path; + } + this.atime = decDate(buf, off + 476, 12); + this.ctime = decDate(buf, off + 488, 12); + } + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksumValid = sum === this.cksum; + if (this.cksum === undefined && sum === 8 * 0x20) { + this.nullBlock = true; + } + } + #slurp(ex, gex = false) { + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. 
+ return !(v === null || + v === undefined || + (k === 'path' && gex) || + (k === 'linkpath' && gex) || + k === 'global'); + }))); + } + encode(buf, off = 0) { + if (!buf) { + buf = this.block = Buffer.alloc(512); + } + if (this.#type === 'Unsupported') { + this.#type = '0'; + } + if (!(buf.length >= off + 512)) { + throw new Error('need 512 bytes for header'); + } + const prefixSize = this.ctime || this.atime ? 130 : 155; + const split = splitPrefix(this.path || '', prefixSize); + const path = split[0]; + const prefix = split[1]; + this.needPax = !!split[2]; + this.needPax = encString(buf, off, 100, path) || this.needPax; + this.needPax = + encNumber(buf, off + 100, 8, this.mode) || this.needPax; + this.needPax = + encNumber(buf, off + 108, 8, this.uid) || this.needPax; + this.needPax = + encNumber(buf, off + 116, 8, this.gid) || this.needPax; + this.needPax = + encNumber(buf, off + 124, 12, this.size) || this.needPax; + this.needPax = + encDate(buf, off + 136, 12, this.mtime) || this.needPax; + buf[off + 156] = this.#type.charCodeAt(0); + this.needPax = + encString(buf, off + 157, 100, this.linkpath) || this.needPax; + buf.write('ustar\u000000', off + 257, 8); + this.needPax = + encString(buf, off + 265, 32, this.uname) || this.needPax; + this.needPax = + encString(buf, off + 297, 32, this.gname) || this.needPax; + this.needPax = + encNumber(buf, off + 329, 8, this.devmaj) || this.needPax; + this.needPax = + encNumber(buf, off + 337, 8, this.devmin) || this.needPax; + this.needPax = + encString(buf, off + 345, prefixSize, prefix) || this.needPax; + if (buf[off + 475] !== 0) { + this.needPax = + encString(buf, off + 345, 155, prefix) || this.needPax; + } + else { + this.needPax = + encString(buf, off + 345, 130, prefix) || this.needPax; + this.needPax = + encDate(buf, off + 476, 12, this.atime) || this.needPax; + this.needPax = + encDate(buf, off + 488, 12, this.ctime) || this.needPax; + } + let sum = 8 * 0x20; + for (let i = off; i < off + 148; i++) { + sum += buf[i]; + } + for (let i = off + 156; i < off + 512; i++) { + sum += buf[i]; + } + this.cksum = sum; + encNumber(buf, off + 148, 8, this.cksum); + this.cksumValid = true; + return this.needPax; + } + get type() { + return (this.#type === 'Unsupported' ? + this.#type + : types.name.get(this.#type)); + } + get typeKey() { + return this.#type; + } + set type(type) { + const c = String(types.code.get(type)); + if (types.isCode(c) || c === 'Unsupported') { + this.#type = c; + } + else if (types.isCode(type)) { + this.#type = type; + } + else { + throw new TypeError('invalid entry type: ' + type); + } + } +} +const splitPrefix = (p, prefixSize) => { + const pathSize = 100; + let pp = p; + let prefix = ''; + let ret = undefined; + const root = pathModule.parse(p).root || '.'; + if (Buffer.byteLength(pp) < pathSize) { + ret = [pp, prefix, false]; + } + else { + // first set prefix to the dir, and path to the base + prefix = pathModule.dirname(pp); + pp = pathModule.basename(pp); + do { + if (Buffer.byteLength(pp) <= pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // both fit! 
+ ret = [pp, prefix, false]; + } + else if (Buffer.byteLength(pp) > pathSize && + Buffer.byteLength(prefix) <= prefixSize) { + // prefix fits in prefix, but path doesn't fit in path + ret = [pp.slice(0, pathSize - 1), prefix, true]; + } + else { + // make path take a bit from prefix + pp = pathModule.join(pathModule.basename(prefix), pp); + prefix = pathModule.dirname(prefix); + } + } while (prefix !== root && ret === undefined); + // at this point, found no resolution, just truncate + if (!ret) { + ret = [p.slice(0, pathSize - 1), '', true]; + } + } + return ret; +}; +const decString = (buf, off, size) => buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*/, ''); +const decDate = (buf, off, size) => numToDate(decNumber(buf, off, size)); +const numToDate = (num) => num === undefined ? undefined : new Date(num * 1000); +const decNumber = (buf, off, size) => Number(buf[off]) & 0x80 ? + large.parse(buf.subarray(off, off + size)) + : decSmallNumber(buf, off, size); +const nanUndef = (value) => (isNaN(value) ? undefined : value); +const decSmallNumber = (buf, off, size) => nanUndef(parseInt(buf + .subarray(off, off + size) + .toString('utf8') + .replace(/\0.*$/, '') + .trim(), 8)); +// the maximum encodable as a null-terminated octal, by field size +const MAXNUM = { + 12: 0o77777777777, + 8: 0o7777777, +}; +const encNumber = (buf, off, size, num) => num === undefined ? false + : num > MAXNUM[size] || num < 0 ? + (large.encode(num, buf.subarray(off, off + size)), true) + : (encSmallNumber(buf, off, size, num), false); +const encSmallNumber = (buf, off, size, num) => buf.write(octalString(num, size), off, size, 'ascii'); +const octalString = (num, size) => padOctal(Math.floor(num).toString(8), size); +const padOctal = (str, size) => (str.length === size - 1 ? + str + : new Array(size - str.length - 1).join('0') + str + ' ') + '\0'; +const encDate = (buf, off, size, date) => date === undefined ? false : (encNumber(buf, off, size, date.getTime() / 1000)); +// enough to fill the longest string we've got +const NULLS = new Array(156).join('\0'); +// pad with nulls, return true if it's longer or non-ascii +const encString = (buf, off, size, str) => str === undefined ? 
false : ((buf.write(str + NULLS, off, size, 'utf8'), + str.length !== Buffer.byteLength(str) || str.length > size)); +//# sourceMappingURL=header.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/index.js new file mode 100644 index 00000000000000..1bac6415c8d732 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/index.js @@ -0,0 +1,20 @@ +export * from './create.js'; +export { create as c } from './create.js'; +export * from './extract.js'; +export { extract as x } from './extract.js'; +export * from './header.js'; +export * from './list.js'; +export { list as t } from './list.js'; +// classes +export * from './pack.js'; +export * from './parse.js'; +export * from './pax.js'; +export * from './read-entry.js'; +export * from './replace.js'; +export { replace as r } from './replace.js'; +export * as types from './types.js'; +export * from './unpack.js'; +export * from './update.js'; +export { update as u } from './update.js'; +export * from './write-entry.js'; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js new file mode 100644 index 00000000000000..4f2f7e5f14fc1b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/large-numbers.js @@ -0,0 +1,94 @@ +// Tar can encode large and negative numbers using a leading byte of +// 0xff for negative, and 0x80 for positive. +export const encode = (num, buf) => { + if (!Number.isSafeInteger(num)) { + // The number is so large that javascript cannot represent it with integer + // precision. + throw Error('cannot encode number outside of javascript safe integer range'); + } + else if (num < 0) { + encodeNegative(num, buf); + } + else { + encodePositive(num, buf); + } + return buf; +}; +const encodePositive = (num, buf) => { + buf[0] = 0x80; + for (var i = buf.length; i > 1; i--) { + buf[i - 1] = num & 0xff; + num = Math.floor(num / 0x100); + } +}; +const encodeNegative = (num, buf) => { + buf[0] = 0xff; + var flipped = false; + num = num * -1; + for (var i = buf.length; i > 1; i--) { + var byte = num & 0xff; + num = Math.floor(num / 0x100); + if (flipped) { + buf[i - 1] = onesComp(byte); + } + else if (byte === 0) { + buf[i - 1] = 0; + } + else { + flipped = true; + buf[i - 1] = twosComp(byte); + } + } +}; +export const parse = (buf) => { + const pre = buf[0]; + const value = pre === 0x80 ? pos(buf.subarray(1, buf.length)) + : pre === 0xff ? twos(buf) + : null; + if (value === null) { + throw Error('invalid base256 encoding'); + } + if (!Number.isSafeInteger(value)) { + // The number is so large that javascript cannot represent it with integer + // precision. 
+ throw Error('parsed number outside of javascript safe integer range'); + } + return value; +}; +const twos = (buf) => { + var len = buf.length; + var sum = 0; + var flipped = false; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + var f; + if (flipped) { + f = onesComp(byte); + } + else if (byte === 0) { + f = byte; + } + else { + flipped = true; + f = twosComp(byte); + } + if (f !== 0) { + sum -= f * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const pos = (buf) => { + var len = buf.length; + var sum = 0; + for (var i = len - 1; i > -1; i--) { + var byte = Number(buf[i]); + if (byte !== 0) { + sum += byte * Math.pow(256, len - i - 1); + } + } + return sum; +}; +const onesComp = (byte) => (0xff ^ byte) & 0xff; +const twosComp = (byte) => ((0xff ^ byte) + 1) & 0xff; +//# sourceMappingURL=large-numbers.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/list.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/list.js new file mode 100644 index 00000000000000..f49068400b6c92 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/list.js @@ -0,0 +1,106 @@ +// tar -t +import * as fsm from '@isaacs/fs-minipass'; +import fs from 'node:fs'; +import { dirname, parse } from 'path'; +import { makeCommand } from './make-command.js'; +import { Parser } from './parse.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +const onReadEntryFunction = (opt) => { + const onReadEntry = opt.onReadEntry; + opt.onReadEntry = + onReadEntry ? + e => { + onReadEntry(e); + e.resume(); + } + : e => e.resume(); +}; +// construct a filter that limits the file entries listed +// include child entries if a dir is included +export const filesFilter = (opt, files) => { + const map = new Map(files.map(f => [stripTrailingSlashes(f), true])); + const filter = opt.filter; + const mapHas = (file, r = '') => { + const root = r || parse(file).root || '.'; + let ret; + if (file === root) + ret = false; + else { + const m = map.get(file); + if (m !== undefined) { + ret = m; + } + else { + ret = mapHas(dirname(file), root); + } + } + map.set(file, ret); + return ret; + }; + opt.filter = + filter ? 
+ (file, entry) => filter(file, entry) && mapHas(stripTrailingSlashes(file)) + : file => mapHas(stripTrailingSlashes(file)); +}; +const listFileSync = (opt) => { + const p = new Parser(opt); + const file = opt.file; + let fd; + try { + const stat = fs.statSync(file); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + if (stat.size < readSize) { + p.end(fs.readFileSync(file)); + } + else { + let pos = 0; + const buf = Buffer.allocUnsafe(readSize); + fd = fs.openSync(file, 'r'); + while (pos < stat.size) { + const bytesRead = fs.readSync(fd, buf, 0, readSize, pos); + pos += bytesRead; + p.write(buf.subarray(0, bytesRead)); + } + p.end(); + } + } + finally { + if (typeof fd === 'number') { + try { + fs.closeSync(fd); + /* c8 ignore next */ + } + catch (er) { } + } + } +}; +const listFile = (opt, _files) => { + const parse = new Parser(opt); + const readSize = opt.maxReadSize || 16 * 1024 * 1024; + const file = opt.file; + const p = new Promise((resolve, reject) => { + parse.on('error', reject); + parse.on('end', resolve); + fs.stat(file, (er, stat) => { + if (er) { + reject(er); + } + else { + const stream = new fsm.ReadStream(file, { + readSize: readSize, + size: stat.size, + }); + stream.on('error', reject); + stream.pipe(parse); + } + }); + }); + return p; +}; +export const list = makeCommand(listFileSync, listFile, opt => new Parser(opt), opt => new Parser(opt), (opt, files) => { + if (files?.length) + filesFilter(opt, files); + if (!opt.noResume) + onReadEntryFunction(opt); +}); +//# sourceMappingURL=list.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/make-command.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/make-command.js new file mode 100644 index 00000000000000..f2f737bca78fd7 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/make-command.js @@ -0,0 +1,57 @@ +import { dealias, isAsyncFile, isAsyncNoFile, isSyncFile, isSyncNoFile, } from './options.js'; +export const makeCommand = (syncFile, asyncFile, syncNoFile, asyncNoFile, validate) => { + return Object.assign((opt_ = [], entries, cb) => { + if (Array.isArray(opt_)) { + entries = opt_; + opt_ = {}; + } + if (typeof entries === 'function') { + cb = entries; + entries = undefined; + } + if (!entries) { + entries = []; + } + else { + entries = Array.from(entries); + } + const opt = dealias(opt_); + validate?.(opt, entries); + if (isSyncFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncFile(opt, entries); + } + else if (isAsyncFile(opt)) { + const p = asyncFile(opt, entries); + // weirdness to make TS happy + const c = cb ? cb : undefined; + return c ? 
p.then(() => c(), c) : p; + } + else if (isSyncNoFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback not supported for sync tar functions'); + } + return syncNoFile(opt, entries); + } + else if (isAsyncNoFile(opt)) { + if (typeof cb === 'function') { + throw new TypeError('callback only supported with file option'); + } + return asyncNoFile(opt, entries); + /* c8 ignore start */ + } + else { + throw new Error('impossible options??'); + } + /* c8 ignore stop */ + }, { + syncFile, + asyncFile, + syncNoFile, + asyncNoFile, + validate, + }); +}; +//# sourceMappingURL=make-command.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js new file mode 100644 index 00000000000000..13498ef0082f0b --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mkdir.js @@ -0,0 +1,201 @@ +import { chownr, chownrSync } from 'chownr'; +import fs from 'fs'; +import { mkdirp, mkdirpSync } from 'mkdirp'; +import path from 'node:path'; +import { CwdError } from './cwd-error.js'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +import { SymlinkError } from './symlink-error.js'; +const cGet = (cache, key) => cache.get(normalizeWindowsPath(key)); +const cSet = (cache, key, val) => cache.set(normalizeWindowsPath(key), val); +const checkCwd = (dir, cb) => { + fs.stat(dir, (er, st) => { + if (er || !st.isDirectory()) { + er = new CwdError(dir, er?.code || 'ENOTDIR'); + } + cb(er); + }); +}; +/** + * Wrapper around mkdirp for tar's needs. + * + * The main purpose is to avoid creating directories if we know that + * they already exist (and track which ones exist for this purpose), + * and prevent entries from being extracted into symlinked folders, + * if `preservePaths` is not set. + */ +export const mkdir = (dir, opt, cb) => { + dir = normalizeWindowsPath(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o0700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = normalizeWindowsPath(opt.cwd); + const done = (er, created) => { + if (er) { + cb(er); + } + else { + cSet(cache, dir, true); + if (created && doChown) { + chownr(created, uid, gid, er => done(er)); + } + else if (needChmod) { + fs.chmod(dir, mode, cb); + } + else { + cb(); + } + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + return checkCwd(dir, done); + } + if (preserve) { + return mkdirp(dir, { mode }).then(made => done(null, made ?? 
undefined), // oh, ts + done); + } + const sub = normalizeWindowsPath(path.relative(cwd, dir)); + const parts = sub.split('/'); + mkdir_(cwd, parts, mode, cache, unlink, cwd, undefined, done); +}; +const mkdir_ = (base, parts, mode, cache, unlink, cwd, created, cb) => { + if (!parts.length) { + return cb(null, created); + } + const p = parts.shift(); + const part = normalizeWindowsPath(path.resolve(base + '/' + p)); + if (cGet(cache, part)) { + return mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); +}; +const onmkdir = (part, parts, mode, cache, unlink, cwd, created, cb) => (er) => { + if (er) { + fs.lstat(part, (statEr, st) => { + if (statEr) { + statEr.path = + statEr.path && normalizeWindowsPath(statEr.path); + cb(statEr); + } + else if (st.isDirectory()) { + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } + else if (unlink) { + fs.unlink(part, er => { + if (er) { + return cb(er); + } + fs.mkdir(part, mode, onmkdir(part, parts, mode, cache, unlink, cwd, created, cb)); + }); + } + else if (st.isSymbolicLink()) { + return cb(new SymlinkError(part, part + '/' + parts.join('/'))); + } + else { + cb(er); + } + }); + } + else { + created = created || part; + mkdir_(part, parts, mode, cache, unlink, cwd, created, cb); + } +}; +const checkCwdSync = (dir) => { + let ok = false; + let code = undefined; + try { + ok = fs.statSync(dir).isDirectory(); + } + catch (er) { + code = er?.code; + } + finally { + if (!ok) { + throw new CwdError(dir, code ?? 'ENOTDIR'); + } + } +}; +export const mkdirSync = (dir, opt) => { + dir = normalizeWindowsPath(dir); + // if there's any overlap between mask and mode, + // then we'll need an explicit chmod + /* c8 ignore next */ + const umask = opt.umask ?? 0o22; + const mode = opt.mode | 0o700; + const needChmod = (mode & umask) !== 0; + const uid = opt.uid; + const gid = opt.gid; + const doChown = typeof uid === 'number' && + typeof gid === 'number' && + (uid !== opt.processUid || gid !== opt.processGid); + const preserve = opt.preserve; + const unlink = opt.unlink; + const cache = opt.cache; + const cwd = normalizeWindowsPath(opt.cwd); + const done = (created) => { + cSet(cache, dir, true); + if (created && doChown) { + chownrSync(created, uid, gid); + } + if (needChmod) { + fs.chmodSync(dir, mode); + } + }; + if (cache && cGet(cache, dir) === true) { + return done(); + } + if (dir === cwd) { + checkCwdSync(cwd); + return done(); + } + if (preserve) { + return done(mkdirpSync(dir, mode) ?? 
undefined); + } + const sub = normalizeWindowsPath(path.relative(cwd, dir)); + const parts = sub.split('/'); + let created = undefined; + for (let p = parts.shift(), part = cwd; p && (part += '/' + p); p = parts.shift()) { + part = normalizeWindowsPath(path.resolve(part)); + if (cGet(cache, part)) { + continue; + } + try { + fs.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + } + catch (er) { + const st = fs.lstatSync(part); + if (st.isDirectory()) { + cSet(cache, part, true); + continue; + } + else if (unlink) { + fs.unlinkSync(part); + fs.mkdirSync(part, mode); + created = created || part; + cSet(cache, part, true); + continue; + } + else if (st.isSymbolicLink()) { + return new SymlinkError(part, part + '/' + parts.join('/')); + } + } + } + return done(created); +}; +//# sourceMappingURL=mkdir.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js new file mode 100644 index 00000000000000..5fd3bb88c1cb25 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/mode-fix.js @@ -0,0 +1,25 @@ +export const modeFix = (mode, isDir, portable) => { + mode &= 0o7777; + // in portable mode, use the minimum reasonable umask + // if this system creates files with 0o664 by default + // (as some linux distros do), then we'll write the + // archive with 0o644 instead. Also, don't ever create + // a file that is not readable/writable by the owner. + if (portable) { + mode = (mode | 0o600) & ~0o22; + } + // if dirs are readable, then they should be listable + if (isDir) { + if (mode & 0o400) { + mode |= 0o100; + } + if (mode & 0o40) { + mode |= 0o10; + } + if (mode & 0o4) { + mode |= 0o1; + } + } + return mode; +}; +//# sourceMappingURL=mode-fix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js new file mode 100644 index 00000000000000..94e5095476d6e0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-unicode.js @@ -0,0 +1,13 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +const normalizeCache = Object.create(null); +const { hasOwnProperty } = Object.prototype; +export const normalizeUnicode = (s) => { + if (!hasOwnProperty.call(normalizeCache, s)) { + normalizeCache[s] = s.normalize('NFD'); + } + return normalizeCache[s]; +}; +//# sourceMappingURL=normalize-unicode.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js new file mode 100644 index 00000000000000..2d97d2b884e627 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/normalize-windows-path.js @@ -0,0 +1,9 @@ +// on windows, either \ or / are valid directory separators. +// on unix, \ is a valid character in filenames. +// so, on windows, and only on windows, we replace all \ chars with /, +// so that we can use / as our one and only directory separator char. +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +export const normalizeWindowsPath = platform !== 'win32' ? 
+ (p) => p + : (p) => p && p.replace(/\\/g, '/'); +//# sourceMappingURL=normalize-windows-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/options.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/options.js new file mode 100644 index 00000000000000..a006d36c23c923 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/options.js @@ -0,0 +1,54 @@ +// turn tar(1) style args like `C` into the more verbose things like `cwd` +const argmap = new Map([ + ['C', 'cwd'], + ['f', 'file'], + ['z', 'gzip'], + ['P', 'preservePaths'], + ['U', 'unlink'], + ['strip-components', 'strip'], + ['stripComponents', 'strip'], + ['keep-newer', 'newer'], + ['keepNewer', 'newer'], + ['keep-newer-files', 'newer'], + ['keepNewerFiles', 'newer'], + ['k', 'keep'], + ['keep-existing', 'keep'], + ['keepExisting', 'keep'], + ['m', 'noMtime'], + ['no-mtime', 'noMtime'], + ['p', 'preserveOwner'], + ['L', 'follow'], + ['h', 'follow'], + ['onentry', 'onReadEntry'], +]); +export const isSyncFile = (o) => !!o.sync && !!o.file; +export const isAsyncFile = (o) => !o.sync && !!o.file; +export const isSyncNoFile = (o) => !!o.sync && !o.file; +export const isAsyncNoFile = (o) => !o.sync && !o.file; +export const isSync = (o) => !!o.sync; +export const isAsync = (o) => !o.sync; +export const isFile = (o) => !!o.file; +export const isNoFile = (o) => !o.file; +const dealiasKey = (k) => { + const d = argmap.get(k); + if (d) + return d; + return k; +}; +export const dealias = (opt = {}) => { + if (!opt) + return {}; + const result = {}; + for (const [key, v] of Object.entries(opt)) { + // TS doesn't know that aliases are going to always be the same type + const k = dealiasKey(key); + result[k] = v; + } + // affordance for deprecated noChmod -> chmod + if (result.chmod === undefined && result.noChmod === false) { + result.chmod = true; + } + delete result.noChmod; + return result; +}; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pack.js new file mode 100644 index 00000000000000..f59f32f94201fa --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pack.js @@ -0,0 +1,445 @@ +// A readable tar stream creator +// Technically, this is a transform stream that you write paths into, +// and tar format comes out of. 
+// The `add()` method is like `write()` but returns this, +// and end() returns `this` as well, so you can +// do `new Pack(opt).add('files').add('dir').end().pipe(output)` +// You could also do something like: +// streamOfPaths().pipe(new Pack()).pipe(new fs.WriteStream('out.tar')) +import fs from 'fs'; +import { WriteEntry, WriteEntrySync, WriteEntryTar, } from './write-entry.js'; +export class PackJob { + path; + absolute; + entry; + stat; + readdir; + pending = false; + ignore = false; + piped = false; + constructor(path, absolute) { + this.path = path || './'; + this.absolute = absolute; + } +} +import { Minipass } from 'minipass'; +import * as zlib from 'minizlib'; +import { Yallist } from 'yallist'; +import { ReadEntry } from './read-entry.js'; +import { warnMethod, } from './warn-method.js'; +const EOF = Buffer.alloc(1024); +const ONSTAT = Symbol('onStat'); +const ENDED = Symbol('ended'); +const QUEUE = Symbol('queue'); +const CURRENT = Symbol('current'); +const PROCESS = Symbol('process'); +const PROCESSING = Symbol('processing'); +const PROCESSJOB = Symbol('processJob'); +const JOBS = Symbol('jobs'); +const JOBDONE = Symbol('jobDone'); +const ADDFSENTRY = Symbol('addFSEntry'); +const ADDTARENTRY = Symbol('addTarEntry'); +const STAT = Symbol('stat'); +const READDIR = Symbol('readdir'); +const ONREADDIR = Symbol('onreaddir'); +const PIPE = Symbol('pipe'); +const ENTRY = Symbol('entry'); +const ENTRYOPT = Symbol('entryOpt'); +const WRITEENTRYCLASS = Symbol('writeEntryClass'); +const WRITE = Symbol('write'); +const ONDRAIN = Symbol('ondrain'); +import path from 'path'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +export class Pack extends Minipass { + opt; + cwd; + maxReadSize; + preservePaths; + strict; + noPax; + prefix; + linkCache; + statCache; + file; + portable; + zip; + readdirCache; + noDirRecurse; + follow; + noMtime; + mtime; + filter; + jobs; + [WRITEENTRYCLASS]; + onWriteEntry; + [QUEUE]; + [JOBS] = 0; + [PROCESSING] = false; + [ENDED] = false; + constructor(opt = {}) { + //@ts-ignore + super(); + this.opt = opt; + this.file = opt.file || ''; + this.cwd = opt.cwd || process.cwd(); + this.maxReadSize = opt.maxReadSize; + this.preservePaths = !!opt.preservePaths; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.prefix = normalizeWindowsPath(opt.prefix || ''); + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.readdirCache = opt.readdirCache || new Map(); + this.onWriteEntry = opt.onWriteEntry; + this[WRITEENTRYCLASS] = WriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + this.portable = !!opt.portable; + if (opt.gzip || opt.brotli) { + if (opt.gzip && opt.brotli) { + throw new TypeError('gzip and brotli are mutually exclusive'); + } + if (opt.gzip) { + if (typeof opt.gzip !== 'object') { + opt.gzip = {}; + } + if (this.portable) { + opt.gzip.portable = true; + } + this.zip = new zlib.Gzip(opt.gzip); + } + if (opt.brotli) { + if (typeof opt.brotli !== 'object') { + opt.brotli = {}; + } + this.zip = new zlib.BrotliCompress(opt.brotli); + } + /* c8 ignore next */ + if (!this.zip) + throw new Error('impossible'); + const zip = this.zip; + zip.on('data', chunk => super.write(chunk)); + zip.on('end', () => super.end()); + zip.on('drain', () => this[ONDRAIN]()); + this.on('resume', () => zip.resume()); + } + else { + this.on('drain', this[ONDRAIN]); + } + this.noDirRecurse = !!opt.noDirRecurse; + this.follow = !!opt.follow; + this.noMtime = !!opt.noMtime;
+ if (opt.mtime) + this.mtime = opt.mtime; + this.filter = + typeof opt.filter === 'function' ? opt.filter : () => true; + this[QUEUE] = new Yallist(); + this[JOBS] = 0; + this.jobs = Number(opt.jobs) || 4; + this[PROCESSING] = false; + this[ENDED] = false; + } + [WRITE](chunk) { + return super.write(chunk); + } + add(path) { + this.write(path); + return this; + } + end(path, encoding, cb) { + /* c8 ignore start */ + if (typeof path === 'function') { + cb = path; + path = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (path) { + this.add(path); + } + this[ENDED] = true; + this[PROCESS](); + /* c8 ignore next */ + if (cb) + cb(); + return this; + } + write(path) { + if (this[ENDED]) { + throw new Error('write after end'); + } + if (path instanceof ReadEntry) { + this[ADDTARENTRY](path); + } + else { + this[ADDFSENTRY](path); + } + return this.flowing; + } + [ADDTARENTRY](p) { + const absolute = normalizeWindowsPath(path.resolve(this.cwd, p.path)); + // in this case, we don't have to wait for the stat + if (!this.filter(p.path, p)) { + p.resume(); + } + else { + const job = new PackJob(p.path, absolute); + job.entry = new WriteEntryTar(p, this[ENTRYOPT](job)); + job.entry.on('end', () => this[JOBDONE](job)); + this[JOBS] += 1; + this[QUEUE].push(job); + } + this[PROCESS](); + } + [ADDFSENTRY](p) { + const absolute = normalizeWindowsPath(path.resolve(this.cwd, p)); + this[QUEUE].push(new PackJob(p, absolute)); + this[PROCESS](); + } + [STAT](job) { + job.pending = true; + this[JOBS] += 1; + const stat = this.follow ? 'stat' : 'lstat'; + fs[stat](job.absolute, (er, stat) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + this.emit('error', er); + } + else { + this[ONSTAT](job, stat); + } + }); + } + [ONSTAT](job, stat) { + this.statCache.set(job.absolute, stat); + job.stat = stat; + // now we have the stat, we can filter it. + if (!this.filter(job.path, stat)) { + job.ignore = true; + } + this[PROCESS](); + } + [READDIR](job) { + job.pending = true; + this[JOBS] += 1; + fs.readdir(job.absolute, (er, entries) => { + job.pending = false; + this[JOBS] -= 1; + if (er) { + return this.emit('error', er); + } + this[ONREADDIR](job, entries); + }); + } + [ONREADDIR](job, entries) { + this.readdirCache.set(job.absolute, entries); + job.readdir = entries; + this[PROCESS](); + } + [PROCESS]() { + if (this[PROCESSING]) { + return; + } + this[PROCESSING] = true; + for (let w = this[QUEUE].head; !!w && this[JOBS] < this.jobs; w = w.next) { + this[PROCESSJOB](w.value); + if (w.value.ignore) { + const p = w.next; + this[QUEUE].removeNode(w); + w.next = p; + } + } + this[PROCESSING] = false; + if (this[ENDED] && !this[QUEUE].length && this[JOBS] === 0) { + if (this.zip) { + this.zip.end(EOF); + } + else { + super.write(EOF); + super.end(); + } + } + } + get [CURRENT]() { + return this[QUEUE] && this[QUEUE].head && this[QUEUE].head.value; + } + [JOBDONE](_job) { + this[QUEUE].shift(); + this[JOBS] -= 1; + this[PROCESS](); + } + [PROCESSJOB](job) { + if (job.pending) { + return; + } + if (job.entry) { + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + return; + } + if (!job.stat) { + const sc = this.statCache.get(job.absolute); + if (sc) { + this[ONSTAT](job, sc); + } + else { + this[STAT](job); + } + } + if (!job.stat) { + return; + } + // filtered out! 
+ if (job.ignore) { + return; + } + if (!this.noDirRecurse && + job.stat.isDirectory() && + !job.readdir) { + const rc = this.readdirCache.get(job.absolute); + if (rc) { + this[ONREADDIR](job, rc); + } + else { + this[READDIR](job); + } + if (!job.readdir) { + return; + } + } + // we know it doesn't have an entry, because that got checked above + job.entry = this[ENTRY](job); + if (!job.entry) { + job.ignore = true; + return; + } + if (job === this[CURRENT] && !job.piped) { + this[PIPE](job); + } + } + [ENTRYOPT](job) { + return { + onwarn: (code, msg, data) => this.warn(code, msg, data), + noPax: this.noPax, + cwd: this.cwd, + absolute: job.absolute, + preservePaths: this.preservePaths, + maxReadSize: this.maxReadSize, + strict: this.strict, + portable: this.portable, + linkCache: this.linkCache, + statCache: this.statCache, + noMtime: this.noMtime, + mtime: this.mtime, + prefix: this.prefix, + onWriteEntry: this.onWriteEntry, + }; + } + [ENTRY](job) { + this[JOBS] += 1; + try { + const e = new this[WRITEENTRYCLASS](job.path, this[ENTRYOPT](job)); + return e + .on('end', () => this[JOBDONE](job)) + .on('error', er => this.emit('error', er)); + } + catch (er) { + this.emit('error', er); + } + } + [ONDRAIN]() { + if (this[CURRENT] && this[CURRENT].entry) { + this[CURRENT].entry.resume(); + } + } + // like .pipe() but using super, because our write() is special + [PIPE](job) { + job.piped = true; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? '' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + const source = job.entry; + const zip = this.zip; + /* c8 ignore start */ + if (!source) + throw new Error('cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + if (!zip.write(chunk)) { + source.pause(); + } + }); + } + else { + source.on('data', chunk => { + if (!super.write(chunk)) { + source.pause(); + } + }); + } + } + pause() { + if (this.zip) { + this.zip.pause(); + } + return super.pause(); + } + warn(code, message, data = {}) { + warnMethod(this, code, message, data); + } +} +export class PackSync extends Pack { + sync = true; + constructor(opt) { + super(opt); + this[WRITEENTRYCLASS] = WriteEntrySync; + } + // pause/resume are no-ops in sync streams. + pause() { } + resume() { } + [STAT](job) { + const stat = this.follow ? 'statSync' : 'lstatSync'; + this[ONSTAT](job, fs[stat](job.absolute)); + } + [READDIR](job) { + this[ONREADDIR](job, fs.readdirSync(job.absolute)); + } + // gotta get it all in this tick + [PIPE](job) { + const source = job.entry; + const zip = this.zip; + if (job.readdir) { + job.readdir.forEach(entry => { + const p = job.path; + const base = p === './' ? 
'' : p.replace(/\/*$/, '/'); + this[ADDFSENTRY](base + entry); + }); + } + /* c8 ignore start */ + if (!source) + throw new Error('Cannot pipe without source'); + /* c8 ignore stop */ + if (zip) { + source.on('data', chunk => { + zip.write(chunk); + }); + } + else { + source.on('data', chunk => { + super[WRITE](chunk); + }); + } + } +} +//# sourceMappingURL=pack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/parse.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/parse.js new file mode 100644 index 00000000000000..f2c802e6eef04d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/parse.js @@ -0,0 +1,595 @@ +// this[BUFFER] is the remainder of a chunk if we're waiting for +// the full 512 bytes of a header to come in. We will Buffer.concat() +// it to the next write(), which is a mem copy, but a small one. +// +// this[QUEUE] is a Yallist of entries that haven't been emitted +// yet this can only get filled up if the user keeps write()ing after +// a write() returns false, or does a write() with more than one entry +// +// We don't buffer chunks, we always parse them and either create an +// entry, or push it into the active entry. The ReadEntry class knows +// to throw data away if .ignore=true +// +// Shift entry off the buffer when it emits 'end', and emit 'entry' for +// the next one in the list. +// +// At any time, we're pushing body chunks into the entry at WRITEENTRY, +// and waiting for 'end' on the entry at READENTRY +// +// ignored entries get .resume() called on them straight away +import { EventEmitter as EE } from 'events'; +import { BrotliDecompress, Unzip } from 'minizlib'; +import { Yallist } from 'yallist'; +import { Header } from './header.js'; +import { Pax } from './pax.js'; +import { ReadEntry } from './read-entry.js'; +import { warnMethod, } from './warn-method.js'; +const maxMetaEntrySize = 1024 * 1024; +const gzipHeader = Buffer.from([0x1f, 0x8b]); +const STATE = Symbol('state'); +const WRITEENTRY = Symbol('writeEntry'); +const READENTRY = Symbol('readEntry'); +const NEXTENTRY = Symbol('nextEntry'); +const PROCESSENTRY = Symbol('processEntry'); +const EX = Symbol('extendedHeader'); +const GEX = Symbol('globalExtendedHeader'); +const META = Symbol('meta'); +const EMITMETA = Symbol('emitMeta'); +const BUFFER = Symbol('buffer'); +const QUEUE = Symbol('queue'); +const ENDED = Symbol('ended'); +const EMITTEDEND = Symbol('emittedEnd'); +const EMIT = Symbol('emit'); +const UNZIP = Symbol('unzip'); +const CONSUMECHUNK = Symbol('consumeChunk'); +const CONSUMECHUNKSUB = Symbol('consumeChunkSub'); +const CONSUMEBODY = Symbol('consumeBody'); +const CONSUMEMETA = Symbol('consumeMeta'); +const CONSUMEHEADER = Symbol('consumeHeader'); +const CONSUMING = Symbol('consuming'); +const BUFFERCONCAT = Symbol('bufferConcat'); +const MAYBEEND = Symbol('maybeEnd'); +const WRITING = Symbol('writing'); +const ABORTED = Symbol('aborted'); +const DONE = Symbol('onDone'); +const SAW_VALID_ENTRY = Symbol('sawValidEntry'); +const SAW_NULL_BLOCK = Symbol('sawNullBlock'); +const SAW_EOF = Symbol('sawEOF'); +const CLOSESTREAM = Symbol('closeStream'); +const noop = () => 
true; +export class Parser extends EE { + file; + strict; + maxMetaEntrySize; + filter; + brotli; + writable = true; + readable = false; + [QUEUE] = new Yallist(); + [BUFFER]; + [READENTRY]; + [WRITEENTRY]; + [STATE] = 'begin'; + [META] = ''; + [EX]; + [GEX]; + [ENDED] = false; + [UNZIP]; + [ABORTED] = false; + [SAW_VALID_ENTRY]; + [SAW_NULL_BLOCK] = false; + [SAW_EOF] = false; + [WRITING] = false; + [CONSUMING] = false; + [EMITTEDEND] = false; + constructor(opt = {}) { + super(); + this.file = opt.file || ''; + // these BADARCHIVE errors can't be detected early. listen on DONE. + this.on(DONE, () => { + if (this[STATE] === 'begin' || + this[SAW_VALID_ENTRY] === false) { + // either less than 1 block of data, or all entries were invalid. + // Either way, probably not even a tarball. + this.warn('TAR_BAD_ARCHIVE', 'Unrecognized archive format'); + } + }); + if (opt.ondone) { + this.on(DONE, opt.ondone); + } + else { + this.on(DONE, () => { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + }); + } + this.strict = !!opt.strict; + this.maxMetaEntrySize = opt.maxMetaEntrySize || maxMetaEntrySize; + this.filter = typeof opt.filter === 'function' ? opt.filter : noop; + // Unlike gzip, brotli doesn't have any magic bytes to identify it + // Users need to explicitly tell us they're extracting a brotli file + // Or we infer from the file extension + const isTBR = opt.file && + (opt.file.endsWith('.tar.br') || opt.file.endsWith('.tbr')); + // if it's a tbr file it MIGHT be brotli, but we don't know until + // we look at it and verify it's not a valid tar file. + this.brotli = + !opt.gzip && opt.brotli !== undefined ? opt.brotli + : isTBR ? undefined + : false; + // have to set this so that streams are ok piping into it + this.on('end', () => this[CLOSESTREAM]()); + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + if (typeof opt.onReadEntry === 'function') { + this.on('entry', opt.onReadEntry); + } + } + warn(code, message, data = {}) { + warnMethod(this, code, message, data); + } + [CONSUMEHEADER](chunk, position) { + if (this[SAW_VALID_ENTRY] === undefined) { + this[SAW_VALID_ENTRY] = false; + } + let header; + try { + header = new Header(chunk, position, this[EX], this[GEX]); + } + catch (er) { + return this.warn('TAR_ENTRY_INVALID', er); + } + if (header.nullBlock) { + if (this[SAW_NULL_BLOCK]) { + this[SAW_EOF] = true; + // ending an archive with no entries. pointless, but legal. + if (this[STATE] === 'begin') { + this[STATE] = 'header'; + } + this[EMIT]('eof'); + } + else { + this[SAW_NULL_BLOCK] = true; + this[EMIT]('nullBlock'); + } + } + else { + this[SAW_NULL_BLOCK] = false; + if (!header.cksumValid) { + this.warn('TAR_ENTRY_INVALID', 'checksum failure', { header }); + } + else if (!header.path) { + this.warn('TAR_ENTRY_INVALID', 'path is required', { header }); + } + else { + const type = header.type; + if (/^(Symbolic)?Link$/.test(type) && !header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath required', { + header, + }); + } + else if (!/^(Symbolic)?Link$/.test(type) && + !/^(Global)?ExtendedHeader$/.test(type) && + header.linkpath) { + this.warn('TAR_ENTRY_INVALID', 'linkpath forbidden', { + header, + }); + } + else { + const entry = (this[WRITEENTRY] = new ReadEntry(header, this[EX], this[GEX])); + // we do this for meta & ignored entries as well, because they + // are still valid tar, or else we wouldn't know to ignore them + if (!this[SAW_VALID_ENTRY]) { + if (entry.remain) { + // this might be the one! 
+ const onend = () => { + if (!entry.invalid) { + this[SAW_VALID_ENTRY] = true; + } + }; + entry.on('end', onend); + } + else { + this[SAW_VALID_ENTRY] = true; + } + } + if (entry.meta) { + if (entry.size > this.maxMetaEntrySize) { + entry.ignore = true; + this[EMIT]('ignoredEntry', entry); + this[STATE] = 'ignore'; + entry.resume(); + } + else if (entry.size > 0) { + this[META] = ''; + entry.on('data', c => (this[META] += c)); + this[STATE] = 'meta'; + } + } + else { + this[EX] = undefined; + entry.ignore = + entry.ignore || !this.filter(entry.path, entry); + if (entry.ignore) { + // probably valid, just not something we care about + this[EMIT]('ignoredEntry', entry); + this[STATE] = entry.remain ? 'ignore' : 'header'; + entry.resume(); + } + else { + if (entry.remain) { + this[STATE] = 'body'; + } + else { + this[STATE] = 'header'; + entry.end(); + } + if (!this[READENTRY]) { + this[QUEUE].push(entry); + this[NEXTENTRY](); + } + else { + this[QUEUE].push(entry); + } + } + } + } + } + } + } + [CLOSESTREAM]() { + queueMicrotask(() => this.emit('close')); + } + [PROCESSENTRY](entry) { + let go = true; + if (!entry) { + this[READENTRY] = undefined; + go = false; + } + else if (Array.isArray(entry)) { + const [ev, ...args] = entry; + this.emit(ev, ...args); + } + else { + this[READENTRY] = entry; + this.emit('entry', entry); + if (!entry.emittedEnd) { + entry.on('end', () => this[NEXTENTRY]()); + go = false; + } + } + return go; + } + [NEXTENTRY]() { + do { } while (this[PROCESSENTRY](this[QUEUE].shift())); + if (!this[QUEUE].length) { + // At this point, there's nothing in the queue, but we may have an + // entry which is being consumed (readEntry). + // If we don't, then we definitely can handle more data. + // If we do, and either it's flowing, or it has never had any data + // written to it, then it needs more. + // The only other possibility is that it has returned false from a + // write() call, so we wait for the next drain to continue. + const re = this[READENTRY]; + const drainNow = !re || re.flowing || re.size === re.remain; + if (drainNow) { + if (!this[WRITING]) { + this.emit('drain'); + } + } + else { + re.once('drain', () => this.emit('drain')); + } + } + } + [CONSUMEBODY](chunk, position) { + // write up to but no more than writeEntry.blockRemain + const entry = this[WRITEENTRY]; + /* c8 ignore start */ + if (!entry) { + throw new Error('attempt to consume body without entry??'); + } + const br = entry.blockRemain ?? 0; + /* c8 ignore stop */ + const c = br >= chunk.length && position === 0 ? + chunk + : chunk.subarray(position, position + br); + entry.write(c); + if (!entry.blockRemain) { + this[STATE] = 'header'; + this[WRITEENTRY] = undefined; + entry.end(); + } + return c.length; + } + [CONSUMEMETA](chunk, position) { + const entry = this[WRITEENTRY]; + const ret = this[CONSUMEBODY](chunk, position); + // if we finished, then the entry is reset + if (!this[WRITEENTRY] && entry) { + this[EMITMETA](entry); + } + return ret; + } + [EMIT](ev, data, extra) { + if (!this[QUEUE].length && !this[READENTRY]) { + this.emit(ev, data, extra); + } + else { + this[QUEUE].push([ev, data, extra]); + } + } + [EMITMETA](entry) { + this[EMIT]('meta', this[META]); + switch (entry.type) { + case 'ExtendedHeader': + case 'OldExtendedHeader': + this[EX] = Pax.parse(this[META], this[EX], false); + break; + case 'GlobalExtendedHeader': + this[GEX] = Pax.parse(this[META], this[GEX], true); + break; + case 'NextFileHasLongPath': + case 'OldGnuLongPath': { + const ex = this[EX] ?? 
Object.create(null); + this[EX] = ex; + ex.path = this[META].replace(/\0.*/, ''); + break; + } + case 'NextFileHasLongLinkpath': { + const ex = this[EX] || Object.create(null); + this[EX] = ex; + ex.linkpath = this[META].replace(/\0.*/, ''); + break; + } + /* c8 ignore start */ + default: + throw new Error('unknown meta: ' + entry.type); + /* c8 ignore stop */ + } + } + abort(error) { + this[ABORTED] = true; + this.emit('abort', error); + // always throws, even in non-strict mode + this.warn('TAR_ABORT', error, { recoverable: false }); + } + write(chunk, encoding, cb) { + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, + /* c8 ignore next */ + typeof encoding === 'string' ? encoding : 'utf8'); + } + if (this[ABORTED]) { + /* c8 ignore next */ + cb?.(); + return false; + } + // first write, might be gzipped + const needSniff = this[UNZIP] === undefined || + (this.brotli === undefined && this[UNZIP] === false); + if (needSniff && chunk) { + if (this[BUFFER]) { + chunk = Buffer.concat([this[BUFFER], chunk]); + this[BUFFER] = undefined; + } + if (chunk.length < gzipHeader.length) { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + // look for gzip header + for (let i = 0; this[UNZIP] === undefined && i < gzipHeader.length; i++) { + if (chunk[i] !== gzipHeader[i]) { + this[UNZIP] = false; + } + } + const maybeBrotli = this.brotli === undefined; + if (this[UNZIP] === false && maybeBrotli) { + // read the first header to see if it's a valid tar file. If so, + // we can safely assume that it's not actually brotli, despite the + // .tbr or .tar.br file extension. + // if we ended before getting a full chunk, yes, def brotli + if (chunk.length < 512) { + if (this[ENDED]) { + this.brotli = true; + } + else { + this[BUFFER] = chunk; + /* c8 ignore next */ + cb?.(); + return true; + } + } + else { + // if it's tar, it's pretty reliably not brotli, chances of + // that happening are astronomical. + try { + new Header(chunk.subarray(0, 512)); + this.brotli = false; + } + catch (_) { + this.brotli = true; + } + } + } + if (this[UNZIP] === undefined || + (this[UNZIP] === false && this.brotli)) { + const ended = this[ENDED]; + this[ENDED] = false; + this[UNZIP] = + this[UNZIP] === undefined ? + new Unzip({}) + : new BrotliDecompress({}); + this[UNZIP].on('data', chunk => this[CONSUMECHUNK](chunk)); + this[UNZIP].on('error', er => this.abort(er)); + this[UNZIP].on('end', () => { + this[ENDED] = true; + this[CONSUMECHUNK](); + }); + this[WRITING] = true; + const ret = !!this[UNZIP][ended ? 'end' : 'write'](chunk); + this[WRITING] = false; + cb?.(); + return ret; + } + } + this[WRITING] = true; + if (this[UNZIP]) { + this[UNZIP].write(chunk); + } + else { + this[CONSUMECHUNK](chunk); + } + this[WRITING] = false; + // return false if there's a queue, or if the current entry isn't flowing + const ret = this[QUEUE].length ? false + : this[READENTRY] ? this[READENTRY].flowing + : true; + // if we have no queue, then that means a clogged READENTRY + if (!ret && !this[QUEUE].length) { + this[READENTRY]?.once('drain', () => this.emit('drain')); + } + /* c8 ignore next */ + cb?.(); + return ret; + } + [BUFFERCONCAT](c) { + if (c && !this[ABORTED]) { + this[BUFFER] = + this[BUFFER] ? 
Buffer.concat([this[BUFFER], c]) : c; + } + } + [MAYBEEND]() { + if (this[ENDED] && + !this[EMITTEDEND] && + !this[ABORTED] && + !this[CONSUMING]) { + this[EMITTEDEND] = true; + const entry = this[WRITEENTRY]; + if (entry && entry.blockRemain) { + // truncated, likely a damaged file + const have = this[BUFFER] ? this[BUFFER].length : 0; + this.warn('TAR_BAD_ARCHIVE', `Truncated input (needed ${entry.blockRemain} more bytes, only ${have} available)`, { entry }); + if (this[BUFFER]) { + entry.write(this[BUFFER]); + } + entry.end(); + } + this[EMIT](DONE); + } + } + [CONSUMECHUNK](chunk) { + if (this[CONSUMING] && chunk) { + this[BUFFERCONCAT](chunk); + } + else if (!chunk && !this[BUFFER]) { + this[MAYBEEND](); + } + else if (chunk) { + this[CONSUMING] = true; + if (this[BUFFER]) { + this[BUFFERCONCAT](chunk); + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + else { + this[CONSUMECHUNKSUB](chunk); + } + while (this[BUFFER] && + this[BUFFER]?.length >= 512 && + !this[ABORTED] && + !this[SAW_EOF]) { + const c = this[BUFFER]; + this[BUFFER] = undefined; + this[CONSUMECHUNKSUB](c); + } + this[CONSUMING] = false; + } + if (!this[BUFFER] || this[ENDED]) { + this[MAYBEEND](); + } + } + [CONSUMECHUNKSUB](chunk) { + // we know that we are in CONSUMING mode, so anything written goes into + // the buffer. Advance the position and put any remainder in the buffer. + let position = 0; + const length = chunk.length; + while (position + 512 <= length && + !this[ABORTED] && + !this[SAW_EOF]) { + switch (this[STATE]) { + case 'begin': + case 'header': + this[CONSUMEHEADER](chunk, position); + position += 512; + break; + case 'ignore': + case 'body': + position += this[CONSUMEBODY](chunk, position); + break; + case 'meta': + position += this[CONSUMEMETA](chunk, position); + break; + /* c8 ignore start */ + default: + throw new Error('invalid state: ' + this[STATE]); + /* c8 ignore stop */ + } + } + if (position < length) { + if (this[BUFFER]) { + this[BUFFER] = Buffer.concat([ + chunk.subarray(position), + this[BUFFER], + ]); + } + else { + this[BUFFER] = chunk.subarray(position); + } + } + } + end(chunk, encoding, cb) { + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding); + } + if (cb) + this.once('finish', cb); + if (!this[ABORTED]) { + if (this[UNZIP]) { + /* c8 ignore start */ + if (chunk) + this[UNZIP].write(chunk); + /* c8 ignore stop */ + this[UNZIP].end(); + } + else { + this[ENDED] = true; + if (this.brotli === undefined) + chunk = chunk || Buffer.alloc(0); + if (chunk) + this.write(chunk); + this[MAYBEEND](); + } + } + return this; + } +} +//# sourceMappingURL=parse.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js new file mode 100644 index 00000000000000..e63b9c91e9a808 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/path-reservations.js @@ -0,0 +1,166 @@ +// A path exclusive reservation system +// reserve([list, of, paths], fn) +// When the fn is first in line for all its paths, it +// is called with a cb that clears the reservation. +// +// Used by async unpack to avoid clobbering paths in use, +// while still allowing maximal safe parallelization. 
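+//
+// A minimal usage sketch (illustrative only, not part of the module):
+//
+//   const res = new PathReservations()
+//   res.reserve(['a/b'], done => { /* 'a/b' is exclusively held */ done() })
+//   // queued behind the first fn, because 'a/b' is a parent dir of 'a/b/c':
+//   res.reserve(['a/b/c'], done => done())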
+import { join } from 'node:path'; +import { normalizeUnicode } from './normalize-unicode.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +// return a set of parent dirs for a given path +// '/a/b/c/d' -> ['/', '/a', '/a/b', '/a/b/c', '/a/b/c/d'] +const getDirs = (path) => { + const dirs = path + .split('/') + .slice(0, -1) + .reduce((set, path) => { + const s = set[set.length - 1]; + if (s !== undefined) { + path = join(s, path); + } + set.push(path || '/'); + return set; + }, []); + return dirs; +}; +export class PathReservations { + // path => [function or Set] + // A Set object means a directory reservation + // A fn is a direct reservation on that path + #queues = new Map(); + // fn => {paths:[path,...], dirs:[path, ...]} + #reservations = new Map(); + // functions currently running + #running = new Set(); + reserve(paths, fn) { + paths = + isWindows ? + ['win32 parallelization disabled'] + : paths.map(p => { + // don't need normPath, because we skip this entirely for windows + return stripTrailingSlashes(join(normalizeUnicode(p))).toLowerCase(); + }); + const dirs = new Set(paths.map(path => getDirs(path)).reduce((a, b) => a.concat(b))); + this.#reservations.set(fn, { dirs, paths }); + for (const p of paths) { + const q = this.#queues.get(p); + if (!q) { + this.#queues.set(p, [fn]); + } + else { + q.push(fn); + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + if (!q) { + this.#queues.set(dir, [new Set([fn])]); + } + else { + const l = q[q.length - 1]; + if (l instanceof Set) { + l.add(fn); + } + else { + q.push(new Set([fn])); + } + } + } + return this.#run(fn); + } + // return the queues for each path the function cares about + // fn => {paths, dirs} + #getQueues(fn) { + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('function does not have any path reservations'); + } + /* c8 ignore stop */ + return { + paths: res.paths.map((path) => this.#queues.get(path)), + dirs: [...res.dirs].map(path => this.#queues.get(path)), + }; + } + // check if fn is first in line for all its paths, and is + // included in the first set for all its dir queues + check(fn) { + const { paths, dirs } = this.#getQueues(fn); + return (paths.every(q => q && q[0] === fn) && + dirs.every(q => q && q[0] instanceof Set && q[0].has(fn))); + } + // run the function if it's first in line and not already running + #run(fn) { + if (this.#running.has(fn) || !this.check(fn)) { + return false; + } + this.#running.add(fn); + fn(() => this.#clear(fn)); + return true; + } + #clear(fn) { + if (!this.#running.has(fn)) { + return false; + } + const res = this.#reservations.get(fn); + /* c8 ignore start */ + if (!res) { + throw new Error('invalid reservation'); + } + /* c8 ignore stop */ + const { paths, dirs } = res; + const next = new Set(); + for (const path of paths) { + const q = this.#queues.get(path); + /* c8 ignore start */ + if (!q || q?.[0] !== fn) { + continue; + } + /* c8 ignore stop */ + const q0 = q[1]; + if (!q0) { + this.#queues.delete(path); + continue; + } + q.shift(); + if (typeof q0 === 'function') { + next.add(q0); + } + else { + for (const f of q0) { + next.add(f); + } + } + } + for (const dir of dirs) { + const q = this.#queues.get(dir); + const q0 = q?.[0]; + /* c8 ignore next - type safety only */ + if (!q || !(q0 instanceof Set)) + continue; + if (q0.size === 1 && q.length === 1) { + 
this.#queues.delete(dir); + continue; + } + else if (q0.size === 1) { + q.shift(); + // next one must be a function, + // or else the Set would've been reused + const n = q[0]; + if (typeof n === 'function') { + next.add(n); + } + } + else { + q0.delete(fn); + } + } + this.#running.delete(fn); + next.forEach(fn => this.#run(fn)); + return true; + } +} +//# sourceMappingURL=path-reservations.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pax.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pax.js new file mode 100644 index 00000000000000..832808f344da53 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/pax.js @@ -0,0 +1,154 @@ +import { basename } from 'node:path'; +import { Header } from './header.js'; +export class Pax { + atime; + mtime; + ctime; + charset; + comment; + gid; + uid; + gname; + uname; + linkpath; + dev; + ino; + nlink; + path; + size; + mode; + global; + constructor(obj, global = false) { + this.atime = obj.atime; + this.charset = obj.charset; + this.comment = obj.comment; + this.ctime = obj.ctime; + this.dev = obj.dev; + this.gid = obj.gid; + this.global = global; + this.gname = obj.gname; + this.ino = obj.ino; + this.linkpath = obj.linkpath; + this.mtime = obj.mtime; + this.nlink = obj.nlink; + this.path = obj.path; + this.size = obj.size; + this.uid = obj.uid; + this.uname = obj.uname; + } + encode() { + const body = this.encodeBody(); + if (body === '') { + return Buffer.allocUnsafe(0); + } + const bodyLen = Buffer.byteLength(body); + // round up to 512 bytes + // add 512 for header + const bufLen = 512 * Math.ceil(1 + bodyLen / 512); + const buf = Buffer.allocUnsafe(bufLen); + // 0-fill the header section, it might not hit every field + for (let i = 0; i < 512; i++) { + buf[i] = 0; + } + new Header({ + // XXX split the path + // then the path should be PaxHeader + basename, but less than 99, + // prepend with the dirname + /* c8 ignore start */ + path: ('PaxHeader/' + basename(this.path ?? '')).slice(0, 99), + /* c8 ignore stop */ + mode: this.mode || 0o644, + uid: this.uid, + gid: this.gid, + size: bodyLen, + mtime: this.mtime, + type: this.global ? 'GlobalExtendedHeader' : 'ExtendedHeader', + linkpath: '', + uname: this.uname || '', + gname: this.gname || '', + devmaj: 0, + devmin: 0, + atime: this.atime, + ctime: this.ctime, + }).encode(buf); + buf.write(body, 512, bodyLen, 'utf8'); + // null pad after the body + for (let i = bodyLen + 512; i < buf.length; i++) { + buf[i] = 0; + } + return buf; + } + encodeBody() { + return (this.encodeField('path') + + this.encodeField('ctime') + + this.encodeField('atime') + + this.encodeField('dev') + + this.encodeField('ino') + + this.encodeField('nlink') + + this.encodeField('charset') + + this.encodeField('comment') + + this.encodeField('gid') + + this.encodeField('gname') + + this.encodeField('linkpath') + + this.encodeField('mtime') + + this.encodeField('size') + + this.encodeField('uid') + + this.encodeField('uname')); + } + encodeField(field) { + if (this[field] === undefined) { + return ''; + } + const r = this[field]; + const v = r instanceof Date ? r.getTime() / 1000 : r; + const s = ' ' + + (field === 'dev' || field === 'ino' || field === 'nlink' ? + 'SCHILY.' + : '') + + field + + '=' + + v + + '\n'; + const byteLen = Buffer.byteLength(s); + // the digits includes the length of the digits in ascii base-10 + // so if it's 9 characters, then adding 1 for the 9 makes it 10 + // which makes it 11 chars. 
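+ // Worked example (illustrative): the record body ' mtime=1492591338\n'
+ // is 18 bytes; 18 needs 2 digits and 18 + 2 = 20 stays below 10^2, so
+ // the full record is '20 mtime=1492591338\n' (20 bytes). A 98-byte body
+ // rolls over: 98 + 2 = 100 >= 10^2, so the prefix becomes 101 (3 digits)
+ // and the record is 101 bytes long.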
+ let digits = Math.floor(Math.log(byteLen) / Math.log(10)) + 1; + if (byteLen + digits >= Math.pow(10, digits)) { + digits += 1; + } + const len = digits + byteLen; + return len + s; + } + static parse(str, ex, g = false) { + return new Pax(merge(parseKV(str), ex), g); + } +} +const merge = (a, b) => b ? Object.assign({}, b, a) : a; +const parseKV = (str) => str + .replace(/\n$/, '') + .split('\n') + .reduce(parseKVLine, Object.create(null)); +const parseKVLine = (set, line) => { + const n = parseInt(line, 10); + // XXX Values with \n in them will fail this. + // Refactor to not be a naive line-by-line parse. + if (n !== Buffer.byteLength(line) + 1) { + return set; + } + line = line.slice((n + ' ').length); + const kv = line.split('='); + const r = kv.shift(); + if (!r) { + return set; + } + const k = r.replace(/^SCHILY\.(dev|ino|nlink)/, '$1'); + const v = kv.join('='); + set[k] = + /^([A-Z]+\.)?([mac]|birth|creation)time$/.test(k) ? + new Date(Number(v) * 1000) + : /^[0-9]+$/.test(v) ? +v + : v; + return set; +}; +//# sourceMappingURL=pax.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js new file mode 100644 index 00000000000000..23cc673e610879 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/read-entry.js @@ -0,0 +1,136 @@ +import { Minipass } from 'minipass'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +export class ReadEntry extends Minipass { + extended; + globalExtended; + header; + startBlockSize; + blockRemain; + remain; + type; + meta = false; + ignore = false; + path; + mode; + uid; + gid; + uname; + gname; + size = 0; + mtime; + atime; + ctime; + linkpath; + dev; + ino; + nlink; + invalid = false; + absolute; + unsupported = false; + constructor(header, ex, gex) { + super({}); + // read entries always start life paused. this is to avoid the + // situation where Minipass's auto-ending empty streams results + // in an entry ending before we're ready for it. + this.pause(); + this.extended = ex; + this.globalExtended = gex; + this.header = header; + /* c8 ignore start */ + this.remain = header.size ?? 0; + /* c8 ignore stop */ + this.startBlockSize = 512 * Math.ceil(this.remain / 512); + this.blockRemain = this.startBlockSize; + this.type = header.type; + switch (this.type) { + case 'File': + case 'OldFile': + case 'Link': + case 'SymbolicLink': + case 'CharacterDevice': + case 'BlockDevice': + case 'Directory': + case 'FIFO': + case 'ContiguousFile': + case 'GNUDumpDir': + break; + case 'NextFileHasLongLinkpath': + case 'NextFileHasLongPath': + case 'OldGnuLongPath': + case 'GlobalExtendedHeader': + case 'ExtendedHeader': + case 'OldExtendedHeader': + this.meta = true; + break; + // NOTE: gnutar and bsdtar treat unrecognized types as 'File' + // it may be worth doing the same, but with a warning. + default: + this.ignore = true; + } + /* c8 ignore start */ + if (!header.path) { + throw new Error('no path provided for tar.ReadEntry'); + } + /* c8 ignore stop */ + this.path = normalizeWindowsPath(header.path); + this.mode = header.mode; + if (this.mode) { + this.mode = this.mode & 0o7777; + } + this.uid = header.uid; + this.gid = header.gid; + this.uname = header.uname; + this.gname = header.gname; + this.size = this.remain; + this.mtime = header.mtime; + this.atime = header.atime; + this.ctime = header.ctime; + /* c8 ignore start */ + this.linkpath = + header.linkpath ? 
+ normalizeWindowsPath(header.linkpath) + : undefined; + /* c8 ignore stop */ + this.uname = header.uname; + this.gname = header.gname; + if (ex) { + this.#slurp(ex); + } + if (gex) { + this.#slurp(gex, true); + } + } + write(data) { + const writeLen = data.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + const r = this.remain; + const br = this.blockRemain; + this.remain = Math.max(0, r - writeLen); + this.blockRemain = Math.max(0, br - writeLen); + if (this.ignore) { + return true; + } + if (r >= writeLen) { + return super.write(data); + } + // r < writeLen + return super.write(data.subarray(0, r)); + } + #slurp(ex, gex = false) { + if (ex.path) + ex.path = normalizeWindowsPath(ex.path); + if (ex.linkpath) + ex.linkpath = normalizeWindowsPath(ex.linkpath); + Object.assign(this, Object.fromEntries(Object.entries(ex).filter(([k, v]) => { + // we slurp in everything except for the path attribute in + // a global extended header, because that's weird. Also, any + // null/undefined values are ignored. + return !(v === null || + v === undefined || + (k === 'path' && gex)); + }))); + } +} +//# sourceMappingURL=read-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/replace.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/replace.js new file mode 100644 index 00000000000000..c461a4c7d8b63c --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/replace.js @@ -0,0 +1,225 @@ +// tar -r +import { WriteStream, WriteStreamSync } from '@isaacs/fs-minipass'; +import fs from 'node:fs'; +import path from 'node:path'; +import { Header } from './header.js'; +import { list } from './list.js'; +import { makeCommand } from './make-command.js'; +import { isFile, } from './options.js'; +import { Pack, PackSync } from './pack.js'; +// starting at the head of the file, read a Header +// If the checksum is invalid, that's our position to start writing +// If it is, jump forward by the specified size (round up to 512) +// and try again. +// Write the new Pack stream starting there. 
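+// Illustrative walk-through (not from upstream): an archive holding one
+// 600-byte file is a 512-byte header plus 1024 bytes of body blocks; the
+// scan validates the header at offset 0, jumps to offset 1536, fails the
+// checksum on the null EOF blocks there, and appends new entries at 1536.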
+const replaceSync = (opt, files) => { + const p = new PackSync(opt); + let threw = true; + let fd; + let position; + try { + try { + fd = fs.openSync(opt.file, 'r+'); + } + catch (er) { + if (er?.code === 'ENOENT') { + fd = fs.openSync(opt.file, 'w+'); + } + else { + throw er; + } + } + const st = fs.fstatSync(fd); + const headBuf = Buffer.alloc(512); + POSITION: for (position = 0; position < st.size; position += 512) { + for (let bufPos = 0, bytes = 0; bufPos < 512; bufPos += bytes) { + bytes = fs.readSync(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos); + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + throw new Error('cannot append to compressed archives'); + } + if (!bytes) { + break POSITION; + } + } + const h = new Header(headBuf); + if (!h.cksumValid) { + break; + } + const entryBlockSize = 512 * Math.ceil((h.size || 0) / 512); + if (position + entryBlockSize + 512 > st.size) { + break; + } + // the 512 for the header we just parsed will be added as well + // also jump ahead all the blocks for the body + position += entryBlockSize; + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + } + threw = false; + streamSync(opt, p, position, fd, files); + } + finally { + if (threw) { + try { + fs.closeSync(fd); + } + catch (er) { } + } + } +}; +const streamSync = (opt, p, position, fd, files) => { + const stream = new WriteStreamSync(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + addFilesSync(p, files); +}; +const replaceAsync = (opt, files) => { + files = Array.from(files); + const p = new Pack(opt); + const getPos = (fd, size, cb_) => { + const cb = (er, pos) => { + if (er) { + fs.close(fd, _ => cb_(er)); + } + else { + cb_(null, pos); + } + }; + let position = 0; + if (size === 0) { + return cb(null, 0); + } + let bufPos = 0; + const headBuf = Buffer.alloc(512); + const onread = (er, bytes) => { + if (er || typeof bytes === 'undefined') { + return cb(er); + } + bufPos += bytes; + if (bufPos < 512 && bytes) { + return fs.read(fd, headBuf, bufPos, headBuf.length - bufPos, position + bufPos, onread); + } + if (position === 0 && + headBuf[0] === 0x1f && + headBuf[1] === 0x8b) { + return cb(new Error('cannot append to compressed archives')); + } + // truncated header + if (bufPos < 512) { + return cb(null, position); + } + const h = new Header(headBuf); + if (!h.cksumValid) { + return cb(null, position); + } + /* c8 ignore next */ + const entryBlockSize = 512 * Math.ceil((h.size ?? 
0) / 512); + if (position + entryBlockSize + 512 > size) { + return cb(null, position); + } + position += entryBlockSize + 512; + if (position >= size) { + return cb(null, position); + } + if (opt.mtimeCache && h.mtime) { + opt.mtimeCache.set(String(h.path), h.mtime); + } + bufPos = 0; + fs.read(fd, headBuf, 0, 512, position, onread); + }; + fs.read(fd, headBuf, 0, 512, position, onread); + }; + const promise = new Promise((resolve, reject) => { + p.on('error', reject); + let flag = 'r+'; + const onopen = (er, fd) => { + if (er && er.code === 'ENOENT' && flag === 'r+') { + flag = 'w+'; + return fs.open(opt.file, flag, onopen); + } + if (er || !fd) { + return reject(er); + } + fs.fstat(fd, (er, st) => { + if (er) { + return fs.close(fd, () => reject(er)); + } + getPos(fd, st.size, (er, position) => { + if (er) { + return reject(er); + } + const stream = new WriteStream(opt.file, { + fd: fd, + start: position, + }); + p.pipe(stream); + stream.on('error', reject); + stream.on('close', resolve); + addFilesAsync(p, files); + }); + }); + }; + fs.open(opt.file, flag, onopen); + }); + return promise; +}; +const addFilesSync = (p, files) => { + files.forEach(file => { + if (file.charAt(0) === '@') { + list({ + file: path.resolve(p.cwd, file.slice(1)), + sync: true, + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + }); + p.end(); +}; +const addFilesAsync = async (p, files) => { + for (let i = 0; i < files.length; i++) { + const file = String(files[i]); + if (file.charAt(0) === '@') { + await list({ + file: path.resolve(String(p.cwd), file.slice(1)), + noResume: true, + onReadEntry: entry => p.add(entry), + }); + } + else { + p.add(file); + } + } + p.end(); +}; +export const replace = makeCommand(replaceSync, replaceAsync, +/* c8 ignore start */ +() => { + throw new TypeError('file is required'); +}, () => { + throw new TypeError('file is required'); +}, +/* c8 ignore stop */ +(opt, entries) => { + if (!isFile(opt)) { + throw new TypeError('file is required'); + } + if (opt.gzip || + opt.brotli || + opt.file.endsWith('.br') || + opt.file.endsWith('.tbr')) { + throw new TypeError('cannot append to compressed archives'); + } + if (!entries?.length) { + throw new TypeError('no paths specified to add/replace'); + } +}); +//# sourceMappingURL=replace.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js new file mode 100644 index 00000000000000..cce5ff80b00db3 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-absolute-path.js @@ -0,0 +1,25 @@ +// unix absolute paths are also absolute on win32, so we use this for both +import { win32 } from 'node:path'; +const { isAbsolute, parse } = win32; +// returns [root, stripped] +// Note that windows will think that //x/y/z/a has a "root" of //x/y, and in +// those cases, we want to sanitize it to x/y/z/a, not z/a, so we strip / +// explicitly if it's the first character. +// drive-specific relative paths on Windows get their root stripped off even +// though they are not absolute, so `c:../foo` becomes ['c:', '../foo'] +export const stripAbsolutePath = (path) => { + let r = ''; + let parsed = parse(path); + while (isAbsolute(path) || parsed.root) { + // windows will think that //x/y/z has a "root" of //x/y/ + // but strip the //?/C:/ off of //?/C:/path + const root = path.charAt(0) === '/' && path.slice(0, 4) !== '//?/' ? 
+ '/' + : parsed.root; + path = path.slice(root.length); + r += root; + parsed = parse(path); + } + return [r, path]; +}; +//# sourceMappingURL=strip-absolute-path.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js new file mode 100644 index 00000000000000..ace4218a7547bf --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/strip-trailing-slashes.js @@ -0,0 +1,14 @@ +// warning: extremely hot code path. +// This has been meticulously optimized for use +// within npm install on large package trees. +// Do not edit without careful benchmarking. +export const stripTrailingSlashes = (str) => { + let i = str.length - 1; + let slashesStart = -1; + while (i > -1 && str.charAt(i) === '/') { + slashesStart = i; + i--; + } + return slashesStart === -1 ? str : str.slice(0, slashesStart); +}; +//# sourceMappingURL=strip-trailing-slashes.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js new file mode 100644 index 00000000000000..d31766e2e0afa0 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/symlink-error.js @@ -0,0 +1,15 @@ +export class SymlinkError extends Error { + path; + symlink; + syscall = 'symlink'; + code = 'TAR_SYMLINK_ERROR'; + constructor(symlink, path) { + super('TAR_SYMLINK_ERROR: Cannot extract through symbolic link'); + this.symlink = symlink; + this.path = path; + } + get name() { + return 'SymlinkError'; + } +} +//# sourceMappingURL=symlink-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/types.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/types.js new file mode 100644 index 00000000000000..27b982ae1e0922 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/types.js @@ -0,0 +1,45 @@ +export const isCode = (c) => name.has(c); +export const isName = (c) => code.has(c); +// map types from key to human-friendly name +export const name = new Map([ + ['0', 'File'], + // same as File + ['', 'OldFile'], + ['1', 'Link'], + ['2', 'SymbolicLink'], + // Devices and FIFOs aren't fully supported + // they are parsed, but skipped when unpacking + ['3', 'CharacterDevice'], + ['4', 'BlockDevice'], + ['5', 'Directory'], + ['6', 'FIFO'], + // same as File + ['7', 'ContiguousFile'], + // pax headers + ['g', 'GlobalExtendedHeader'], + ['x', 'ExtendedHeader'], + // vendor-specific stuff + // skip + ['A', 'SolarisACL'], + // like 5, but with data, which should be skipped + ['D', 'GNUDumpDir'], + // metadata only, skip + ['I', 'Inode'], + // data = link path of next file + ['K', 'NextFileHasLongLinkpath'], + // data = path of next file + ['L', 'NextFileHasLongPath'], + // skip + ['M', 'ContinuationFile'], + // like L + ['N', 'OldGnuLongPath'], + // skip + ['S', 'SparseFile'], + // skip + ['V', 'TapeVolumeHeader'], + // like x + ['X', 'OldExtendedHeader'], +]); +// map the other direction +export const code = new Map(Array.from(name).map(kv => [kv[1], kv[0]])); +//# sourceMappingURL=types.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/unpack.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/unpack.js new file mode 100644 index 00000000000000..6e744cfc1a6f9f --- /dev/null +++ 
b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/unpack.js @@ -0,0 +1,888 @@ +// the PEND/UNPEND stuff tracks whether we're ready to emit end/close yet. +// but the path reservations are required to avoid race conditions where +// parallelized unpack ops may mess with one another, due to dependencies +// (like a Link depending on its target) or destructive operations (like +// clobbering an fs object to create one of a different type.) +import * as fsm from '@isaacs/fs-minipass'; +import assert from 'node:assert'; +import { randomBytes } from 'node:crypto'; +import fs from 'node:fs'; +import path from 'node:path'; +import { getWriteFlag } from './get-write-flag.js'; +import { mkdir, mkdirSync } from './mkdir.js'; +import { normalizeUnicode } from './normalize-unicode.js'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +import { Parser } from './parse.js'; +import { stripAbsolutePath } from './strip-absolute-path.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +import * as wc from './winchars.js'; +import { PathReservations } from './path-reservations.js'; +const ONENTRY = Symbol('onEntry'); +const CHECKFS = Symbol('checkFs'); +const CHECKFS2 = Symbol('checkFs2'); +const PRUNECACHE = Symbol('pruneCache'); +const ISREUSABLE = Symbol('isReusable'); +const MAKEFS = Symbol('makeFs'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const LINK = Symbol('link'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const UNSUPPORTED = Symbol('unsupported'); +const CHECKPATH = Symbol('checkPath'); +const MKDIR = Symbol('mkdir'); +const ONERROR = Symbol('onError'); +const PENDING = Symbol('pending'); +const PEND = Symbol('pend'); +const UNPEND = Symbol('unpend'); +const ENDED = Symbol('ended'); +const MAYBECLOSE = Symbol('maybeClose'); +const SKIP = Symbol('skip'); +const DOCHOWN = Symbol('doChown'); +const UID = Symbol('uid'); +const GID = Symbol('gid'); +const CHECKED_CWD = Symbol('checkedCwd'); +const platform = process.env.TESTING_TAR_FAKE_PLATFORM || process.platform; +const isWindows = platform === 'win32'; +const DEFAULT_MAX_DEPTH = 1024; +// Unlinks on Windows are not atomic. +// +// This means that if you have a file entry, followed by another +// file entry with an identical name, and you cannot re-use the file +// (because it's a hardlink, or because unlink:true is set, or it's +// Windows, which does not have useful nlink values), then the unlink +// will be committed to the disk AFTER the new file has been written +// over the old one, deleting the new file. +// +// To work around this, on Windows systems, we rename the file and then +// delete the renamed file. It's a sloppy kludge, but frankly, I do not +// know of a better way to do this, given windows' non-atomic unlink +// semantics. +// +// See: https://github.com/npm/node-tar/issues/183 +/* c8 ignore start */ +const unlinkFile = (path, cb) => { + if (!isWindows) { + return fs.unlink(path, cb); + } + const name = path + '.DELETE.' + randomBytes(16).toString('hex'); + fs.rename(path, name, er => { + if (er) { + return cb(er); + } + fs.unlink(name, cb); + }); +}; +/* c8 ignore stop */ +/* c8 ignore start */ +const unlinkFileSync = (path) => { + if (!isWindows) { + return fs.unlinkSync(path); + } + const name = path + '.DELETE.' 
+ randomBytes(16).toString('hex'); + fs.renameSync(path, name); + fs.unlinkSync(name); +}; +/* c8 ignore stop */ +// this.gid, entry.gid, this.processUid +const uint32 = (a, b, c) => a !== undefined && a === a >>> 0 ? a + : b !== undefined && b === b >>> 0 ? b + : c; +// clear the cache if it's a case-insensitive unicode-squashing match. +// we can't know if the current file system is case-sensitive or supports +// unicode fully, so we check for similarity on the maximally compatible +// representation. Err on the side of pruning, since all it's doing is +// preventing lstats, and it's not the end of the world if we get a false +// positive. +// Note that on windows, we always drop the entire cache whenever a +// symbolic link is encountered, because 8.3 filenames are impossible +// to reason about, and collisions are hazards rather than just failures. +const cacheKeyNormalize = (path) => stripTrailingSlashes(normalizeWindowsPath(normalizeUnicode(path))).toLowerCase(); +// remove all cache entries matching ${abs}/** +const pruneCache = (cache, abs) => { + abs = cacheKeyNormalize(abs); + for (const path of cache.keys()) { + const pnorm = cacheKeyNormalize(path); + if (pnorm === abs || pnorm.indexOf(abs + '/') === 0) { + cache.delete(path); + } + } +}; +const dropCache = (cache) => { + for (const key of cache.keys()) { + cache.delete(key); + } +}; +export class Unpack extends Parser { + [ENDED] = false; + [CHECKED_CWD] = false; + [PENDING] = 0; + reservations = new PathReservations(); + transform; + writable = true; + readable = false; + dirCache; + uid; + gid; + setOwner; + preserveOwner; + processGid; + processUid; + maxDepth; + forceChown; + win32; + newer; + keep; + noMtime; + preservePaths; + unlink; + cwd; + strip; + processUmask; + umask; + dmode; + fmode; + chmod; + constructor(opt = {}) { + opt.ondone = () => { + this[ENDED] = true; + this[MAYBECLOSE](); + }; + super(opt); + this.transform = opt.transform; + this.dirCache = opt.dirCache || new Map(); + this.chmod = !!opt.chmod; + if (typeof opt.uid === 'number' || typeof opt.gid === 'number') { + // need both or neither + if (typeof opt.uid !== 'number' || + typeof opt.gid !== 'number') { + throw new TypeError('cannot set owner without number uid and gid'); + } + if (opt.preserveOwner) { + throw new TypeError('cannot preserve owner in archive and also set owner explicitly'); + } + this.uid = opt.uid; + this.gid = opt.gid; + this.setOwner = true; + } + else { + this.uid = undefined; + this.gid = undefined; + this.setOwner = false; + } + // default true for root + if (opt.preserveOwner === undefined && + typeof opt.uid !== 'number') { + this.preserveOwner = !!(process.getuid && process.getuid() === 0); + } + else { + this.preserveOwner = !!opt.preserveOwner; + } + this.processUid = + (this.preserveOwner || this.setOwner) && process.getuid ? + process.getuid() + : undefined; + this.processGid = + (this.preserveOwner || this.setOwner) && process.getgid ? + process.getgid() + : undefined; + // prevent excessively deep nesting of subfolders + // set to `Infinity` to remove this restriction + this.maxDepth = + typeof opt.maxDepth === 'number' ? + opt.maxDepth + : DEFAULT_MAX_DEPTH; + // mostly just for testing, but useful in some cases. + // Forcibly trigger a chown on every entry, no matter what + this.forceChown = opt.forceChown === true; + // turn ><?| in filenames into 0xf000-higher encoded forms + this.win32 = !!opt.win32 || isWindows; + // do not unpack over files that are newer than what's in the archive + this.newer = !!opt.newer; + // do not unpack over ANY files + this.keep = !!opt.keep; + // do not set mtime/atime of extracted entries + this.noMtime = !!opt.noMtime; + // allow .., absolute path entries, and writing through symlinks. + // without this, warn and skip .., relativize absolutes, and error + // on symlinks in extraction path + this.preservePaths = !!opt.preservePaths; + // unlink files and links before writing. This breaks existing hard + // links, and removes symlink directories rather than erroring + this.unlink = !!opt.unlink; + this.cwd = normalizeWindowsPath(path.resolve(opt.cwd || process.cwd())); + this.strip = Number(opt.strip) || 0; + // if we're not chmodding, then we don't need the process umask + this.processUmask = + !this.chmod ? 0 + : opt.processUmask ?? process.umask(); + this.umask = + typeof opt.umask === 'number' ? opt.umask : this.processUmask; + // default mode for dirs created as parents + this.dmode = opt.dmode || 0o0777 & ~this.umask; + this.fmode = opt.fmode || 0o0666 & ~this.umask; + this.on('entry', entry => this[ONENTRY](entry)); + } + // a bad or damaged archive is a warning for Parser, but an error + // when extracting.
Mark those errors as unrecoverable, because + // the Unpack contract cannot be met. + warn(code, msg, data = {}) { + if (code === 'TAR_BAD_ARCHIVE' || code === 'TAR_ABORT') { + data.recoverable = false; + } + return super.warn(code, msg, data); + } + [MAYBECLOSE]() { + if (this[ENDED] && this[PENDING] === 0) { + this.emit('prefinish'); + this.emit('finish'); + this.emit('end'); + } + } + [CHECKPATH](entry) { + const p = normalizeWindowsPath(entry.path); + const parts = p.split('/'); + if (this.strip) { + if (parts.length < this.strip) { + return false; + } + if (entry.type === 'Link') { + const linkparts = normalizeWindowsPath(String(entry.linkpath)).split('/'); + if (linkparts.length >= this.strip) { + entry.linkpath = linkparts.slice(this.strip).join('/'); + } + else { + return false; + } + } + parts.splice(0, this.strip); + entry.path = parts.join('/'); + } + if (isFinite(this.maxDepth) && parts.length > this.maxDepth) { + this.warn('TAR_ENTRY_ERROR', 'path excessively deep', { + entry, + path: p, + depth: parts.length, + maxDepth: this.maxDepth, + }); + return false; + } + if (!this.preservePaths) { + if (parts.includes('..') || + /* c8 ignore next */ + (isWindows && /^[a-z]:\.\.$/i.test(parts[0] ?? ''))) { + this.warn('TAR_ENTRY_ERROR', `path contains '..'`, { + entry, + path: p, + }); + return false; + } + // strip off the root + const [root, stripped] = stripAbsolutePath(p); + if (root) { + entry.path = String(stripped); + this.warn('TAR_ENTRY_INFO', `stripping ${root} from absolute path`, { + entry, + path: p, + }); + } + } + if (path.isAbsolute(entry.path)) { + entry.absolute = normalizeWindowsPath(path.resolve(entry.path)); + } + else { + entry.absolute = normalizeWindowsPath(path.resolve(this.cwd, entry.path)); + } + // if we somehow ended up with a path that escapes the cwd, and we are + // not in preservePaths mode, then something is fishy! This should have + // been prevented above, so ignore this for coverage. + /* c8 ignore start - defense in depth */ + if (!this.preservePaths && + typeof entry.absolute === 'string' && + entry.absolute.indexOf(this.cwd + '/') !== 0 && + entry.absolute !== this.cwd) { + this.warn('TAR_ENTRY_ERROR', 'path escaped extraction target', { + entry, + path: normalizeWindowsPath(entry.path), + resolvedPath: entry.absolute, + cwd: this.cwd, + }); + return false; + } + /* c8 ignore stop */ + // an archive can set properties on the extraction directory, but it + // may not replace the cwd with a different kind of thing entirely. 
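+ // (exposition, not part of the vendored file:) e.g. an entry './' of type + // Directory may set mode/mtime on the extraction root, while an entry that + // resolves to the cwd as a File or SymbolicLink fails the check below, so + // CHECKPATH returns false and ONENTRY skips it via entry.resume().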
+ if (entry.absolute === this.cwd && + entry.type !== 'Directory' && + entry.type !== 'GNUDumpDir') { + return false; + } + // only encode : chars that aren't drive letter indicators + if (this.win32) { + const { root: aRoot } = path.win32.parse(String(entry.absolute)); + entry.absolute = + aRoot + wc.encode(String(entry.absolute).slice(aRoot.length)); + const { root: pRoot } = path.win32.parse(entry.path); + entry.path = pRoot + wc.encode(entry.path.slice(pRoot.length)); + } + return true; + } + [ONENTRY](entry) { + if (!this[CHECKPATH](entry)) { + return entry.resume(); + } + assert.equal(typeof entry.absolute, 'string'); + switch (entry.type) { + case 'Directory': + case 'GNUDumpDir': + if (entry.mode) { + entry.mode = entry.mode | 0o700; + } + // eslint-disable-next-line no-fallthrough + case 'File': + case 'OldFile': + case 'ContiguousFile': + case 'Link': + case 'SymbolicLink': + return this[CHECKFS](entry); + case 'CharacterDevice': + case 'BlockDevice': + case 'FIFO': + default: + return this[UNSUPPORTED](entry); + } + } + [ONERROR](er, entry) { + // Cwd has to exist, or else nothing works. That's serious. + // Other errors are warnings, which raise the error in strict + // mode, but otherwise continue on. + if (er.name === 'CwdError') { + this.emit('error', er); + } + else { + this.warn('TAR_ENTRY_ERROR', er, { entry }); + this[UNPEND](); + entry.resume(); + } + } + [MKDIR](dir, mode, cb) { + mkdir(normalizeWindowsPath(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }, cb); + } + [DOCHOWN](entry) { + // in preserve owner mode, chown if the entry doesn't match process + // in set owner mode, chown if setting doesn't match process + return (this.forceChown || + (this.preserveOwner && + ((typeof entry.uid === 'number' && + entry.uid !== this.processUid) || + (typeof entry.gid === 'number' && + entry.gid !== this.processGid))) || + (typeof this.uid === 'number' && + this.uid !== this.processUid) || + (typeof this.gid === 'number' && this.gid !== this.processGid)); + } + [UID](entry) { + return uint32(this.uid, entry.uid, this.processUid); + } + [GID](entry) { + return uint32(this.gid, entry.gid, this.processGid); + } + [FILE](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const stream = new fsm.WriteStream(String(entry.absolute), { + // slight lie, but it can be numeric flags + flags: getWriteFlag(entry.size), + mode: mode, + autoClose: false, + }); + stream.on('error', (er) => { + if (stream.fd) { + fs.close(stream.fd, () => { }); + } + // flush all the data out so that we aren't left hanging + // if the error wasn't actually fatal. otherwise the parse + // is blocked, and we never proceed. 
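+ // (exposition, not part of the vendored file:) the stub below always + // returns true, i.e. it reports 'buffer not full', so the piped entry + // never waits on a 'drain' event; its remaining bytes are accepted and + // discarded, letting the parser move on to the next entry.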
+ stream.write = () => true; + this[ONERROR](er, entry); + fullyDone(); + }); + let actions = 1; + const done = (er) => { + if (er) { + /* c8 ignore start - we should always have a fd by now */ + if (stream.fd) { + fs.close(stream.fd, () => { }); + } + /* c8 ignore stop */ + this[ONERROR](er, entry); + fullyDone(); + return; + } + if (--actions === 0) { + if (stream.fd !== undefined) { + fs.close(stream.fd, er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + } + fullyDone(); + }); + } + } + }; + stream.on('finish', () => { + // if futimes fails, try utimes + // if utimes fails, fail with the original error + // same for fchown/chown + const abs = String(entry.absolute); + const fd = stream.fd; + if (typeof fd === 'number' && entry.mtime && !this.noMtime) { + actions++; + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + fs.futimes(fd, atime, mtime, er => er ? + fs.utimes(abs, atime, mtime, er2 => done(er2 && er)) + : done()); + } + if (typeof fd === 'number' && this[DOCHOWN](entry)) { + actions++; + const uid = this[UID](entry); + const gid = this[GID](entry); + if (typeof uid === 'number' && typeof gid === 'number') { + fs.fchown(fd, uid, gid, er => er ? + fs.chown(abs, uid, gid, er2 => done(er2 && er)) + : done()); + } + } + done(); + }); + const tx = this.transform ? this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => { + this[ONERROR](er, entry); + fullyDone(); + }); + entry.pipe(tx); + } + tx.pipe(stream); + } + [DIRECTORY](entry, fullyDone) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + this[MKDIR](String(entry.absolute), mode, er => { + if (er) { + this[ONERROR](er, entry); + fullyDone(); + return; + } + let actions = 1; + const done = () => { + if (--actions === 0) { + fullyDone(); + this[UNPEND](); + entry.resume(); + } + }; + if (entry.mtime && !this.noMtime) { + actions++; + fs.utimes(String(entry.absolute), entry.atime || new Date(), entry.mtime, done); + } + if (this[DOCHOWN](entry)) { + actions++; + fs.chown(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry)), done); + } + done(); + }); + } + [UNSUPPORTED](entry) { + entry.unsupported = true; + this.warn('TAR_ENTRY_UNSUPPORTED', `unsupported entry type: ${entry.type}`, { entry }); + entry.resume(); + } + [SYMLINK](entry, done) { + this[LINK](entry, String(entry.linkpath), 'symlink', done); + } + [HARDLINK](entry, done) { + const linkpath = normalizeWindowsPath(path.resolve(this.cwd, String(entry.linkpath))); + this[LINK](entry, linkpath, 'link', done); + } + [PEND]() { + this[PENDING]++; + } + [UNPEND]() { + this[PENDING]--; + this[MAYBECLOSE](); + } + [SKIP](entry) { + this[UNPEND](); + entry.resume(); + } + // Check if we can reuse an existing filesystem entry safely and + // overwrite it, rather than unlinking and recreating + // Windows doesn't report a useful nlink, so we just never reuse entries + [ISREUSABLE](entry, st) { + return (entry.type === 'File' && + !this.unlink && + st.isFile() && + st.nlink <= 1 && + !isWindows); + } + // check if a thing is there, and if so, try to clobber it + [CHECKFS](entry) { + this[PEND](); + const paths = [entry.path]; + if (entry.linkpath) { + paths.push(entry.linkpath); + } + this.reservations.reserve(paths, done => this[CHECKFS2](entry, done)); + } + [PRUNECACHE](entry) { + // if we are not creating a directory, and the path is in the dirCache, + // then that means we are about to delete the directory we created + // previously, and 
it is no longer going to be a directory, and neither + // is any of its children. + // If a symbolic link is encountered, all bets are off. There is no + // reasonable way to sanitize the cache in such a way we will be able to + // avoid having filesystem collisions. If this happens with a non-symlink + // entry, it'll just fail to unpack, but a symlink to a directory, using an + // 8.3 shortname or certain unicode attacks, can evade detection and lead + // to arbitrary writes to anywhere on the system. + if (entry.type === 'SymbolicLink') { + dropCache(this.dirCache); + } + else if (entry.type !== 'Directory') { + pruneCache(this.dirCache, String(entry.absolute)); + } + } + [CHECKFS2](entry, fullyDone) { + this[PRUNECACHE](entry); + const done = (er) => { + this[PRUNECACHE](entry); + fullyDone(er); + }; + const checkCwd = () => { + this[MKDIR](this.cwd, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + this[CHECKED_CWD] = true; + start(); + }); + }; + const start = () => { + if (entry.absolute !== this.cwd) { + const parent = normalizeWindowsPath(path.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + return this[MKDIR](parent, this.dmode, er => { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + afterMakeParent(); + }); + } + } + afterMakeParent(); + }; + const afterMakeParent = () => { + fs.lstat(String(entry.absolute), (lstatEr, st) => { + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + this[SKIP](entry); + done(); + return; + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry, done); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const afterChmod = (er) => this[MAKEFS](er ?? null, entry, done); + if (!needChmod) { + return afterChmod(); + } + return fs.chmod(String(entry.absolute), Number(entry.mode), afterChmod); + } + // Not a dir entry, have to remove it. + // NB: the only way to end up with an entry that is the cwd + // itself, in such a way that == does not detect, is a + // tricky windows absolute path with UNC or 8.3 parts (and + // preservePaths:true, or else it will have been stripped). + // In that case, the user has opted out of path protections + // explicitly, so if they blow away the cwd, c'est la vie. + if (entry.absolute !== this.cwd) { + return fs.rmdir(String(entry.absolute), (er) => this[MAKEFS](er ?? null, entry, done)); + } + } + // not a dir, and not reusable + // don't remove if the cwd, we want that error + if (entry.absolute === this.cwd) { + return this[MAKEFS](null, entry, done); + } + unlinkFile(String(entry.absolute), er => this[MAKEFS](er ?? 
null, entry, done)); + }); + }; + if (this[CHECKED_CWD]) { + start(); + } + else { + checkCwd(); + } + } + [MAKEFS](er, entry, done) { + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + switch (entry.type) { + case 'File': + case 'OldFile': + case 'ContiguousFile': + return this[FILE](entry, done); + case 'Link': + return this[HARDLINK](entry, done); + case 'SymbolicLink': + return this[SYMLINK](entry, done); + case 'Directory': + case 'GNUDumpDir': + return this[DIRECTORY](entry, done); + } + } + [LINK](entry, linkpath, link, done) { + // XXX: get the type ('symlink' or 'junction') for windows + fs[link](linkpath, String(entry.absolute), er => { + if (er) { + this[ONERROR](er, entry); + } + else { + this[UNPEND](); + entry.resume(); + } + done(); + }); + } +} +const callSync = (fn) => { + try { + return [null, fn()]; + } + catch (er) { + return [er, null]; + } +}; +export class UnpackSync extends Unpack { + sync = true; + [MAKEFS](er, entry) { + return super[MAKEFS](er, entry, () => { }); + } + [CHECKFS](entry) { + this[PRUNECACHE](entry); + if (!this[CHECKED_CWD]) { + const er = this[MKDIR](this.cwd, this.dmode); + if (er) { + return this[ONERROR](er, entry); + } + this[CHECKED_CWD] = true; + } + // don't bother to make the parent if the current entry is the cwd, + // we've already checked it. + if (entry.absolute !== this.cwd) { + const parent = normalizeWindowsPath(path.dirname(String(entry.absolute))); + if (parent !== this.cwd) { + const mkParent = this[MKDIR](parent, this.dmode); + if (mkParent) { + return this[ONERROR](mkParent, entry); + } + } + } + const [lstatEr, st] = callSync(() => fs.lstatSync(String(entry.absolute))); + if (st && + (this.keep || + /* c8 ignore next */ + (this.newer && st.mtime > (entry.mtime ?? st.mtime)))) { + return this[SKIP](entry); + } + if (lstatEr || this[ISREUSABLE](entry, st)) { + return this[MAKEFS](null, entry); + } + if (st.isDirectory()) { + if (entry.type === 'Directory') { + const needChmod = this.chmod && + entry.mode && + (st.mode & 0o7777) !== entry.mode; + const [er] = needChmod ? + callSync(() => { + fs.chmodSync(String(entry.absolute), Number(entry.mode)); + }) + : []; + return this[MAKEFS](er, entry); + } + // not a dir entry, have to remove it + const [er] = callSync(() => fs.rmdirSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + // not a dir, and not reusable. + // don't remove if it's the cwd, since we want that error. + const [er] = entry.absolute === this.cwd ? + [] + : callSync(() => unlinkFileSync(String(entry.absolute))); + this[MAKEFS](er, entry); + } + [FILE](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.fmode; + const oner = (er) => { + let closeError; + try { + fs.closeSync(fd); + } + catch (e) { + closeError = e; + } + if (er || closeError) { + this[ONERROR](er || closeError, entry); + } + done(); + }; + let fd; + try { + fd = fs.openSync(String(entry.absolute), getWriteFlag(entry.size), mode); + } + catch (er) { + return oner(er); + } + const tx = this.transform ? 
this.transform(entry) || entry : entry; + if (tx !== entry) { + tx.on('error', (er) => this[ONERROR](er, entry)); + entry.pipe(tx); + } + tx.on('data', (chunk) => { + try { + fs.writeSync(fd, chunk, 0, chunk.length); + } + catch (er) { + oner(er); + } + }); + tx.on('end', () => { + let er = null; + // try both, falling futimes back to utimes + // if either fails, handle the first error + if (entry.mtime && !this.noMtime) { + const atime = entry.atime || new Date(); + const mtime = entry.mtime; + try { + fs.futimesSync(fd, atime, mtime); + } + catch (futimeser) { + try { + fs.utimesSync(String(entry.absolute), atime, mtime); + } + catch (utimeser) { + er = futimeser; + } + } + } + if (this[DOCHOWN](entry)) { + const uid = this[UID](entry); + const gid = this[GID](entry); + try { + fs.fchownSync(fd, Number(uid), Number(gid)); + } + catch (fchowner) { + try { + fs.chownSync(String(entry.absolute), Number(uid), Number(gid)); + } + catch (chowner) { + er = er || fchowner; + } + } + } + oner(er); + }); + } + [DIRECTORY](entry, done) { + const mode = typeof entry.mode === 'number' ? + entry.mode & 0o7777 + : this.dmode; + const er = this[MKDIR](String(entry.absolute), mode); + if (er) { + this[ONERROR](er, entry); + done(); + return; + } + if (entry.mtime && !this.noMtime) { + try { + fs.utimesSync(String(entry.absolute), entry.atime || new Date(), entry.mtime); + /* c8 ignore next */ + } + catch (er) { } + } + if (this[DOCHOWN](entry)) { + try { + fs.chownSync(String(entry.absolute), Number(this[UID](entry)), Number(this[GID](entry))); + } + catch (er) { } + } + done(); + entry.resume(); + } + [MKDIR](dir, mode) { + try { + return mkdirSync(normalizeWindowsPath(dir), { + uid: this.uid, + gid: this.gid, + processUid: this.processUid, + processGid: this.processGid, + umask: this.processUmask, + preserve: this.preservePaths, + unlink: this.unlink, + cache: this.dirCache, + cwd: this.cwd, + mode: mode, + }); + } + catch (er) { + return er; + } + } + [LINK](entry, linkpath, link, done) { + const ls = `${link}Sync`; + try { + fs[ls](linkpath, String(entry.absolute)); + done(); + entry.resume(); + } + catch (er) { + return this[ONERROR](er, entry); + } + } +} +//# sourceMappingURL=unpack.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/update.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/update.js new file mode 100644 index 00000000000000..21398e9766663d --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/update.js @@ -0,0 +1,30 @@ +// tar -u +import { makeCommand } from './make-command.js'; +import { replace as r } from './replace.js'; +// just call tar.r with the filter and mtimeCache +export const update = makeCommand(r.syncFile, r.asyncFile, r.syncNoFile, r.asyncNoFile, (opt, entries = []) => { + r.validate?.(opt, entries); + mtimeFilter(opt); +}); +const mtimeFilter = (opt) => { + const filter = opt.filter; + if (!opt.mtimeCache) { + opt.mtimeCache = new Map(); + } + opt.filter = + filter ? + (path, stat) => filter(path, stat) && + !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 0)) + /* c8 ignore stop */ + ) + : (path, stat) => !( + /* c8 ignore start */ + ((opt.mtimeCache?.get(path) ?? stat.mtime ?? 0) > + (stat.mtime ?? 
0)) + /* c8 ignore stop */ + ); +}; +//# sourceMappingURL=update.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js new file mode 100644 index 00000000000000..13e798afefc85e --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/warn-method.js @@ -0,0 +1,27 @@ +export const warnMethod = (self, code, message, data = {}) => { + if (self.file) { + data.file = self.file; + } + if (self.cwd) { + data.cwd = self.cwd; + } + data.code = + (message instanceof Error && + message.code) || + code; + data.tarCode = code; + if (!self.strict && data.recoverable !== false) { + if (message instanceof Error) { + data = Object.assign(message, data); + message = message.message; + } + self.emit('warn', code, message, data); + } + else if (message instanceof Error) { + self.emit('error', Object.assign(message, data)); + } + else { + self.emit('error', Object.assign(new Error(`${code}: ${message}`), data)); + } +}; +//# sourceMappingURL=warn-method.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/winchars.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/winchars.js new file mode 100644 index 00000000000000..c41eb86d69a4bb --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/winchars.js @@ -0,0 +1,9 @@ +// When writing files on Windows, translate the characters to their +// 0xf000 higher-encoded versions. +const raw = ['|', '<', '>', '?', ':']; +const win = raw.map(char => String.fromCharCode(0xf000 + char.charCodeAt(0))); +const toWin = new Map(raw.map((char, i) => [char, win[i]])); +const toRaw = new Map(win.map((char, i) => [char, raw[i]])); +export const encode = (s) => raw.reduce((s, c) => s.split(c).join(toWin.get(c)), s); +export const decode = (s) => win.reduce((s, c) => s.split(c).join(toRaw.get(c)), s); +//# sourceMappingURL=winchars.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js new file mode 100644 index 00000000000000..9028cd676b4cd2 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/dist/esm/write-entry.js @@ -0,0 +1,657 @@ +import fs from 'fs'; +import { Minipass } from 'minipass'; +import path from 'path'; +import { Header } from './header.js'; +import { modeFix } from './mode-fix.js'; +import { normalizeWindowsPath } from './normalize-windows-path.js'; +import { dealias, } from './options.js'; +import { Pax } from './pax.js'; +import { stripAbsolutePath } from './strip-absolute-path.js'; +import { stripTrailingSlashes } from './strip-trailing-slashes.js'; +import { warnMethod, } from './warn-method.js'; +import * as winchars from './winchars.js'; +const prefixPath = (path, prefix) => { + if (!prefix) { + return normalizeWindowsPath(path); + } + path = normalizeWindowsPath(path).replace(/^\.(\/|$)/, ''); + return stripTrailingSlashes(prefix) + '/' + path; +}; +const maxReadSize = 16 * 1024 * 1024; +const PROCESS = Symbol('process'); +const FILE = Symbol('file'); +const DIRECTORY = Symbol('directory'); +const SYMLINK = Symbol('symlink'); +const HARDLINK = Symbol('hardlink'); +const HEADER = Symbol('header'); +const READ = Symbol('read'); +const LSTAT = Symbol('lstat'); +const ONLSTAT = Symbol('onlstat'); +const ONREAD = Symbol('onread'); +const ONREADLINK = Symbol('onreadlink'); +const 
OPENFILE = Symbol('openfile'); +const ONOPENFILE = Symbol('onopenfile'); +const CLOSE = Symbol('close'); +const MODE = Symbol('mode'); +const AWAITDRAIN = Symbol('awaitDrain'); +const ONDRAIN = Symbol('ondrain'); +const PREFIX = Symbol('prefix'); +export class WriteEntry extends Minipass { + path; + portable; + myuid = (process.getuid && process.getuid()) || 0; + // until node has builtin pwnam functions, this'll have to do + myuser = process.env.USER || ''; + maxReadSize; + linkCache; + statCache; + preservePaths; + cwd; + strict; + mtime; + noPax; + noMtime; + prefix; + fd; + blockLen = 0; + blockRemain = 0; + buf; + pos = 0; + remain = 0; + length = 0; + offset = 0; + win32; + absolute; + header; + type; + linkpath; + stat; + onWriteEntry; + #hadError = false; + constructor(p, opt_ = {}) { + const opt = dealias(opt_); + super(); + this.path = normalizeWindowsPath(p); + // suppress atime, ctime, uid, gid, uname, gname + this.portable = !!opt.portable; + this.maxReadSize = opt.maxReadSize || maxReadSize; + this.linkCache = opt.linkCache || new Map(); + this.statCache = opt.statCache || new Map(); + this.preservePaths = !!opt.preservePaths; + this.cwd = normalizeWindowsPath(opt.cwd || process.cwd()); + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + this.mtime = opt.mtime; + this.prefix = + opt.prefix ? normalizeWindowsPath(opt.prefix) : undefined; + this.onWriteEntry = opt.onWriteEntry; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.win32 = !!opt.win32 || process.platform === 'win32'; + if (this.win32) { + // force the \ to / normalization, since we might not *actually* + // be on windows, but want \ to be considered a path separator. + this.path = winchars.decode(this.path.replace(/\\/g, '/')); + p = p.replace(/\\/g, '/'); + } + this.absolute = normalizeWindowsPath(opt.absolute || path.resolve(this.cwd, p)); + if (this.path === '') { + this.path = './'; + } + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + const cs = this.statCache.get(this.absolute); + if (cs) { + this[ONLSTAT](cs); + } + else { + this[LSTAT](); + } + } + warn(code, message, data = {}) { + return warnMethod(this, code, message, data); + } + emit(ev, ...data) { + if (ev === 'error') { + this.#hadError = true; + } + return super.emit(ev, ...data); + } + [LSTAT]() { + fs.lstat(this.absolute, (er, stat) => { + if (er) { + return this.emit('error', er); + } + this[ONLSTAT](stat); + }); + } + [ONLSTAT](stat) { + this.statCache.set(this.absolute, stat); + this.stat = stat; + if (!stat.isFile()) { + stat.size = 0; + } + this.type = getType(stat); + this.emit('stat', stat); + this[PROCESS](); + } + [PROCESS]() { + switch (this.type) { + case 'File': + return this[FILE](); + case 'Directory': + return this[DIRECTORY](); + case 'SymbolicLink': + return this[SYMLINK](); + // unsupported types are ignored. 
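+ // (exposition, not part of the vendored file:) getType() at the bottom of + // this file maps anything that is not a regular file, directory, or symlink + // (e.g. FIFOs, sockets, device files) to 'Unsupported', so such entries hit + // this default and end the stream without emitting a header.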
+ default: + return this.end(); + } + } + [MODE](mode) { + return modeFix(mode, this.type === 'Directory', this.portable); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [HEADER]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot write header before stat'); + } + /* c8 ignore stop */ + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.onWriteEntry?.(this); + this.header = new Header({ + path: this[PREFIX](this.path), + // only apply the prefix to hard links. + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this[MODE](this.stat.mode), + uid: this.portable ? undefined : this.stat.uid, + gid: this.portable ? undefined : this.stat.gid, + size: this.stat.size, + mtime: this.noMtime ? undefined : this.mtime || this.stat.mtime, + /* c8 ignore next */ + type: this.type === 'Unsupported' ? undefined : this.type, + uname: this.portable ? undefined + : this.stat.uid === this.myuid ? this.myuser + : '', + atime: this.portable ? undefined : this.stat.atime, + ctime: this.portable ? undefined : this.stat.ctime, + }); + if (this.header.encode() && !this.noPax) { + super.write(new Pax({ + atime: this.portable ? undefined : this.header.atime, + ctime: this.portable ? undefined : this.header.ctime, + gid: this.portable ? undefined : this.header.gid, + mtime: this.noMtime ? undefined : (this.mtime || this.header.mtime), + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.header.size, + uid: this.portable ? undefined : this.header.uid, + uname: this.portable ? undefined : this.header.uname, + dev: this.portable ? undefined : this.stat.dev, + ino: this.portable ? undefined : this.stat.ino, + nlink: this.portable ? 
undefined : this.stat.nlink, + }).encode()); + } + const block = this.header?.block; + /* c8 ignore start */ + if (!block) { + throw new Error('failed to encode header'); + } + /* c8 ignore stop */ + super.write(block); + } + [DIRECTORY]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create directory entry without stat'); + } + /* c8 ignore stop */ + if (this.path.slice(-1) !== '/') { + this.path += '/'; + } + this.stat.size = 0; + this[HEADER](); + this.end(); + } + [SYMLINK]() { + fs.readlink(this.absolute, (er, linkpath) => { + if (er) { + return this.emit('error', er); + } + this[ONREADLINK](linkpath); + }); + } + [ONREADLINK](linkpath) { + this.linkpath = normalizeWindowsPath(linkpath); + this[HEADER](); + this.end(); + } + [HARDLINK](linkpath) { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create link entry without stat'); + } + /* c8 ignore stop */ + this.type = 'Link'; + this.linkpath = normalizeWindowsPath(path.relative(this.cwd, linkpath)); + this.stat.size = 0; + this[HEADER](); + this.end(); + } + [FILE]() { + /* c8 ignore start */ + if (!this.stat) { + throw new Error('cannot create file entry without stat'); + } + /* c8 ignore stop */ + if (this.stat.nlink > 1) { + const linkKey = `${this.stat.dev}:${this.stat.ino}`; + const linkpath = this.linkCache.get(linkKey); + if (linkpath?.indexOf(this.cwd) === 0) { + return this[HARDLINK](linkpath); + } + this.linkCache.set(linkKey, this.absolute); + } + this[HEADER](); + if (this.stat.size === 0) { + return this.end(); + } + this[OPENFILE](); + } + [OPENFILE]() { + fs.open(this.absolute, 'r', (er, fd) => { + if (er) { + return this.emit('error', er); + } + this[ONOPENFILE](fd); + }); + } + [ONOPENFILE](fd) { + this.fd = fd; + if (this.#hadError) { + return this[CLOSE](); + } + /* c8 ignore start */ + if (!this.stat) { + throw new Error('should stat before calling onopenfile'); + } + /* c8 ignore start */ + this.blockLen = 512 * Math.ceil(this.stat.size / 512); + this.blockRemain = this.blockLen; + const bufLen = Math.min(this.blockLen, this.maxReadSize); + this.buf = Buffer.allocUnsafe(bufLen); + this.offset = 0; + this.pos = 0; + this.remain = this.stat.size; + this.length = this.buf.length; + this[READ](); + } + [READ]() { + const { fd, buf, offset, length, pos } = this; + if (fd === undefined || buf === undefined) { + throw new Error('cannot read file without first opening'); + } + fs.read(fd, buf, offset, length, pos, (er, bytesRead) => { + if (er) { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + return this[CLOSE](() => this.emit('error', er)); + } + this[ONREAD](bytesRead); + }); + } + /* c8 ignore start */ + [CLOSE](cb = () => { }) { + /* c8 ignore stop */ + if (this.fd !== undefined) + fs.close(this.fd, cb); + } + [ONREAD](bytesRead) { + if (bytesRead <= 0 && this.remain > 0) { + const er = Object.assign(new Error('encountered unexpected EOF'), { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }); + return this[CLOSE](() => this.emit('error', er)); + } + if (bytesRead > this.remain) { + const er = Object.assign(new Error('did not encounter expected EOF'), { + path: this.absolute, + syscall: 'read', + code: 'EOF', + }); + return this[CLOSE](() => this.emit('error', er)); + } + /* c8 ignore start */ + if (!this.buf) { + throw new Error('should have created buffer prior to reading'); + } + /* c8 ignore stop */ + // null out the rest of the buffer, if we could fit the block padding + 
// at the end of this loop, we've incremented bytesRead and this.remain + // to be incremented up to the blockRemain level, as if we had expected + // to get a null-padded file, and read it until the end. then we will + // decrement both remain and blockRemain by bytesRead, and know that we + // reached the expected EOF, without any null buffer to append. + if (bytesRead === this.remain) { + for (let i = bytesRead; i < this.length && bytesRead < this.blockRemain; i++) { + this.buf[i + this.offset] = 0; + bytesRead++; + this.remain++; + } + } + const chunk = this.offset === 0 && bytesRead === this.buf.length ? + this.buf + : this.buf.subarray(this.offset, this.offset + bytesRead); + const flushed = this.write(chunk); + if (!flushed) { + this[AWAITDRAIN](() => this[ONDRAIN]()); + } + else { + this[ONDRAIN](); + } + } + [AWAITDRAIN](cb) { + this.once('drain', cb); + } + write(chunk, encoding, cb) { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8'); + } + /* c8 ignore stop */ + if (this.blockRemain < chunk.length) { + const er = Object.assign(new Error('writing more data than expected'), { + path: this.absolute, + }); + return this.emit('error', er); + } + this.remain -= chunk.length; + this.blockRemain -= chunk.length; + this.pos += chunk.length; + this.offset += chunk.length; + return super.write(chunk, null, cb); + } + [ONDRAIN]() { + if (!this.remain) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)); + } + return this[CLOSE](er => er ? this.emit('error', er) : this.end()); + } + /* c8 ignore start */ + if (!this.buf) { + throw new Error('buffer lost somehow in ONDRAIN'); + } + /* c8 ignore stop */ + if (this.offset >= this.length) { + // if we only have a smaller bit left to read, alloc a smaller buffer + // otherwise, keep it the same length it was before. 
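+ // (worked example, not part of the vendored file:) for a large file the + // buffer is capped at maxReadSize (16 MiB); with 1536 bytes of blockRemain + // left, Math.min shrinks the next allocation to 1536 bytes, while with + // 32 MiB still to read it stays at the full 16 MiB.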
+ this.buf = Buffer.allocUnsafe(Math.min(this.blockRemain, this.buf.length)); + this.offset = 0; + } + this.length = this.buf.length - this.offset; + this[READ](); + } +} +export class WriteEntrySync extends WriteEntry { + sync = true; + [LSTAT]() { + this[ONLSTAT](fs.lstatSync(this.absolute)); + } + [SYMLINK]() { + this[ONREADLINK](fs.readlinkSync(this.absolute)); + } + [OPENFILE]() { + this[ONOPENFILE](fs.openSync(this.absolute, 'r')); + } + [READ]() { + let threw = true; + try { + const { fd, buf, offset, length, pos } = this; + /* c8 ignore start */ + if (fd === undefined || buf === undefined) { + throw new Error('fd and buf must be set in READ method'); + } + /* c8 ignore stop */ + const bytesRead = fs.readSync(fd, buf, offset, length, pos); + this[ONREAD](bytesRead); + threw = false; + } + finally { + // ignoring the error from close(2) is a bad practice, but at + // this point we already have an error, don't need another one + if (threw) { + try { + this[CLOSE](() => { }); + } + catch (er) { } + } + } + } + [AWAITDRAIN](cb) { + cb(); + } + /* c8 ignore start */ + [CLOSE](cb = () => { }) { + /* c8 ignore stop */ + if (this.fd !== undefined) + fs.closeSync(this.fd); + cb(); + } +} +export class WriteEntryTar extends Minipass { + blockLen = 0; + blockRemain = 0; + buf = 0; + pos = 0; + remain = 0; + length = 0; + preservePaths; + portable; + strict; + noPax; + noMtime; + readEntry; + type; + prefix; + path; + mode; + uid; + gid; + uname; + gname; + header; + mtime; + atime; + ctime; + linkpath; + size; + onWriteEntry; + warn(code, message, data = {}) { + return warnMethod(this, code, message, data); + } + constructor(readEntry, opt_ = {}) { + const opt = dealias(opt_); + super(); + this.preservePaths = !!opt.preservePaths; + this.portable = !!opt.portable; + this.strict = !!opt.strict; + this.noPax = !!opt.noPax; + this.noMtime = !!opt.noMtime; + this.onWriteEntry = opt.onWriteEntry; + this.readEntry = readEntry; + const { type } = readEntry; + /* c8 ignore start */ + if (type === 'Unsupported') { + throw new Error('writing entry that should be ignored'); + } + /* c8 ignore stop */ + this.type = type; + if (this.type === 'Directory' && this.portable) { + this.noMtime = true; + } + this.prefix = opt.prefix; + this.path = normalizeWindowsPath(readEntry.path); + this.mode = + readEntry.mode !== undefined ? + this[MODE](readEntry.mode) + : undefined; + this.uid = this.portable ? undefined : readEntry.uid; + this.gid = this.portable ? undefined : readEntry.gid; + this.uname = this.portable ? undefined : readEntry.uname; + this.gname = this.portable ? undefined : readEntry.gname; + this.size = readEntry.size; + this.mtime = + this.noMtime ? undefined : opt.mtime || readEntry.mtime; + this.atime = this.portable ? undefined : readEntry.atime; + this.ctime = this.portable ? undefined : readEntry.ctime; + this.linkpath = + readEntry.linkpath !== undefined ? + normalizeWindowsPath(readEntry.linkpath) + : undefined; + if (typeof opt.onwarn === 'function') { + this.on('warn', opt.onwarn); + } + let pathWarn = false; + if (!this.preservePaths) { + const [root, stripped] = stripAbsolutePath(this.path); + if (root && typeof stripped === 'string') { + this.path = stripped; + pathWarn = root; + } + } + this.remain = readEntry.size; + this.blockRemain = readEntry.startBlockSize; + this.onWriteEntry?.(this); + this.header = new Header({ + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? 
+ this[PREFIX](this.linkpath) + : this.linkpath, + // only the permissions and setuid/setgid/sticky bitflags + // not the higher-order bits that specify file type + mode: this.mode, + uid: this.portable ? undefined : this.uid, + gid: this.portable ? undefined : this.gid, + size: this.size, + mtime: this.noMtime ? undefined : this.mtime, + type: this.type, + uname: this.portable ? undefined : this.uname, + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + }); + if (pathWarn) { + this.warn('TAR_ENTRY_INFO', `stripping ${pathWarn} from absolute path`, { + entry: this, + path: pathWarn + this.path, + }); + } + if (this.header.encode() && !this.noPax) { + super.write(new Pax({ + atime: this.portable ? undefined : this.atime, + ctime: this.portable ? undefined : this.ctime, + gid: this.portable ? undefined : this.gid, + mtime: this.noMtime ? undefined : this.mtime, + path: this[PREFIX](this.path), + linkpath: this.type === 'Link' && this.linkpath !== undefined ? + this[PREFIX](this.linkpath) + : this.linkpath, + size: this.size, + uid: this.portable ? undefined : this.uid, + uname: this.portable ? undefined : this.uname, + dev: this.portable ? undefined : this.readEntry.dev, + ino: this.portable ? undefined : this.readEntry.ino, + nlink: this.portable ? undefined : this.readEntry.nlink, + }).encode()); + } + const b = this.header?.block; + /* c8 ignore start */ + if (!b) + throw new Error('failed to encode header'); + /* c8 ignore stop */ + super.write(b); + readEntry.pipe(this); + } + [PREFIX](path) { + return prefixPath(path, this.prefix); + } + [MODE](mode) { + return modeFix(mode, this.type === 'Directory', this.portable); + } + write(chunk, encoding, cb) { + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, typeof encoding === 'string' ? encoding : 'utf8'); + } + /* c8 ignore stop */ + const writeLen = chunk.length; + if (writeLen > this.blockRemain) { + throw new Error('writing more to entry than is appropriate'); + } + this.blockRemain -= writeLen; + return super.write(chunk, cb); + } + end(chunk, encoding, cb) { + if (this.blockRemain) { + super.write(Buffer.alloc(this.blockRemain)); + } + /* c8 ignore start - just junk to comply with NodeJS.WritableStream */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + if (typeof chunk === 'string') { + chunk = Buffer.from(chunk, encoding ?? 'utf8'); + } + if (cb) + this.once('finish', cb); + chunk ? super.end(chunk, cb) : super.end(cb); + /* c8 ignore stop */ + return this; + } +} +const getType = (stat) => stat.isFile() ? 'File' + : stat.isDirectory() ? 'Directory' + : stat.isSymbolicLink() ? 'SymbolicLink' + : 'Unsupported'; +//# sourceMappingURL=write-entry.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/tar/package.json b/deps/npm/node_modules/cacache/node_modules/tar/package.json new file mode 100644 index 00000000000000..0283103ee9eaf9 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/tar/package.json @@ -0,0 +1,325 @@ +{ + "author": "Isaac Z. 
Schlueter", + "name": "tar", + "description": "tar for node", + "version": "7.4.3", + "repository": { + "type": "git", + "url": "https://github.com/isaacs/node-tar.git" + }, + "scripts": { + "genparse": "node scripts/generate-parse-fixtures.js", + "snap": "tap", + "test": "tap", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "prepare": "tshy", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --log-level warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "dependencies": { + "@isaacs/fs-minipass": "^4.0.0", + "chownr": "^3.0.0", + "minipass": "^7.1.2", + "minizlib": "^3.0.1", + "mkdirp": "^3.0.1", + "yallist": "^5.0.0" + }, + "devDependencies": { + "chmodr": "^1.2.0", + "end-of-stream": "^1.4.3", + "events-to-array": "^2.0.3", + "mutate-fs": "^2.1.1", + "nock": "^13.5.4", + "prettier": "^3.2.5", + "rimraf": "^5.0.5", + "tap": "^18.7.2", + "tshy": "^1.13.1", + "typedoc": "^0.25.13" + }, + "license": "ISC", + "engines": { + "node": ">=18" + }, + "files": [ + "dist" + ], + "tap": { + "coverage-map": "map.js", + "timeout": 0, + "typecheck": true + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts", + "./c": "./src/create.ts", + "./create": "./src/create.ts", + "./replace": "./src/create.ts", + "./r": "./src/create.ts", + "./list": "./src/list.ts", + "./t": "./src/list.ts", + "./update": "./src/update.ts", + "./u": "./src/update.ts", + "./extract": "./src/extract.ts", + "./x": "./src/extract.ts", + "./pack": "./src/pack.ts", + "./unpack": "./src/unpack.ts", + "./parse": "./src/parse.ts", + "./read-entry": "./src/read-entry.ts", + "./write-entry": "./src/write-entry.ts", + "./header": "./src/header.ts", + "./pax": "./src/pax.ts", + "./types": "./src/types.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "source": "./src/index.ts", + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "source": "./src/index.ts", + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + }, + "./c": { + "import": { + "source": "./src/create.ts", + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./create": { + "import": { + "source": "./src/create.ts", + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./replace": { + "import": { + "source": "./src/create.ts", + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./r": { + "import": { + "source": "./src/create.ts", + "types": "./dist/esm/create.d.ts", + "default": "./dist/esm/create.js" + }, + "require": { + "source": "./src/create.ts", + "types": "./dist/commonjs/create.d.ts", + "default": "./dist/commonjs/create.js" + } + }, + "./list": { + 
"import": { + "source": "./src/list.ts", + "types": "./dist/esm/list.d.ts", + "default": "./dist/esm/list.js" + }, + "require": { + "source": "./src/list.ts", + "types": "./dist/commonjs/list.d.ts", + "default": "./dist/commonjs/list.js" + } + }, + "./t": { + "import": { + "source": "./src/list.ts", + "types": "./dist/esm/list.d.ts", + "default": "./dist/esm/list.js" + }, + "require": { + "source": "./src/list.ts", + "types": "./dist/commonjs/list.d.ts", + "default": "./dist/commonjs/list.js" + } + }, + "./update": { + "import": { + "source": "./src/update.ts", + "types": "./dist/esm/update.d.ts", + "default": "./dist/esm/update.js" + }, + "require": { + "source": "./src/update.ts", + "types": "./dist/commonjs/update.d.ts", + "default": "./dist/commonjs/update.js" + } + }, + "./u": { + "import": { + "source": "./src/update.ts", + "types": "./dist/esm/update.d.ts", + "default": "./dist/esm/update.js" + }, + "require": { + "source": "./src/update.ts", + "types": "./dist/commonjs/update.d.ts", + "default": "./dist/commonjs/update.js" + } + }, + "./extract": { + "import": { + "source": "./src/extract.ts", + "types": "./dist/esm/extract.d.ts", + "default": "./dist/esm/extract.js" + }, + "require": { + "source": "./src/extract.ts", + "types": "./dist/commonjs/extract.d.ts", + "default": "./dist/commonjs/extract.js" + } + }, + "./x": { + "import": { + "source": "./src/extract.ts", + "types": "./dist/esm/extract.d.ts", + "default": "./dist/esm/extract.js" + }, + "require": { + "source": "./src/extract.ts", + "types": "./dist/commonjs/extract.d.ts", + "default": "./dist/commonjs/extract.js" + } + }, + "./pack": { + "import": { + "source": "./src/pack.ts", + "types": "./dist/esm/pack.d.ts", + "default": "./dist/esm/pack.js" + }, + "require": { + "source": "./src/pack.ts", + "types": "./dist/commonjs/pack.d.ts", + "default": "./dist/commonjs/pack.js" + } + }, + "./unpack": { + "import": { + "source": "./src/unpack.ts", + "types": "./dist/esm/unpack.d.ts", + "default": "./dist/esm/unpack.js" + }, + "require": { + "source": "./src/unpack.ts", + "types": "./dist/commonjs/unpack.d.ts", + "default": "./dist/commonjs/unpack.js" + } + }, + "./parse": { + "import": { + "source": "./src/parse.ts", + "types": "./dist/esm/parse.d.ts", + "default": "./dist/esm/parse.js" + }, + "require": { + "source": "./src/parse.ts", + "types": "./dist/commonjs/parse.d.ts", + "default": "./dist/commonjs/parse.js" + } + }, + "./read-entry": { + "import": { + "source": "./src/read-entry.ts", + "types": "./dist/esm/read-entry.d.ts", + "default": "./dist/esm/read-entry.js" + }, + "require": { + "source": "./src/read-entry.ts", + "types": "./dist/commonjs/read-entry.d.ts", + "default": "./dist/commonjs/read-entry.js" + } + }, + "./write-entry": { + "import": { + "source": "./src/write-entry.ts", + "types": "./dist/esm/write-entry.d.ts", + "default": "./dist/esm/write-entry.js" + }, + "require": { + "source": "./src/write-entry.ts", + "types": "./dist/commonjs/write-entry.d.ts", + "default": "./dist/commonjs/write-entry.js" + } + }, + "./header": { + "import": { + "source": "./src/header.ts", + "types": "./dist/esm/header.d.ts", + "default": "./dist/esm/header.js" + }, + "require": { + "source": "./src/header.ts", + "types": "./dist/commonjs/header.d.ts", + "default": "./dist/commonjs/header.js" + } + }, + "./pax": { + "import": { + "source": "./src/pax.ts", + "types": "./dist/esm/pax.d.ts", + "default": "./dist/esm/pax.js" + }, + "require": { + "source": "./src/pax.ts", + "types": "./dist/commonjs/pax.d.ts", + "default": 
"./dist/commonjs/pax.js" + } + }, + "./types": { + "import": { + "source": "./src/types.ts", + "types": "./dist/esm/types.d.ts", + "default": "./dist/esm/types.js" + }, + "require": { + "source": "./src/types.ts", + "types": "./dist/commonjs/types.d.ts", + "default": "./dist/commonjs/types.js" + } + } + }, + "type": "module", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts" +} diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE.md b/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE.md new file mode 100644 index 00000000000000..881248b6d7f0ca --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/LICENSE.md @@ -0,0 +1,63 @@ +All packages under `src/` are licensed according to the terms in +their respective `LICENSE` or `LICENSE.md` files. + +The remainder of this project is licensed under the Blue Oak +Model License, as follows: + +----- + +# Blue Oak Model License + +Version 1.0.0 + +## Purpose + +This license gives everyone as much permission to work with +this software as possible, while protecting contributors +from liability. + +## Acceptance + +In order to receive this license, you must agree to its +rules. The rules of this license are both obligations +under that agreement and conditions to your license. +You must not do anything with this software that triggers +a rule that you cannot or will not follow. + +## Copyright + +Each contributor licenses you to do everything with this +software that would otherwise infringe that contributor's +copyright in it. + +## Notices + +You must ensure that everyone who gets a copy of +any part of this software from you, with or without +changes, also gets the text of this license or a link to +. + +## Excuse + +If anyone notifies you in writing that you have not +complied with [Notices](#notices), you can keep your +license by taking all practical steps to comply within 30 +days after the notice. If you do not do so, your license +ends immediately. + +## Patent + +Each contributor licenses you to do everything with this +software that would otherwise infringe any patent claims +they can license or become able to license. + +## Reliability + +No contributor can revoke this license. 
+ +## No Liability + +***As far as the law allows, this software comes as is, +without any warranty or condition, and no contributor +will be liable to anyone for any damages related to this +software or this license, under any kind of legal claim.*** diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js new file mode 100644 index 00000000000000..c1e1e4741689d9 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/index.js @@ -0,0 +1,384 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.Node = exports.Yallist = void 0; +class Yallist { + tail; + head; + length = 0; + static create(list = []) { + return new Yallist(list); + } + constructor(list = []) { + for (const item of list) { + this.push(item); + } + } + *[Symbol.iterator]() { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value; + } + } + removeNode(node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list'); + } + const next = node.next; + const prev = node.prev; + if (next) { + next.prev = prev; + } + if (prev) { + prev.next = next; + } + if (node === this.head) { + this.head = next; + } + if (node === this.tail) { + this.tail = prev; + } + this.length--; + node.next = undefined; + node.prev = undefined; + node.list = undefined; + return next; + } + unshiftNode(node) { + if (node === this.head) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const head = this.head; + node.list = this; + node.next = head; + if (head) { + head.prev = node; + } + this.head = node; + if (!this.tail) { + this.tail = node; + } + this.length++; + } + pushNode(node) { + if (node === this.tail) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const tail = this.tail; + node.list = this; + node.prev = tail; + if (tail) { + tail.next = node; + } + this.tail = node; + if (!this.head) { + this.head = node; + } + this.length++; + } + push(...args) { + for (let i = 0, l = args.length; i < l; i++) { + push(this, args[i]); + } + return this.length; + } + unshift(...args) { + for (var i = 0, l = args.length; i < l; i++) { + unshift(this, args[i]); + } + return this.length; + } + pop() { + if (!this.tail) { + return undefined; + } + const res = this.tail.value; + const t = this.tail; + this.tail = this.tail.prev; + if (this.tail) { + this.tail.next = undefined; + } + else { + this.head = undefined; + } + t.list = undefined; + this.length--; + return res; + } + shift() { + if (!this.head) { + return undefined; + } + const res = this.head.value; + const h = this.head; + this.head = this.head.next; + if (this.head) { + this.head.prev = undefined; + } + else { + this.tail = undefined; + } + h.list = undefined; + this.length--; + return res; + } + forEach(fn, thisp) { + thisp = thisp || this; + for (let walker = this.head, i = 0; !!walker; i++) { + fn.call(thisp, walker.value, i, this); + walker = walker.next; + } + } + forEachReverse(fn, thisp) { + thisp = thisp || this; + for (let walker = this.tail, i = this.length - 1; !!walker; i--) { + fn.call(thisp, walker.value, i, this); + walker = walker.prev; + } + } + get(n) { + let i = 0; + let walker = this.head; + for (; !!walker && i < n; i++) { + walker = walker.next; + } + if (i === n && !!walker) { + return walker.value; + } + } + getReverse(n) { + let i = 0; + let walker = this.tail; + for (; !!walker && i < n; i++) { + // 
abort out of the list early if we hit a cycle + walker = walker.prev; + } + if (i === n && !!walker) { + return walker.value; + } + } + map(fn, thisp) { + thisp = thisp || this; + const res = new Yallist(); + for (let walker = this.head; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.next; + } + return res; + } + mapReverse(fn, thisp) { + thisp = thisp || this; + var res = new Yallist(); + for (let walker = this.tail; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.prev; + } + return res; + } + reduce(fn, initial) { + let acc; + let walker = this.head; + if (arguments.length > 1) { + acc = initial; + } + else if (this.head) { + walker = this.head.next; + acc = this.head.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (var i = 0; !!walker; i++) { + acc = fn(acc, walker.value, i); + walker = walker.next; + } + return acc; + } + reduceReverse(fn, initial) { + let acc; + let walker = this.tail; + if (arguments.length > 1) { + acc = initial; + } + else if (this.tail) { + walker = this.tail.prev; + acc = this.tail.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (let i = this.length - 1; !!walker; i--) { + acc = fn(acc, walker.value, i); + walker = walker.prev; + } + return acc; + } + toArray() { + const arr = new Array(this.length); + for (let i = 0, walker = this.head; !!walker; i++) { + arr[i] = walker.value; + walker = walker.next; + } + return arr; + } + toArrayReverse() { + const arr = new Array(this.length); + for (let i = 0, walker = this.tail; !!walker; i++) { + arr[i] = walker.value; + walker = walker.prev; + } + return arr; + } + slice(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let walker = this.head; + let i = 0; + for (i = 0; !!walker && i < from; i++) { + walker = walker.next; + } + for (; !!walker && i < to; i++, walker = walker.next) { + ret.push(walker.value); + } + return ret; + } + sliceReverse(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let i = this.length; + let walker = this.tail; + for (; !!walker && i > to; i--) { + walker = walker.prev; + } + for (; !!walker && i > from; i--, walker = walker.prev) { + ret.push(walker.value); + } + return ret; + } + splice(start, deleteCount = 0, ...nodes) { + if (start > this.length) { + start = this.length - 1; + } + if (start < 0) { + start = this.length + start; + } + let walker = this.head; + for (let i = 0; !!walker && i < start; i++) { + walker = walker.next; + } + const ret = []; + for (let i = 0; !!walker && i < deleteCount; i++) { + ret.push(walker.value); + walker = this.removeNode(walker); + } + if (!walker) { + walker = this.tail; + } + else if (walker !== this.tail) { + walker = walker.prev; + } + for (const v of nodes) { + walker = insertAfter(this, walker, v); + } + return ret; + } + reverse() { + const head = this.head; + const tail = this.tail; + for (let walker = head; !!walker; walker = walker.prev) { + const p = walker.prev; + walker.prev = walker.next; + walker.next = p; + } + this.head = tail; + 
this.tail = head; + return this; + } +} +exports.Yallist = Yallist; +// insertAfter undefined means "make the node the new head of list" +function insertAfter(self, node, value) { + const prev = node; + const next = node ? node.next : self.head; + const inserted = new Node(value, prev, next, self); + if (inserted.next === undefined) { + self.tail = inserted; + } + if (inserted.prev === undefined) { + self.head = inserted; + } + self.length++; + return inserted; +} +function push(self, item) { + self.tail = new Node(item, self.tail, undefined, self); + if (!self.head) { + self.head = self.tail; + } + self.length++; +} +function unshift(self, item) { + self.head = new Node(item, undefined, self.head, self); + if (!self.tail) { + self.tail = self.head; + } + self.length++; +} +class Node { + list; + next; + prev; + value; + constructor(value, prev, next, list) { + this.list = list; + this.value = value; + if (prev) { + prev.next = this; + this.prev = prev; + } + else { + this.prev = undefined; + } + if (next) { + next.prev = this; + this.next = next; + } + else { + this.next = undefined; + } + } +} +exports.Node = Node; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/index.js b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/index.js new file mode 100644 index 00000000000000..3d81c5113b93a8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/index.js @@ -0,0 +1,379 @@ +export class Yallist { + tail; + head; + length = 0; + static create(list = []) { + return new Yallist(list); + } + constructor(list = []) { + for (const item of list) { + this.push(item); + } + } + *[Symbol.iterator]() { + for (let walker = this.head; walker; walker = walker.next) { + yield walker.value; + } + } + removeNode(node) { + if (node.list !== this) { + throw new Error('removing node which does not belong to this list'); + } + const next = node.next; + const prev = node.prev; + if (next) { + next.prev = prev; + } + if (prev) { + prev.next = next; + } + if (node === this.head) { + this.head = next; + } + if (node === this.tail) { + this.tail = prev; + } + this.length--; + node.next = undefined; + node.prev = undefined; + node.list = undefined; + return next; + } + unshiftNode(node) { + if (node === this.head) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const head = this.head; + node.list = this; + node.next = head; + if (head) { + head.prev = node; + } + this.head = node; + if (!this.tail) { + this.tail = node; + } + this.length++; + } + pushNode(node) { + if (node === this.tail) { + return; + } + if (node.list) { + node.list.removeNode(node); + } + const tail = this.tail; + node.list = this; + node.prev = tail; + if (tail) { + tail.next = node; + } + this.tail = node; + if (!this.head) { + this.head = node; + } + this.length++; + } + push(...args) { + for (let i = 0, l = args.length; i < l; i++) { + push(this, args[i]); + } + return this.length; + } + unshift(...args) { + for (var i = 0, l = args.length; i < l; i++) { + unshift(this, args[i]); + } + return this.length; + } + pop() { + if (!this.tail) { 
+ return undefined; + } + const res = this.tail.value; + const t = this.tail; + this.tail = this.tail.prev; + if (this.tail) { + this.tail.next = undefined; + } + else { + this.head = undefined; + } + t.list = undefined; + this.length--; + return res; + } + shift() { + if (!this.head) { + return undefined; + } + const res = this.head.value; + const h = this.head; + this.head = this.head.next; + if (this.head) { + this.head.prev = undefined; + } + else { + this.tail = undefined; + } + h.list = undefined; + this.length--; + return res; + } + forEach(fn, thisp) { + thisp = thisp || this; + for (let walker = this.head, i = 0; !!walker; i++) { + fn.call(thisp, walker.value, i, this); + walker = walker.next; + } + } + forEachReverse(fn, thisp) { + thisp = thisp || this; + for (let walker = this.tail, i = this.length - 1; !!walker; i--) { + fn.call(thisp, walker.value, i, this); + walker = walker.prev; + } + } + get(n) { + let i = 0; + let walker = this.head; + for (; !!walker && i < n; i++) { + walker = walker.next; + } + if (i === n && !!walker) { + return walker.value; + } + } + getReverse(n) { + let i = 0; + let walker = this.tail; + for (; !!walker && i < n; i++) { + // abort out of the list early if we hit a cycle + walker = walker.prev; + } + if (i === n && !!walker) { + return walker.value; + } + } + map(fn, thisp) { + thisp = thisp || this; + const res = new Yallist(); + for (let walker = this.head; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.next; + } + return res; + } + mapReverse(fn, thisp) { + thisp = thisp || this; + var res = new Yallist(); + for (let walker = this.tail; !!walker;) { + res.push(fn.call(thisp, walker.value, this)); + walker = walker.prev; + } + return res; + } + reduce(fn, initial) { + let acc; + let walker = this.head; + if (arguments.length > 1) { + acc = initial; + } + else if (this.head) { + walker = this.head.next; + acc = this.head.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (var i = 0; !!walker; i++) { + acc = fn(acc, walker.value, i); + walker = walker.next; + } + return acc; + } + reduceReverse(fn, initial) { + let acc; + let walker = this.tail; + if (arguments.length > 1) { + acc = initial; + } + else if (this.tail) { + walker = this.tail.prev; + acc = this.tail.value; + } + else { + throw new TypeError('Reduce of empty list with no initial value'); + } + for (let i = this.length - 1; !!walker; i--) { + acc = fn(acc, walker.value, i); + walker = walker.prev; + } + return acc; + } + toArray() { + const arr = new Array(this.length); + for (let i = 0, walker = this.head; !!walker; i++) { + arr[i] = walker.value; + walker = walker.next; + } + return arr; + } + toArrayReverse() { + const arr = new Array(this.length); + for (let i = 0, walker = this.tail; !!walker; i++) { + arr[i] = walker.value; + walker = walker.prev; + } + return arr; + } + slice(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let walker = this.head; + let i = 0; + for (i = 0; !!walker && i < from; i++) { + walker = walker.next; + } + for (; !!walker && i < to; i++, walker = walker.next) { + ret.push(walker.value); + } + return ret; + } + sliceReverse(from = 0, to = this.length) { + if (to < 0) { + to += this.length; + } + if (from < 0) { + from += this.length; + } + const ret = 
new Yallist(); + if (to < from || to < 0) { + return ret; + } + if (from < 0) { + from = 0; + } + if (to > this.length) { + to = this.length; + } + let i = this.length; + let walker = this.tail; + for (; !!walker && i > to; i--) { + walker = walker.prev; + } + for (; !!walker && i > from; i--, walker = walker.prev) { + ret.push(walker.value); + } + return ret; + } + splice(start, deleteCount = 0, ...nodes) { + if (start > this.length) { + start = this.length - 1; + } + if (start < 0) { + start = this.length + start; + } + let walker = this.head; + for (let i = 0; !!walker && i < start; i++) { + walker = walker.next; + } + const ret = []; + for (let i = 0; !!walker && i < deleteCount; i++) { + ret.push(walker.value); + walker = this.removeNode(walker); + } + if (!walker) { + walker = this.tail; + } + else if (walker !== this.tail) { + walker = walker.prev; + } + for (const v of nodes) { + walker = insertAfter(this, walker, v); + } + return ret; + } + reverse() { + const head = this.head; + const tail = this.tail; + for (let walker = head; !!walker; walker = walker.prev) { + const p = walker.prev; + walker.prev = walker.next; + walker.next = p; + } + this.head = tail; + this.tail = head; + return this; + } +} +// insertAfter undefined means "make the node the new head of list" +function insertAfter(self, node, value) { + const prev = node; + const next = node ? node.next : self.head; + const inserted = new Node(value, prev, next, self); + if (inserted.next === undefined) { + self.tail = inserted; + } + if (inserted.prev === undefined) { + self.head = inserted; + } + self.length++; + return inserted; +} +function push(self, item) { + self.tail = new Node(item, self.tail, undefined, self); + if (!self.head) { + self.head = self.tail; + } + self.length++; +} +function unshift(self, item) { + self.head = new Node(item, undefined, self.head, self); + if (!self.tail) { + self.tail = self.head; + } + self.length++; +} +export class Node { + list; + next; + prev; + value; + constructor(value, prev, next, list) { + this.list = list; + this.value = value; + if (prev) { + prev.next = this; + this.prev = prev; + } + else { + this.prev = undefined; + } + if (next) { + next.prev = this; + this.next = next; + } + else { + this.next = undefined; + } + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/package.json b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/cacache/node_modules/yallist/package.json b/deps/npm/node_modules/cacache/node_modules/yallist/package.json new file mode 100644 index 00000000000000..2f5247808bbea8 --- /dev/null +++ b/deps/npm/node_modules/cacache/node_modules/yallist/package.json @@ -0,0 +1,68 @@ +{ + "name": "yallist", + "version": "5.0.0", + "description": "Yet Another Linked List", + "files": [ + "dist" + ], + "devDependencies": { + "prettier": "^3.2.5", + "tap": "^18.7.2", + "tshy": "^1.13.1", + "typedoc": "^0.25.13" + }, + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . 
--loglevel warn --ignore-path ../../.prettierignore --cache", + "typedoc": "typedoc" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/yallist.git" + }, + "author": "Isaac Z. Schlueter (http://blog.izs.me/)", + "license": "BlueOak-1.0.0", + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 70, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "engines": { + "node": ">=18" + } +} diff --git a/deps/npm/node_modules/cacache/package.json b/deps/npm/node_modules/cacache/package.json index 6e6219158ed759..ebb0f3f8ed4108 100644 --- a/deps/npm/node_modules/cacache/package.json +++ b/deps/npm/node_modules/cacache/package.json @@ -1,6 +1,6 @@ { "name": "cacache", - "version": "18.0.4", + "version": "19.0.1", "cache-version": { "content": "2", "index": "5" @@ -16,13 +16,14 @@ "snap": "tap", "coverage": "tap", "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "npmclilint": "npmcli-lint", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -45,7 +46,7 @@ ], "license": "ISC", "dependencies": { - "@npmcli/fs": "^3.1.0", + "@npmcli/fs": "^4.0.0", "fs-minipass": "^3.0.0", "glob": "^10.2.2", "lru-cache": "^10.0.1", @@ -53,23 +54,23 @@ "minipass-collect": "^2.0.1", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", - "p-map": "^4.0.0", - "ssri": "^10.0.0", - "tar": "^6.1.11", - "unique-filename": "^3.0.0" + "p-map": "^7.0.2", + "ssri": "^12.0.0", + "tar": "^7.4.3", + "unique-filename": "^4.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", "windowsCI": false, - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "author": "GitHub Inc.", diff --git a/deps/npm/node_modules/cmd-shim/package.json b/deps/npm/node_modules/cmd-shim/package.json index 094ca2df619d21..5f2e85d1c73db0 100644 --- a/deps/npm/node_modules/cmd-shim/package.json +++ b/deps/npm/node_modules/cmd-shim/package.json @@ -1,15 +1,16 @@ { "name": "cmd-shim", - "version": "6.0.3", + "version": "7.0.0", "description": "Used in npm for command line application support", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -17,8 +18,8 @@ }, "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.1", "tap": "^16.0.1" }, "files": [ @@ -36,12 +37,12 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.1", "publish": true } } diff --git a/deps/npm/node_modules/hosted-git-info/package.json b/deps/npm/node_modules/hosted-git-info/package.json index d7eebd474f6258..3bb8bcd1f49825 100644 --- a/deps/npm/node_modules/hosted-git-info/package.json +++ b/deps/npm/node_modules/hosted-git-info/package.json @@ -1,6 +1,6 @@ { "name": "hosted-git-info", - "version": "7.0.2", + "version": "8.0.0", "description": "Provides metadata and conversions from repository urls for GitHub, Bitbucket and GitLab", "main": "./lib/index.js", "repository": { @@ -24,17 +24,18 @@ "snap": "tap", "test": "tap", "test:coverage": "tap --coverage-report=html", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "dependencies": { "lru-cache": "^10.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "files": [ @@ -42,7 +43,7 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "color": 1, @@ -54,7 +55,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/ignore-walk/package.json b/deps/npm/node_modules/ignore-walk/package.json index f44a7a587a10bb..125fc071939dbf 100644 --- a/deps/npm/node_modules/ignore-walk/package.json +++ b/deps/npm/node_modules/ignore-walk/package.json @@ -1,23 +1,24 @@ { "name": "ignore-walk", - "version": "6.0.5", + "version": "7.0.0", "description": "Nested/recursive `.gitignore`/`.npmignore` parsing and filtering.", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "mutate-fs": "^2.1.1", "tap": "^16.0.1" }, "scripts": { "test": "tap", "posttest": "npm run lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", "test:windows-coverage": "npm pkg set tap.statements=99 --json && npm pkg set tap.branches=98 --json && npm pkg set tap.lines=99 --json", - "snap": "tap" + "snap": "tap", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [ "ignorefile", @@ -52,11 +53,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "content": "scripts/template-oss", "publish": "true" } diff --git a/deps/npm/node_modules/ini/package.json b/deps/npm/node_modules/ini/package.json index 67aa9278259473..6a3995f158cc5b 100644 --- a/deps/npm/node_modules/ini/package.json +++ b/deps/npm/node_modules/ini/package.json @@ -2,16 +2,16 @@ "author": "GitHub Inc.", "name": "ini", "description": "An ini encoder/decoder for node", - "version": "4.1.3", + "version": "5.0.0", "repository": { "type": "git", "url": "git+https://github.com/npm/ini.git" }, "main": "lib/ini.js", "scripts": { - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "test": "tap", "snap": "tap", "posttest": "npm run lint", @@ -19,8 +19,8 @@ "template-oss-apply": "template-oss-apply --force" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "license": "ISC", @@ -29,11 +29,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/init-package-json/package.json b/deps/npm/node_modules/init-package-json/package.json index 969e124378966e..21021ab701244b 100644 --- a/deps/npm/node_modules/init-package-json/package.json +++ b/deps/npm/node_modules/init-package-json/package.json @@ -1,15 +1,16 @@ { "name": "init-package-json", - "version": "6.0.3", + "version": "7.0.1", "main": "lib/init-package-json.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -19,22 +20,22 @@ "license": "ISC", "description": "A node module to get your node module started", "dependencies": { - "@npmcli/package-json": "^5.0.0", - "npm-package-arg": "^11.0.0", - "promzard": "^1.0.0", - "read": "^3.0.1", + "@npmcli/package-json": "^6.0.0", + "npm-package-arg": "^12.0.0", + "promzard": "^2.0.0", + "read": "^4.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4", - "validate-npm-package-name": "^5.0.0" + "validate-npm-package-name": "^6.0.0" }, "devDependencies": { "@npmcli/config": "^8.2.0", - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "test-ignore": "fixtures/", @@ -60,7 +61,7 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/json-parse-even-better-errors/package.json b/deps/npm/node_modules/json-parse-even-better-errors/package.json index c7156df325fa2b..193f0d9d459a53 100644 --- a/deps/npm/node_modules/json-parse-even-better-errors/package.json +++ b/deps/npm/node_modules/json-parse-even-better-errors/package.json @@ -1,6 +1,6 @@ { "name": "json-parse-even-better-errors", - "version": "3.0.2", + "version": "4.0.0", "description": "JSON.parse with context information on error", "main": "lib/index.js", "files": [ @@ -10,11 +10,12 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -27,8 +28,8 @@ "author": "GitHub Inc.", "license": "MIT", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "tap": { @@ -39,11 +40,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/libnpmaccess/package.json b/deps/npm/node_modules/libnpmaccess/package.json index b9d92e4eda6674..0022437adadc60 100644 --- a/deps/npm/node_modules/libnpmaccess/package.json +++ b/deps/npm/node_modules/libnpmaccess/package.json @@ -1,6 +1,6 @@ { "name": "libnpmaccess", - "version": "8.0.6", + "version": "9.0.0", "description": "programmatic library for `npm access` commands", "author": "GitHub Inc.", "license": "ISC", @@ -16,7 +16,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", @@ -30,11 +30,11 @@ "bugs": "https://github.com/npm/libnpmaccess/issues", "homepage": "https://npmjs.com/package/libnpmaccess", "dependencies": { - "npm-package-arg": "^11.0.2", - "npm-registry-fetch": "^17.0.1" + "npm-package-arg": "^12.0.0", + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "files": [ "bin/", diff --git a/deps/npm/node_modules/libnpmdiff/package.json b/deps/npm/node_modules/libnpmdiff/package.json index 0e6d23d54d1c7a..ccb499e78ff66d 100644 --- a/deps/npm/node_modules/libnpmdiff/package.json +++ b/deps/npm/node_modules/libnpmdiff/package.json @@ -1,6 +1,6 @@ { "name": "libnpmdiff", - "version": "6.1.4", + "version": "7.0.0", "description": "The registry diff", "repository": { "type": "git", @@ -13,7 +13,7 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "keywords": [ "npm", @@ -42,18 +42,18 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^7.5.4", - "@npmcli/installed-package-contents": "^2.1.0", + "@npmcli/arborist": "^8.0.0", + "@npmcli/installed-package-contents": "^3.0.0", "binary-extensions": "^2.3.0", "diff": "^5.1.0", "minimatch": "^9.0.4", - "npm-package-arg": "^11.0.2", - "pacote": "^18.0.6", + "npm-package-arg": "^12.0.0", + "pacote": "^19.0.0", "tar": "^6.2.1" }, "templateOSS": { diff --git a/deps/npm/node_modules/libnpmexec/package.json b/deps/npm/node_modules/libnpmexec/package.json index 627af537b4240b..497b971a0841bc 100644 --- a/deps/npm/node_modules/libnpmexec/package.json +++ b/deps/npm/node_modules/libnpmexec/package.json @@ -1,13 +1,13 @@ { "name": "libnpmexec", - "version": "8.1.4", + "version": "9.0.0", "files": [ "bin/", "lib/" ], "main": "lib/index.js", "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "description": "npm exec (npx) programmatic API", "repository": { @@ -50,24 +50,24 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", - "bin-links": "^4.0.4", + "bin-links": "^5.0.0", "chalk": "^5.2.0", "just-extend": "^6.2.0", "just-safe-set": "^4.2.1", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^7.5.4", - "@npmcli/run-script": "^8.1.0", + "@npmcli/arborist": "^8.0.0", + "@npmcli/run-script": "^9.0.1", "ci-info": "^4.0.0", - "npm-package-arg": "^11.0.2", - "pacote": "^18.0.6", - "proc-log": "^4.2.0", - "read": "^3.0.1", - "read-package-json-fast": "^3.0.2", + "npm-package-arg": 
"^12.0.0", + "pacote": "^19.0.0", + "proc-log": "^5.0.0", + "read": "^4.0.0", + "read-package-json-fast": "^4.0.0", "semver": "^7.3.7", "walk-up-path": "^3.0.1" }, diff --git a/deps/npm/node_modules/libnpmfund/package.json b/deps/npm/node_modules/libnpmfund/package.json index 03fe5f9b4e5a5f..07c1e33f2a7c38 100644 --- a/deps/npm/node_modules/libnpmfund/package.json +++ b/deps/npm/node_modules/libnpmfund/package.json @@ -1,6 +1,6 @@ { "name": "libnpmfund", - "version": "5.0.12", + "version": "6.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -41,15 +41,15 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/arborist": "^7.5.4" + "@npmcli/arborist": "^8.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmhook/package.json b/deps/npm/node_modules/libnpmhook/package.json index 741cf8a975b0c2..09157ab08cb209 100644 --- a/deps/npm/node_modules/libnpmhook/package.json +++ b/deps/npm/node_modules/libnpmhook/package.json @@ -1,6 +1,6 @@ { "name": "libnpmhook", - "version": "10.0.5", + "version": "11.0.0", "description": "programmatic API for managing npm registry hooks", "main": "lib/index.js", "files": [ @@ -32,16 +32,16 @@ "license": "ISC", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "tap": "^16.3.8" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmorg/package.json b/deps/npm/node_modules/libnpmorg/package.json index 1076474eb97b1f..38800308a31a4e 100644 --- a/deps/npm/node_modules/libnpmorg/package.json +++ b/deps/npm/node_modules/libnpmorg/package.json @@ -1,6 +1,6 @@ { "name": "libnpmorg", - "version": "6.0.6", + "version": "7.0.0", "description": "Programmatic api for `npm org` commands", "author": "GitHub Inc.", "main": "lib/index.js", @@ -28,7 +28,7 @@ "lib/" ], "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "minipass": "^7.1.1", "nock": "^13.3.3", @@ -43,10 +43,10 @@ "homepage": "https://npmjs.com/package/libnpmorg", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmpack/package.json b/deps/npm/node_modules/libnpmpack/package.json index 588921bbfa9655..35d12425b02ff7 100644 --- a/deps/npm/node_modules/libnpmpack/package.json +++ b/deps/npm/node_modules/libnpmpack/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpack", - "version": "7.0.4", + "version": "8.0.0", "description": "Programmatic API for the bits behind npm pack", "author": "GitHub Inc.", "main": "lib/index.js", @@ -23,7 +23,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "spawk": "^1.7.1", @@ -37,13 +37,13 @@ "bugs": "https://github.com/npm/libnpmpack/issues", "homepage": "https://npmjs.com/package/libnpmpack", "dependencies": { - "@npmcli/arborist": "^7.5.4", - "@npmcli/run-script": "^8.1.0", - "npm-package-arg": "^11.0.2", - "pacote": "^18.0.6" + "@npmcli/arborist": "^8.0.0", + "@npmcli/run-script": "^9.0.1", + "npm-package-arg": "^12.0.0", + "pacote": "^19.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmpublish/package.json b/deps/npm/node_modules/libnpmpublish/package.json index e19d82dd349edb..f63d50f4e7b9c1 100644 --- a/deps/npm/node_modules/libnpmpublish/package.json +++ b/deps/npm/node_modules/libnpmpublish/package.json @@ -1,6 +1,6 @@ { "name": "libnpmpublish", - "version": "9.0.9", + "version": "10.0.0", "description": "Programmatic API for the bits behind npm publish and unpublish", "author": "GitHub Inc.", "main": "lib/index.js", @@ -24,7 +24,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", @@ -40,16 +40,16 @@ "homepage": "https://npmjs.com/package/libnpmpublish", "dependencies": { "ci-info": "^4.0.0", - "normalize-package-data": "^6.0.1", - "npm-package-arg": "^11.0.2", - "npm-registry-fetch": "^17.0.1", - "proc-log": "^4.2.0", + "normalize-package-data": "^7.0.0", + "npm-package-arg": "^12.0.0", + "npm-registry-fetch": "^18.0.1", + "proc-log": "^5.0.0", "semver": "^7.3.7", "sigstore": "^2.2.0", - "ssri": "^10.0.6" + "ssri": "^12.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmsearch/package.json b/deps/npm/node_modules/libnpmsearch/package.json index d43aaa4a45459c..a5d2ae424913ef 100644 --- a/deps/npm/node_modules/libnpmsearch/package.json +++ b/deps/npm/node_modules/libnpmsearch/package.json @@ -1,6 +1,6 @@ { "name": "libnpmsearch", - "version": "7.0.6", + "version": "8.0.0", "description": "Programmatic API for searching in npm and compatible registries.", "author": "GitHub Inc.", "main": "lib/index.js", @@ -26,7 +26,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "tap": "^16.3.8" @@ -39,10 +39,10 @@ "bugs": "https://github.com/npm/libnpmsearch/issues", "homepage": "https://npmjs.com/package/libnpmsearch", "dependencies": { - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmteam/package.json b/deps/npm/node_modules/libnpmteam/package.json index 848769961182a5..fd8f69669f15cf 100644 --- a/deps/npm/node_modules/libnpmteam/package.json +++ b/deps/npm/node_modules/libnpmteam/package.json @@ -1,7 +1,7 @@ { "name": "libnpmteam", "description": "npm Team management APIs", - "version": "6.0.5", + "version": "7.0.0", "author": "GitHub Inc.", "license": "ISC", "main": "lib/index.js", @@ -16,7 +16,7 @@ "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "nock": "^13.3.3", "tap": "^16.3.8" @@ -33,10 +33,10 @@ "homepage": "https://npmjs.com/package/libnpmteam", "dependencies": { "aproba": "^2.0.0", - "npm-registry-fetch": "^17.0.1" + "npm-registry-fetch": "^18.0.1" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", diff --git a/deps/npm/node_modules/libnpmversion/package.json b/deps/npm/node_modules/libnpmversion/package.json index ccfe5a05cb30a2..cdc9e7bbdf718a 100644 --- a/deps/npm/node_modules/libnpmversion/package.json +++ b/deps/npm/node_modules/libnpmversion/package.json @@ -1,6 +1,6 @@ { "name": "libnpmversion", - "version": "6.0.3", + "version": "7.0.0", "main": "lib/index.js", "files": [ "bin/", @@ -32,20 +32,20 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", + "@npmcli/eslint-config": "^5.0.1", "@npmcli/template-oss": "4.23.3", "require-inject": "^1.4.4", "tap": "^16.3.8" }, "dependencies": { - "@npmcli/git": "^5.0.7", - "@npmcli/run-script": "^8.1.0", - "json-parse-even-better-errors": "^3.0.2", - "proc-log": "^4.2.0", + "@npmcli/git": "^6.0.1", + "@npmcli/run-script": "^9.0.1", + "json-parse-even-better-errors": "^4.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.7" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", diff --git a/deps/npm/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/make-fetch-happen/package.json index 7adb4d1e7f9719..0868ff6d7efa54 100644 --- a/deps/npm/node_modules/make-fetch-happen/package.json +++ b/deps/npm/node_modules/make-fetch-happen/package.json @@ -1,6 +1,6 @@ { "name": "make-fetch-happen", - "version": "13.0.1", + "version": "14.0.1", "description": "Opinionated, caching, retrying fetch client", "main": "lib/index.js", "files": [ @@ -10,16 +10,16 @@ "scripts": { "test": "tap", "posttest": "npm run lint", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "postlint": "template-oss-check", "snap": "tap", "template-oss-apply": "template-oss-apply --force" }, "repository": { "type": "git", - "url": "https://github.com/npm/make-fetch-happen.git" + "url": "git+https://github.com/npm/make-fetch-happen.git" }, "keywords": [ "http", @@ -33,29 +33,28 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "@npmcli/agent": "^2.0.0", - "cacache": "^18.0.0", + "@npmcli/agent": "^3.0.0", + "cacache": "^19.0.1", "http-cache-semantics": "^4.1.1", - "is-lambda": "^1.0.1", "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", + "minipass-fetch": "^4.0.0", "minipass-flush": "^1.0.5", "minipass-pipeline": "^1.2.4", "negotiator": "^0.6.3", - "proc-log": "^4.2.0", + "proc-log": "^5.0.0", "promise-retry": "^2.0.1", - "ssri": "^10.0.0" + "ssri": "^12.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "nock": "^13.2.4", "safe-buffer": "^5.2.1", "standard-version": "^9.3.2", "tap": "^16.0.0" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "color": 1, @@ -69,7 +68,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE new file mode 100644 index 00000000000000..49f7efe431c9ea --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors +Copyright (c) 2017-2023 Node.js contributors. All rights reserved. +Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved. + +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+ +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js new file mode 100644 index 00000000000000..dfc2c1957bfc99 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/constants.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.constants = void 0; +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. +const zlib_1 = __importDefault(require("zlib")); +/* c8 ignore start */ +const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +exports.constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + 
BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js new file mode 100644 index 00000000000000..ad65eef0495076 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/index.js @@ -0,0 +1,352 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0; +const assert_1 = __importDefault(require("assert")); +const buffer_1 = require("buffer"); +const minipass_1 = require("minipass"); +const zlib_1 = __importDefault(require("zlib")); +const constants_js_1 = require("./constants.js"); +var constants_js_2 = require("./constants.js"); +Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } }); +const OriginalBufferConcat = buffer_1.Buffer.concat; +const _superWrite = Symbol('_superWrite'); +class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +exports.ZlibError = ZlibError; +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. 
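+// [editor's note -- illustrative sketch, not part of upstream minizlib]
+// A minimal round-trip showing how the classes defined below are consumed.
+// It assumes only this package's own exports plus node:assert; the payload
+// is made up. Kept commented out so module behavior is unchanged:
+//
+//   const { Gzip, Gunzip } = require('minizlib')
+//   const assert = require('node:assert')
+//   const data = Buffer.from('hello, zlib')
+//   const chunks = []
+//   const gzip = new Gzip()
+//   const gunzip = new Gunzip()
+//   gzip.pipe(gunzip) // Minipass pipe: gunzip.end() is called for us
+//   gunzip.on('data', c => chunks.push(c))
+//   gunzip.on('end', () =>
+//     assert.deepStrictEqual(Buffer.concat(chunks), data))
+//   gzip.end(data) // write, flush with the finish flag, then end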
+const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends minipass_1.Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new zlib_1.default[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. + this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = buffer_1.Buffer.from(chunk, encoding); + if (this.#sawError) + return; + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. 
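+ // [editor's note -- generic sketch, not upstream code] The next line
+ // swaps a global for the duration of one call; this module puts the real
+ // Buffer.concat back on both the success and the error path below. The
+ // same idea reduced to a skeleton, with a hypothetical doWork():
+ //
+ //   const original = Buffer.concat
+ //   Buffer.concat = args => args // identity: hand back the raw chunks
+ //   try {
+ //     doWork() // anything that would otherwise call Buffer.concat
+ //   } finally {
+ //     Buffer.concat = original // always restore the real one
+ //   }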
+ buffer_1.Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + buffer_1.Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + buffer_1.Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. + this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](buffer_1.Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](buffer_1.Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH; + opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants_js_1.constants.Z_SYNC_FLUSH); + (0, assert_1.default)(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. 
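+ // [editor's note -- generic sketch, not upstream code] What follows is a
+ // save/override/restore of a single method. Reduced form, with
+ // hypothetical names (obj, syncFlush, level, strategy):
+ //
+ //   const saved = obj.flush
+ //   obj.flush = (flag, cb) => { syncFlush(flag); cb?.() } // sync stand-in
+ //   try {
+ //     obj.params(level, strategy) // may invoke obj.flush internally
+ //   } finally {
+ //     obj.flush = saved // put the real async flush back
+ //   }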
+ const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +exports.Zlib = Zlib; +// minimal 2-byte header +class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +exports.Deflate = Deflate; +class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +exports.Inflate = Inflate; +class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +exports.Gzip = Gzip; +class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +exports.Gunzip = Gunzip; +// raw - no header +class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +exports.DeflateRaw = DeflateRaw; +class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +exports.InflateRaw = InflateRaw; +// auto-detect header. +class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +exports.Unzip = Unzip; +class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +exports.Brotli = Brotli; +class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +exports.BrotliCompress = BrotliCompress; +class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +exports.BrotliDecompress = BrotliDecompress; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js new file mode 100644 index 00000000000000..7faf40be5068d0 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/constants.js @@ -0,0 +1,117 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
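+// [editor's note -- illustrative sketch, not upstream code] The merge below
+// layers the hard-coded table underneath the live zlib.constants, so any
+// value the runtime really exports wins over its fallback. In isolation:
+//
+//   import zlib from 'zlib'
+//   const fallback = { Z_NO_FLUSH: 0, ZLIB_VERNUM: 4736 }
+//   const merged = Object.freeze(
+//     Object.assign(Object.create(null), fallback, zlib.constants || {}))
+//   // merged.Z_NO_FLUSH is the real runtime value whenever one exists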
+import realZlib from 'zlib'; +/* c8 ignore start */ +const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +export const constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No 
newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js new file mode 100644 index 00000000000000..a6269b505f47cc --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/index.js @@ -0,0 +1,333 @@ +import assert from 'assert'; +import { Buffer } from 'buffer'; +import { Minipass } from 'minipass'; +import realZlib from 'zlib'; +import { constants } from './constants.js'; +export { constants } from './constants.js'; +const OriginalBufferConcat = Buffer.concat; +const _superWrite = Symbol('_superWrite'); +export class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new realZlib[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
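+            // close the binding first so nothing else reaches it, then re-emit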
+            this.close();
+            this.emit('error', err);
+        };
+        this.#handle?.on('error', er => this.#onError(new ZlibError(er)));
+        this.once('end', () => this.close());
+    }
+    close() {
+        if (this.#handle) {
+            this.#handle.close();
+            this.#handle = undefined;
+            this.emit('close');
+        }
+    }
+    reset() {
+        if (!this.#sawError) {
+            assert(this.#handle, 'zlib binding closed');
+            //@ts-ignore
+            return this.#handle.reset?.();
+        }
+    }
+    flush(flushFlag) {
+        if (this.ended)
+            return;
+        if (typeof flushFlag !== 'number')
+            flushFlag = this.#fullFlushFlag;
+        this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag }));
+    }
+    end(chunk, encoding, cb) {
+        /* c8 ignore start */
+        if (typeof chunk === 'function') {
+            cb = chunk;
+            encoding = undefined;
+            chunk = undefined;
+        }
+        if (typeof encoding === 'function') {
+            cb = encoding;
+            encoding = undefined;
+        }
+        /* c8 ignore stop */
+        if (chunk) {
+            if (encoding)
+                this.write(chunk, encoding);
+            else
+                this.write(chunk);
+        }
+        this.flush(this.#finishFlushFlag);
+        this.#ended = true;
+        return super.end(cb);
+    }
+    get ended() {
+        return this.#ended;
+    }
+    // overridden in the gzip classes to do portable writes
+    [_superWrite](data) {
+        return super.write(data);
+    }
+    write(chunk, encoding, cb) {
+        // process the chunk using the sync process
+        // then super.write() all the outputted chunks
+        if (typeof encoding === 'function')
+            (cb = encoding), (encoding = 'utf8');
+        if (typeof chunk === 'string')
+            chunk = Buffer.from(chunk, encoding);
+        if (this.#sawError)
+            return;
+        assert(this.#handle, 'zlib binding closed');
+        // _processChunk tries to .close() the native handle after it's done, so we
+        // intercept that by temporarily making it a no-op.
+        // diving into the node:zlib internals a bit here
+        const nativeHandle = this.#handle._handle;
+        const originalNativeClose = nativeHandle.close;
+        nativeHandle.close = () => { };
+        const originalClose = this.#handle.close;
+        this.#handle.close = () => { };
+        // It also calls `Buffer.concat()` at the end, which may be convenient
+        // for some, but which we are not interested in as it slows us down.
+        Buffer.concat = args => args;
+        let result = undefined;
+        try {
+            const flushFlag = typeof chunk[_flushFlag] === 'number'
+                ? chunk[_flushFlag]
+                : this.#flushFlag;
+            result = this.#handle._processChunk(chunk, flushFlag);
+            // if we don't throw, reset it back how it was
+            Buffer.concat = OriginalBufferConcat;
+        }
+        catch (err) {
+            // or if we do, put Buffer.concat() back before we emit error
+            // Error events call into user code, which may call Buffer.concat()
+            Buffer.concat = OriginalBufferConcat;
+            this.#onError(new ZlibError(err));
+        }
+        finally {
+            if (this.#handle) {
+                // Core zlib resets `_handle` to null after attempting to close the
+                // native handle. Our no-op handler prevented actual closure, but we
+                // need to restore the `._handle` property.
+                this.#handle._handle = nativeHandle;
+                nativeHandle.close = originalNativeClose;
+                this.#handle.close = originalClose;
+                // `_processChunk()` adds an 'error' listener. If we don't remove it
+                // after each call, these handlers start piling up.
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +export class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants.Z_FINISH; + opts.fullFlushFlag = constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants.Z_SYNC_FLUSH); + assert(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +// minimal 2-byte header +export class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +export class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +export class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +export class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +// raw - no header +export class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +export class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +// auto-detect header. 
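+// usage sketch (illustrative, not part of this module): every class here is a
+// Minipass stream, so decompression is just piping, e.g.
+//   source.pipe(new Unzip()).pipe(destination)
+// where Unzip accepts either gzip- or zlib-wrapped input.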
+export class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +export class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +export class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +export class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json new file mode 100644 index 00000000000000..e94623ff43d353 --- /dev/null +++ b/deps/npm/node_modules/minipass-fetch/node_modules/minizlib/package.json @@ -0,0 +1,81 @@ +{ + "name": "minizlib", + "version": "3.0.1", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "./dist/commonjs/index.js", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "@types/node": "^20.11.29", + "mkdirp": "^3.0.1", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">= 18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/minipass-fetch/package.json index d491a7fba126d0..6e248345980387 100644 --- a/deps/npm/node_modules/minipass-fetch/package.json +++ b/deps/npm/node_modules/minipass-fetch/package.json @@ -1,6 +1,6 @@ { "name": "minipass-fetch", - "version": "3.0.5", + "version": "4.0.0", "description": "An implementation of window.fetch in Node.js using Minipass streams", "license": "MIT", "main": "lib/index.js", @@ -8,11 +8,12 @@ "test:tls-fixtures": "./test/fixtures/tls/setup.sh", "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "coverage-map": "map.js", @@ -23,8 +24,8 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "@ungap/url-search-params": "^0.2.2", "abort-controller": "^3.0.0", "abortcontroller-polyfill": "~1.7.3", @@ -38,7 +39,7 @@ "dependencies": { "minipass": "^7.0.3", "minipass-sized": "^1.0.3", - "minizlib": "^2.1.2" + "minizlib": "^3.0.1" }, "optionalDependencies": { "encoding": "^0.1.13" @@ -58,12 +59,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/mute-stream/package.json b/deps/npm/node_modules/mute-stream/package.json index 37b2f5070ed69f..3725daf0810feb 100644 --- a/deps/npm/node_modules/mute-stream/package.json +++ b/deps/npm/node_modules/mute-stream/package.json @@ -1,24 +1,25 @@ { "name": "mute-stream", - "version": "1.0.0", + "version": "2.0.0", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/mute-stream.git" + "url": "git+https://github.com/npm/mute-stream.git" }, "keywords": [ "mute", @@ -43,10 +44,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 00000000000000..c541b93001517e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,206 @@ +'use strict' + +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const timers = require('timers/promises') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') +const { Agent: AgentBase } = require('agent-base') + +module.exports = class Agent extends AgentBase { + #options + #timeouts + #proxy + #noProxy + #ProxyAgent + + constructor (options = {}) { + const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) + + super(normalizedOptions) + + this.#options = normalizedOptions + this.#timeouts = timeouts + + if (proxy) { + this.#proxy = new URL(proxy) + this.#noProxy = noProxy + this.#ProxyAgent = getProxyAgent(proxy) + } + } + + get proxy () { + return this.#proxy ? { url: this.#proxy } : {} + } + + #getProxy (options) { + if (!this.#proxy) { + return + } + + const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { + proxy: this.#proxy, + noProxy: this.#noProxy, + }) + + if (!proxy) { + return + } + + const cacheKey = cacheOptions({ + ...options, + ...this.#options, + timeouts: this.#timeouts, + proxy, + }) + + if (proxyCache.has(cacheKey)) { + return proxyCache.get(cacheKey) + } + + let ProxyAgent = this.#ProxyAgent + if (Array.isArray(ProxyAgent)) { + ProxyAgent = this.isSecureEndpoint(options) ? 
ProxyAgent[1] : ProxyAgent[0] + } + + const proxyAgent = new ProxyAgent(proxy, { + ...this.#options, + socketOptions: { family: this.#options.family }, + }) + proxyCache.set(cacheKey, proxyAgent) + + return proxyAgent + } + + // takes an array of promises and races them against the connection timeout + // which will throw the necessary error if it is hit. This will return the + // result of the promise race. + async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { + if (timeout) { + const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) + .then(() => { + throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) + promises.push(connectionTimeout) + } + + let result + try { + result = await Promise.race(promises) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return result + } + + async connect (request, options) { + // if the connection does not have its own lookup function + // set, then use the one from our options + options.lookup ??= this.#options.lookup + + let socket + let timeout = this.#timeouts.connection + const isSecureEndpoint = this.isSecureEndpoint(options) + + const proxy = this.#getProxy(options) + if (proxy) { + // some of the proxies will wait for the socket to fully connect before + // returning so we have to await this while also racing it against the + // connection timeout. + const start = Date.now() + socket = await this.#timeoutConnection({ + options, + timeout, + promises: [proxy.connect(request, options)], + }) + // see how much time proxy.connect took and subtract it from + // the timeout + if (timeout) { + timeout = timeout - (Date.now() - start) + } + } else { + socket = (isSecureEndpoint ? tls : net).connect(options) + } + + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + + const abortController = new AbortController() + const { signal } = abortController + + const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] + ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) + : Promise.resolve() + + await this.#timeoutConnection({ + options, + timeout, + promises: [ + connectPromise, + once(socket, 'error', { signal }).then((err) => { + throw err[0] + }), + ], + }, abortController) + + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) + }) + } + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 
'keep-alive' : 'close')
+
+    if (this.#timeouts.response) {
+      let responseTimeout
+      request.once('finish', () => {
+        responseTimeout = setTimeout(() => {
+          request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy))
+        }, this.#timeouts.response)
+      })
+      request.once('response', () => {
+        clearTimeout(responseTimeout)
+      })
+    }
+
+    if (this.#timeouts.transfer) {
+      let transferTimeout
+      request.once('response', (res) => {
+        transferTimeout = setTimeout(() => {
+          res.destroy(new Errors.TransferTimeoutError(request, this.#proxy))
+        }, this.#timeouts.transfer)
+        res.once('close', () => {
+          clearTimeout(transferTimeout)
+        })
+      })
+    }
+
+    return super.addRequest(request, options)
+  }
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js
new file mode 100644
index 00000000000000..3c6946c566d736
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/dns.js
@@ -0,0 +1,53 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const dns = require('dns')
+
+// this is a factory so that each request can have its own opts (i.e. ttl)
+// while still sharing the cache across all requests
+const cache = new LRUCache({ max: 50 })
+
+const getOptions = ({
+  family = 0,
+  hints = dns.ADDRCONFIG,
+  all = false,
+  verbatim = undefined,
+  ttl = 5 * 60 * 1000,
+  lookup = dns.lookup,
+}) => ({
+  // hints and lookup are returned since both are top level properties to (net|tls).connect
+  hints,
+  lookup: (hostname, ...args) => {
+    const callback = args.pop() // callback is always last arg
+    const lookupOptions = args[0] ?? {}
+
+    const options = {
+      family,
+      hints,
+      all,
+      verbatim,
+      ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions),
+    }
+
+    const key = JSON.stringify({ hostname, ...options })
+
+    if (cache.has(key)) {
+      const cached = cache.get(key)
+      return process.nextTick(callback, null, ...cached)
+    }
+
+    lookup(hostname, options, (err, ...result) => {
+      if (err) {
+        return callback(err)
+      }
+
+      cache.set(key, result, { ttl })
+      return callback(null, ...result)
+    })
+  },
+})
+
+module.exports = {
+  cache,
+  getOptions,
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js
new file mode 100644
index 00000000000000..70475aec8eb357
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/errors.js
@@ -0,0 +1,61 @@
+'use strict'
+
+class InvalidProxyProtocolError extends Error {
+  constructor (url) {
+    super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``)
+    this.code = 'EINVALIDPROXY'
+    this.proxy = url
+  }
+}
+
+class ConnectionTimeoutError extends Error {
+  constructor (host) {
+    super(`Timeout connecting to host \`${host}\``)
+    this.code = 'ECONNECTIONTIMEOUT'
+    this.host = host
+  }
+}
+
+class IdleTimeoutError extends Error {
+  constructor (host) {
+    super(`Idle timeout reached for host \`${host}\``)
+    this.code = 'EIDLETIMEOUT'
+    this.host = host
+  }
+}
+
+class ResponseTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Response timeout '
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
+    }
+    msg += `connecting to host \`${request.host}\``
+    super(msg)
+    this.code = 'ERESPONSETIMEOUT'
+    this.proxy = proxy
+    this.request = request
+  }
+}
+
+class TransferTimeoutError extends Error {
+  constructor (request, proxy) {
+    let msg = 'Transfer timeout '
+    if (proxy) {
+      msg += `from proxy \`${proxy.host}\` `
+    }
+    msg += `for \`${request.host}\``
+    super(msg)
+    this.code = 'ETRANSFERTIMEOUT'
+    this.proxy = proxy
+    this.request = request
+  }
+}
+
+module.exports = {
+  InvalidProxyProtocolError,
+  ConnectionTimeoutError,
+  IdleTimeoutError,
+  ResponseTimeoutError,
+  TransferTimeoutError,
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js
new file mode 100644
index 00000000000000..b33d6eaef07a21
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/index.js
@@ -0,0 +1,56 @@
+'use strict'
+
+const { LRUCache } = require('lru-cache')
+const { normalizeOptions, cacheOptions } = require('./options')
+const { getProxy, proxyCache } = require('./proxy.js')
+const dns = require('./dns.js')
+const Agent = require('./agents.js')
+
+const agentCache = new LRUCache({ max: 20 })
+
+const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => {
+  // false has meaning so this can't be a simple truthiness check
+  if (agent != null) {
+    return agent
+  }
+
+  url = new URL(url)
+
+  const proxyForUrl = getProxy(url, { proxy, noProxy })
+  const normalizedOptions = {
+    ...normalizeOptions(options),
+    proxy: proxyForUrl,
+  }
+
+  const cacheKey = cacheOptions({
+    ...normalizedOptions,
+    secureEndpoint: url.protocol === 'https:',
+  })
+
+  if (agentCache.has(cacheKey)) {
+    return agentCache.get(cacheKey)
+  }
+
+  const newAgent = new Agent(normalizedOptions)
+  agentCache.set(cacheKey, newAgent)
+
+  return newAgent
+}
+
+module.exports = {
+  getAgent,
+  Agent,
+  // these are exported for backwards compatibility
+  HttpAgent: Agent,
+  HttpsAgent: Agent,
+  cache: {
+    proxy: proxyCache,
+    agent: agentCache,
+    dns: dns.cache,
+    clear: () => {
+      proxyCache.clear()
+      agentCache.clear()
+      dns.cache.clear()
+    },
+  },
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js
new file mode 100644
index 00000000000000..0bf53f725f0846
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/options.js
@@ -0,0 +1,86 @@
+'use strict'
+
+const dns = require('./dns')
+
+const normalizeOptions = (opts) => {
+  const family = parseInt(opts.family ?? '0', 10)
+  const keepAlive = opts.keepAlive ?? true
+
+  const normalized = {
+    // nodejs http agent options. these are all the defaults
+    // but kept here to increase the likelihood of cache hits
+    // https://nodejs.org/api/http.html#new-agentoptions
+    keepAliveMsecs: keepAlive ? 1000 : undefined,
+    maxSockets: opts.maxSockets ?? 15,
+    maxTotalSockets: Infinity,
+    maxFreeSockets: keepAlive ? 256 : undefined,
+    scheduling: 'fifo',
+    // then spread the rest of the options
+    ...opts,
+    // we already set these to their defaults that we want
+    family,
+    keepAlive,
+    // our custom timeout options
+    timeouts: {
+      // the standard timeout option is mapped to our idle timeout
+      // and then deleted below
+      idle: opts.timeout ?? 0,
+      connection: 0,
+      response: 0,
+      transfer: 0,
+      ...opts.timeouts,
+    },
+    // get the dns options that go at the top level of socket connection
+    ...dns.getOptions({ family, ...opts.dns }),
+  }
+
+  // remove timeout since we already used it to set our own idle timeout
+  delete normalized.timeout
+
+  return normalized
+}
+
+const createKey = (obj) => {
+  let key = ''
+  // entry keys are always strings, so compare them as strings
+  const sorted = Object.entries(obj).sort((a, b) => a[0].localeCompare(b[0]))
+  for (let [k, v] of sorted) {
+    if (v == null) {
+      v = 'null'
+    } else if (v instanceof URL) {
+      v = v.toString()
+    } else if (typeof v === 'object') {
+      v = createKey(v)
+    }
+    key += `${k}:${v}:`
+  }
+  return key
+}
+
+const cacheOptions = ({ secureEndpoint, ...options }) => createKey({
+  secureEndpoint: !!secureEndpoint,
+  // socket connect options
+  family: options.family,
+  hints: options.hints,
+  localAddress: options.localAddress,
+  // tls specific connect options
+  strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false,
+  ca: secureEndpoint ? options.ca : null,
+  cert: secureEndpoint ? options.cert : null,
+  key: secureEndpoint ? options.key : null,
+  // http agent options
+  keepAlive: options.keepAlive,
+  keepAliveMsecs: options.keepAliveMsecs,
+  maxSockets: options.maxSockets,
+  maxTotalSockets: options.maxTotalSockets,
+  maxFreeSockets: options.maxFreeSockets,
+  scheduling: options.scheduling,
+  // timeout options
+  timeouts: options.timeouts,
+  // proxy
+  proxy: options.proxy,
+})
+
+module.exports = {
+  normalizeOptions,
+  cacheOptions,
+}
diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js
new file mode 100644
index 00000000000000..6272e929e57bcf
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/lib/proxy.js
@@ -0,0 +1,88 @@
+'use strict'
+
+const { HttpProxyAgent } = require('http-proxy-agent')
+const { HttpsProxyAgent } = require('https-proxy-agent')
+const { SocksProxyAgent } = require('socks-proxy-agent')
+const { LRUCache } = require('lru-cache')
+const { InvalidProxyProtocolError } = require('./errors.js')
+
+const PROXY_CACHE = new LRUCache({ max: 20 })
+
+const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols)
+
+const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy'])
+
+const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => {
+  key = key.toLowerCase()
+  if (PROXY_ENV_KEYS.has(key)) {
+    acc[key] = value
+  }
+  return acc
+}, {})
+
+const getProxyAgent = (url) => {
+  url = new URL(url)
+
+  const protocol = url.protocol.slice(0, -1)
+  if (SOCKS_PROTOCOLS.has(protocol)) {
+    return SocksProxyAgent
+  }
+  if (protocol === 'https' || protocol === 'http') {
+    return [HttpProxyAgent, HttpsProxyAgent]
+  }
+
+  throw new InvalidProxyProtocolError(url)
+}
+
+const isNoProxy = (url, noProxy) => {
+  if (typeof noProxy === 'string') {
+    noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean)
+  }
+
+  if (!noProxy || !noProxy.length) {
+    return false
+  }
+
+  const hostSegments = url.hostname.split('.').reverse()
+
+  return noProxy.some((no) => {
+    const noSegments = no.split('.').filter(Boolean).reverse()
+    if (!noSegments.length) {
+      return false
+    }
+
+    for (let i = 0; i < noSegments.length; i++) {
+      if (hostSegments[i] !== noSegments[i]) {
+        return false
+      }
+    }
+
+    return true
+  })
+}
+
+const getProxy = (url, { proxy, noProxy }) => {
+  url = new URL(url)
+
+  if (!proxy) {
+    proxy = url.protocol === 'https:' ?
PROXY_ENV.https_proxy + : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!noProxy) { + noProxy = PROXY_ENV.no_proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return new URL(proxy) +} + +module.exports = { + getProxyAgent, + getProxy, + proxyCache: PROXY_CACHE, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/package.json new file mode 100644 index 00000000000000..ef5b4e3228cc46 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/agent/package.json @@ -0,0 +1,60 @@ +{ + "name": "@npmcli/agent", + "version": "2.2.2", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": "true" + }, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "semver": "^7.5.4", + "simple-socks": "^3.1.0", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 00000000000000..5fc208ff122e08 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 00000000000000..cb5982f79077ac --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 00000000000000..4d13bc037359d7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 00000000000000..93546dfb7655bf --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/errors.js new file mode 100644 index 00000000000000..1cd1e05d0c533d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. 
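+// (illustrative) via toString() below, instances render roughly as
+//   SystemError [ERR_FS_CP_EEXIST]: Target already exists: cp returned <code> (<message>) <path>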
+class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 00000000000000..972ce7aa12abef --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('fs/promises') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 00000000000000..80eb10de971918 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('./errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('fs/promises') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? 
fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? + (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
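+// e.g. (illustrative) cp('/a/b', '/mnt/link/c') where /mnt/link is a symlink
+// back to /a/b: the path-string check in isSrcSubdir() cannot see that, but
+// the bigint inode comparison against srcStat here does, and throws EINVAL.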
+async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before 
setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) + await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
+ const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 00000000000000..81c746304cc428 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,13 @@ +'use strict' + +const cp = require('./cp/index.js') +const withTempDir = require('./with-temp-dir.js') +const readdirScoped = require('./readdir-scoped.js') +const moveFile = require('./move-file.js') + +module.exports = { + cp, + withTempDir, + readdirScoped, + moveFile, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/move-file.js new file mode 100644 index 00000000000000..d56e06d384659a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/move-file.js @@ -0,0 +1,78 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const fs = require('fs/promises') + +const pathExists = async path => { + try { + await fs.access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await fs.mkdir(dirname(destination), { recursive: true }) + + try { + await fs.rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await fs.lstat(source) + if (sourceStat.isDirectory()) { + const files = await fs.readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await fs.copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await fs.readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await fs.stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await fs.symlink( + target, + symDestination, + targetStat + ) + })) + await fs.rm(source, { recursive: true, force: true }) + } +} + +module.exports = moveFile diff --git 
a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/readdir-scoped.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/readdir-scoped.js new file mode 100644 index 00000000000000..cd601dfbe7486b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/readdir-scoped.js @@ -0,0 +1,20 @@ +const { readdir } = require('fs/promises') +const { join } = require('path') + +const readdirScoped = async (dir) => { + const results = [] + + for (const item of await readdir(dir)) { + if (item.startsWith('@')) { + for (const scopedItem of await readdir(join(dir, item))) { + results.push(join(item, scopedItem)) + } + } else { + results.push(item) + } + } + + return results +} + +module.exports = readdirScoped diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 00000000000000..0738ac4f29e1be --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const { mkdir, mkdtemp, rm } = require('fs/promises') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory + await mkdir(root, { recursive: true }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json new file mode 100644 index 00000000000000..5261a11b78000e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/@npmcli/fs/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/fs", + "version": "3.1.1", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.22.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE new file mode 100644 index 00000000000000..9bcfa9d7d8d26e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/abbrev/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/abbrev/lib/index.js new file mode 100644 index 00000000000000..9f48801f049c9e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/abbrev/lib/index.js @@ -0,0 +1,50 @@ +module.exports = abbrev + +function abbrev (...args) { + let list = args.length === 1 || Array.isArray(args[0]) ? args[0] : args + + for (let i = 0, l = list.length; i < l; i++) { + list[i] = typeof list[i] === 'string' ? 
list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + list = list.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + const abbrevs = {} + let prev = '' + for (let ii = 0, ll = list.length; ii < ll; ii++) { + const current = list[ii] + const next = list[ii + 1] || '' + let nextMatches = true + let prevMatches = true + if (current === next) { + continue + } + let j = 0 + const cl = current.length + for (; j < cl; j++) { + const curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (let a = current.slice(0, j); j <= cl; j++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json b/deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json new file mode 100644 index 00000000000000..e26400445631ad --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/abbrev/package.json @@ -0,0 +1,43 @@ +{ + "name": "abbrev", + "version": "2.0.0", + "description": "Like ruby's abbrev module, but in js", + "author": "GitHub Inc.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.8.0", + "tap": "^16.3.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.8.0" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
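As a quick illustration of the abbrev() implementation vendored above (a sketch for orientation, not part of the vendored package): because the inputs are sorted lexically, each word only needs to be compared against its immediate neighbours, and every suffix past the point of divergence becomes an unambiguous abbreviation.

    const abbrev = require('abbrev')
    abbrev('ruby', 'rules')
    // => { rub: 'ruby', ruby: 'ruby', rul: 'rules', rule: 'rules', rules: 'rules' }
    // 'r' and 'ru' are shared by both inputs, so the shortest unique
    // abbreviations are 'rub' and 'rul'.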
diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js new file mode 100644 index 00000000000000..ad5a76a4f73f26 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js new file mode 100644 index 00000000000000..5f6192c3cec566 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/read.js @@ -0,0 +1,165 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? { size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + + if (stat.size !== data.length) { + throw sizeError(stat.size, data.length) + } + + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? 
{ size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. + const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js new file mode 100644 index 00000000000000..ce58d679e4cb25 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js new file mode 100644 index 
00000000000000..e7187abca8788a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/content/write.js @@ -0,0 +1,206 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? { algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + this.handleContentP.catch(error => this.emit('error', error)) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. 
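+ // (Promise.reject(e).catch(cb) delivers cb(e) from the microtask queue, so the ENODATA error surfaces asynchronously rather than synchronously from inside end())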
+ return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri) { + const destination = contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js new file mode 100644 index 00000000000000..89c28f2f257d48 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,336 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, 
+ readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +const pMap = require('p-map') +const lsStreamConcurrency = 5 + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. + if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const 
bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. + await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await pMap(buckets, async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await pMap(subbuckets, async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename/<bucket 0xFF>/<bucket 0xFF>./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await pMap(subbucketEntries, async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess?
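+ // (entries are appended to the bucket file, so they iterate oldest-to-newest here; Map.set overwrites, leaving only the newest entry per key)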
+ const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! + // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? 
contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js new file mode 100644 index 00000000000000..80ec206c7ecaaa --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? 
entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js new file mode 100644 index 00000000000000..c9b0da5f3a271b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest +module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js new file mode 100644 index 
00000000000000..2ecc60912e4563 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const { LRUCache } = require('lru-cache') + +const MEMOIZED = new LRUCache({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js new file mode 100644 index 00000000000000..9fc932d5f6dec5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. 
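+ // (an error captured here is re-checked in the final flush below, so a failed content write never produces an index entry)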
+ const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js new file mode 100644 index 00000000000000..a94760c7cf2430 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js new file mode 100644 index 00000000000000..8500c1c16a429f --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 00000000000000..445599b5038088 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 00000000000000..0bf5302136ebeb --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/util/tmp.js @@ -0,0 +1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target 
= `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js new file mode 100644 index 00000000000000..d7423da1295b68 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime () { + return { startTime: new Date() } +} + +async function markEndTime () { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
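+// For example, a live sha512 entry whose hex digest starts "deadbeef..." maps to content-v2/sha512/de/ad/beef... (see lib/content/path.js); content files whose digests are never marked live are removed in step 5.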
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
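+ // (hence the sequential for...of with await below, rather than Promise.all)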
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json new file mode 100644 index 00000000000000..6e6219158ed759 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "18.0.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/isexe/LICENSE new file mode 100644 index 00000000000000..c925dbe826b670 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2016-2022 Isaac Z. 
Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/index.js new file mode 100644 index 00000000000000..cefcb66b5c5434 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/index.js @@ -0,0 +1,46 @@ +"use strict"; +var __createBinding = (this && this.__createBinding) || (Object.create ? (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + var desc = Object.getOwnPropertyDescriptor(m, k); + if (!desc || ("get" in desc ? !m.__esModule : desc.writable || desc.configurable)) { + desc = { enumerable: true, get: function() { return m[k]; } }; + } + Object.defineProperty(o, k2, desc); +}) : (function(o, m, k, k2) { + if (k2 === undefined) k2 = k; + o[k2] = m[k]; +})); +var __setModuleDefault = (this && this.__setModuleDefault) || (Object.create ? (function(o, v) { + Object.defineProperty(o, "default", { enumerable: true, value: v }); +}) : function(o, v) { + o["default"] = v; +}); +var __importStar = (this && this.__importStar) || function (mod) { + if (mod && mod.__esModule) return mod; + var result = {}; + if (mod != null) for (var k in mod) if (k !== "default" && Object.prototype.hasOwnProperty.call(mod, k)) __createBinding(result, mod, k); + __setModuleDefault(result, mod); + return result; +}; +var __exportStar = (this && this.__exportStar) || function(m, exports) { + for (var p in m) if (p !== "default" && !Object.prototype.hasOwnProperty.call(exports, p)) __createBinding(exports, m, p); +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = exports.posix = exports.win32 = void 0; +const posix = __importStar(require("./posix.js")); +exports.posix = posix; +const win32 = __importStar(require("./win32.js")); +exports.win32 = win32; +__exportStar(require("./options.js"), exports); +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +exports.isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. 
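+ * (both delegate to the win32 or posix implementation selected above; the _ISEXE_TEST_PLATFORM_ environment variable overrides process.platform for testing)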
+ */ +exports.sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/options.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/options.js new file mode 100644 index 00000000000000..0dfad0762cc32c --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/options.js @@ -0,0 +1,3 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/posix.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/posix.js new file mode 100644 index 00000000000000..3bc5e79d7007e9 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/posix.js @@ -0,0 +1,67 @@ +"use strict"; +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. + */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? 
myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/win32.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/win32.js new file mode 100644 index 00000000000000..fa7a4d2f7d240d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/cjs/win32.js @@ -0,0 +1,62 @@ +"use strict"; +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. + * + * @module + */ +Object.defineProperty(exports, "__esModule", { value: true }); +exports.sync = exports.isexe = void 0; +const fs_1 = require("fs"); +const promises_1 = require("fs/promises"); +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await (0, promises_1.stat)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.isexe = isexe; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat((0, fs_1.statSync)(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +exports.sync = sync; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/index.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/index.js new file mode 100644 index 00000000000000..1e309acd7355ec --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/index.js @@ -0,0 +1,16 @@ +import * as posix from './posix.js'; +import * as win32 from './win32.js'; +export * from './options.js'; +export { win32, posix }; +const platform = process.env._ISEXE_TEST_PLATFORM_ || process.platform; +const impl = platform === 'win32' ? win32 : posix; +/** + * Determine whether a path is executable on the current platform. + */ +export const isexe = impl.isexe; +/** + * Synchronously determine whether a path is executable on the + * current platform. 
+ */ +export const sync = impl.sync; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/options.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/options.js new file mode 100644 index 00000000000000..e9ded40bd5b2cd --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/options.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=options.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/package.json b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/posix.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/posix.js new file mode 100644 index 00000000000000..c453776c0452f7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/posix.js @@ -0,0 +1,62 @@ +/** + * This is the Posix implementation of isexe, which uses the file + * mode and uid/gid values. + * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable according to the mode and + * current (or specified) user and group IDs. + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable according to + * the mode and current (or specified) user and group IDs. + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkStat = (stat, options) => stat.isFile() && checkMode(stat, options); +const checkMode = (stat, options) => { + const myUid = options.uid ?? process.getuid?.(); + const myGroups = options.groups ?? process.getgroups?.() ?? []; + const myGid = options.gid ?? process.getgid?.() ?? myGroups[0]; + if (myUid === undefined || myGid === undefined) { + throw new Error('cannot get uid or gid'); + } + const groups = new Set([myGid, ...myGroups]); + const mod = stat.mode; + const uid = stat.uid; + const gid = stat.gid; + const u = parseInt('100', 8); + const g = parseInt('010', 8); + const o = parseInt('001', 8); + const ug = u | g; + return !!(mod & o || + (mod & g && groups.has(gid)) || + (mod & u && uid === myUid) || + (mod & ug && myUid === 0)); +}; +//# sourceMappingURL=posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/win32.js b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/win32.js new file mode 100644 index 00000000000000..a354ee2a5115c7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/dist/mjs/win32.js @@ -0,0 +1,57 @@ +/** + * This is the Windows implementation of isexe, which uses the file + * extension and PATHEXT setting. 
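+ *
+ * For example, with PATHEXT=".COM;.EXE;.BAT;.CMD", "foo.exe" and "foo.Cmd"
+ * both count as executable (the suffix comparison below is
+ * case-insensitive), while "foo.txt" does not. (Editorial illustration,
+ * not part of the upstream isexe source.)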
+ * + * @module + */ +import { statSync } from 'fs'; +import { stat } from 'fs/promises'; +/** + * Determine whether a path is executable based on the file extension + * and PATHEXT environment variable (or specified pathExt option) + */ +export const isexe = async (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(await stat(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +/** + * Synchronously determine whether a path is executable based on the file + * extension and PATHEXT environment variable (or specified pathExt option) + */ +export const sync = (path, options = {}) => { + const { ignoreErrors = false } = options; + try { + return checkStat(statSync(path), path, options); + } + catch (e) { + const er = e; + if (ignoreErrors || er.code === 'EACCES') + return false; + throw er; + } +}; +const checkPathExt = (path, options) => { + const { pathExt = process.env.PATHEXT || '' } = options; + const peSplit = pathExt.split(';'); + if (peSplit.indexOf('') !== -1) { + return true; + } + for (let i = 0; i < peSplit.length; i++) { + const p = peSplit[i].toLowerCase(); + const ext = path.substring(path.length - p.length).toLowerCase(); + if (p && ext === p) { + return true; + } + } + return false; +}; +const checkStat = (stat, path, options) => stat.isFile() && checkPathExt(path, options); +//# sourceMappingURL=win32.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/node-gyp/node_modules/isexe/package.json b/deps/npm/node_modules/node-gyp/node_modules/isexe/package.json new file mode 100644 index 00000000000000..a0e2cd04bfdbfe --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/isexe/package.json @@ -0,0 +1,96 @@ +{ + "name": "isexe", + "version": "3.1.1", + "description": "Minimal module to check if a file is executable.", + "main": "./dist/cjs/index.js", + "module": "./dist/mjs/index.js", + "types": "./dist/cjs/index.js", + "files": [ + "dist" + ], + "exports": { + ".": { + "import": { + "types": "./dist/mjs/index.d.ts", + "default": "./dist/mjs/index.js" + }, + "require": { + "types": "./dist/cjs/index.d.ts", + "default": "./dist/cjs/index.js" + } + }, + "./posix": { + "import": { + "types": "./dist/mjs/posix.d.ts", + "default": "./dist/mjs/posix.js" + }, + "require": { + "types": "./dist/cjs/posix.d.ts", + "default": "./dist/cjs/posix.js" + } + }, + "./win32": { + "import": { + "types": "./dist/mjs/win32.d.ts", + "default": "./dist/mjs/win32.js" + }, + "require": { + "types": "./dist/cjs/win32.d.ts", + "default": "./dist/cjs/win32.js" + } + }, + "./package.json": "./package.json" + }, + "devDependencies": { + "@types/node": "^20.4.5", + "@types/tap": "^15.0.8", + "c8": "^8.0.1", + "mkdirp": "^0.5.1", + "prettier": "^2.8.8", + "rimraf": "^2.5.0", + "sync-content": "^1.0.2", + "tap": "^16.3.8", + "ts-node": "^10.9.1", + "typedoc": "^0.24.8", + "typescript": "^5.1.6" + }, + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tsc -p tsconfig/cjs.json && tsc -p tsconfig/esm.json && bash ./scripts/fixup.sh", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "c8 tap", + "snap": "c8 tap", + "format": "prettier --write . --loglevel warn --ignore-path ../../.prettierignore --cache", + "typedoc": "typedoc --tsconfig tsconfig/esm.json ./src/*.ts" + }, + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "tap": { + "coverage": false, + "node-arg": [ + "--enable-source-maps", + "--no-warnings", + "--loader", + "ts-node/esm" + ], + "ts": false + }, + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "repository": "https://github.com/isaacs/isexe", + "engines": { + "node": ">=16" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 00000000000000..1808eb2844231c --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 00000000000000..bfcfacbcc95e18 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,471 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress 
!= null ? options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }).catch((err) => { + body.emit('error', err) + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if 
(err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 00000000000000..67a66573bebe66 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 00000000000000..0de49d23fb9336 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 00000000000000..f7684d562b7fae --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 00000000000000..ada3c8600dae92 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's 
headers + const policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 00000000000000..233ba67e165502 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 00000000000000..2f12e8e1b61131 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 00000000000000..f77511279f831d --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 00000000000000..b1d221b2d0ce31 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 00000000000000..8554564074de6e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,131 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') +const { log } = require('proc-log') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + /* eslint-disable-next-line max-len */ + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json new file mode 100644 index 00000000000000..7adb4d1e7f9719 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/make-fetch-happen/package.json @@ -0,0 +1,75 @@ +{ + "name": "make-fetch-happen", + "version": "13.0.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.4", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.4", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE new file mode 100644 index 00000000000000..3c3410cdc12ee3 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. 
Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. +https://github.com/bitinn/node-fetch diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 00000000000000..b18f643269e375 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 00000000000000..121b1730102e72 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const { Minipass } = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? 
Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 00000000000000..62286bd1de0d91 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,350 @@ +'use strict' +const { Minipass } = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const { BUFFER } = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) { + // defer error until textConverted is called +} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
er + : new FetchError(`Invalid response while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + this[INTERNALS].error = error + }) + } + } + + get body () { + return this[INTERNALS].body + } + + get bodyUsed () { + return this[INTERNALS].disturbed + } + + arrayBuffer () { + return this[CONSUME_BODY]().then(buf => + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) + } + + blob () { + const ct = this.headers && this.headers.get('content-type') || '' + return this[CONSUME_BODY]().then(buf => Object.assign( + new Blob([], { type: ct.toLowerCase() }), + { [BUFFER]: buf } + )) + } + + async json () { + const buf = await this[CONSUME_BODY]() + try { + return JSON.parse(buf.toString()) + } catch (er) { + throw new FetchError( + `invalid json response body at ${this.url} reason: ${er.message}`, + 'invalid-json' + ) + } + } + + text () { + return this[CONSUME_BODY]().then(buf => buf.toString()) + } + + buffer () { + return this[CONSUME_BODY]() + } + + textConverted () { + return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) + } + + [CONSUME_BODY] () { + if (this[INTERNALS].disturbed) { + return Promise.reject(new TypeError(`body used already for: ${ + this.url}`)) + } + + this[INTERNALS].disturbed = true + + if (this[INTERNALS].error) { + return Promise.reject(this[INTERNALS].error) + } + + // body is null + if (this.body === null) { + return Promise.resolve(Buffer.alloc(0)) + } + + if (Buffer.isBuffer(this.body)) { + return Promise.resolve(this.body) + } + + const upstream = isBlob(this.body) ? this.body.stream() : this.body + + /* istanbul ignore if: should never happen */ + if (!Minipass.isStream(upstream)) { + return Promise.resolve(Buffer.alloc(0)) + } + + const stream = this.size && upstream instanceof MinipassSized ? upstream + : !this.size && upstream instanceof Minipass && + !(upstream instanceof MinipassSized) ? upstream + : this.size ? new MinipassSized({ size: this.size }) + : new Minipass() + + // allow timeout on slow response body, but only if the stream is still writable. this + // makes the timeout center on the socket stream from lib/index.js rather than the + // intermediary minipass stream we create to receive the data + const resTimeout = this.timeout && stream.writable ? setTimeout(() => { + stream.emit('error', new FetchError( + `Response timeout while trying to fetch ${ + this.url} (over ${this.timeout}ms)`, 'body-timeout')) + }, this.timeout) : null + + // do not keep the process open just for this timeout, even + // though we expect it'll get cleared eventually. + if (resTimeout && resTimeout.unref) { + resTimeout.unref() + } + + // do the pipe in the promise, because the pipe() can send too much + // data through right away and upset the MP Sized object + return new Promise((resolve) => { + // if the stream is some other kind of stream, then pipe through a MP + // so we can collect it more easily. 
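+      // (editorial summary, not in the upstream minipass-fetch source:
+      // `stream` was reused as-is above when it can already enforce the
+      // caller's constraints, i.e. it is a MinipassSized when `this.size`
+      // is set, or a plain Minipass when it is not; otherwise a fresh
+      // sink was created, and we pipe the upstream body into it here)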
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') { + throw er + } else if (er.name === 'RangeError') { + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + } else { + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + } + }) + } + + static clone (instance) { + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used') + } + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else { + return instance.body + } + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const { body } = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const { body } = instance + + if (body === null || body === undefined) { + dest.end() + } else if (Buffer.isBuffer(body) || typeof body === 'string') { + dest.end(body) + } else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true }, +}) + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
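+  // e.g. a real URLSearchParams passes, and so does any object exposing
+  // the same methods plus a sort function, per the check below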
(typeof obj !== 'object' ||
+    typeof obj.append !== 'function' ||
+    typeof obj.delete !== 'function' ||
+    typeof obj.get !== 'function' ||
+    typeof obj.getAll !== 'function' ||
+    typeof obj.has !== 'function' ||
+    typeof obj.set !== 'function') ? false
+  // Brand-checking and more duck-typing as optional condition.
+  : obj.constructor.name === 'URLSearchParams' ||
+    Object.prototype.toString.call(obj) === '[object URLSearchParams]' ||
+    typeof obj.sort === 'function'
+
+const isBlob = obj =>
+  typeof obj === 'object' &&
+  typeof obj.arrayBuffer === 'function' &&
+  typeof obj.type === 'string' &&
+  typeof obj.stream === 'function' &&
+  typeof obj.constructor === 'function' &&
+  typeof obj.constructor.name === 'string' &&
+  /^(Blob|File)$/.test(obj.constructor.name) &&
+  /^(Blob|File)$/.test(obj[Symbol.toStringTag])
+
+const convertBody = (buffer, headers) => {
+  /* istanbul ignore if */
+  if (typeof convert !== 'function') {
+    throw new Error('The package `encoding` must be installed to use the textConverted() function')
+  }
+
+  const ct = headers && headers.get('content-type')
+  let charset = 'utf-8'
+  let res
+
+  // header
+  if (ct) {
+    res = /charset=([^;]*)/i.exec(ct)
+  }
+
+  // no charset in content type, peek at response body for at most 1024 bytes
+  const str = buffer.slice(0, 1024).toString()
+
+  // html5
+  if (!res && str) {
+    res = /<meta.+?charset=(['"])(.+?)\1/i.exec(str)
+  }
+
+  // html4
+  if (!res && str) {
+    res = /<meta[\s]+?http-equiv=(['"])content-type\1[\s]+?content=(['"])(.+?)\2/i.exec(str)
+
+    if (res) {
+      res = /charset=(.*)/i.exec(res.pop())
+    }
+  }
+
+  // xml
+  if (!res && str) {
+    res = /<\?xml.+?encoding=(['"])(.+?)\1/i.exec(str)
+  }
+
+  // found charset
+  if (res) {
+    charset = res.pop()
+
+    // prevent decode issues when sites use incorrect encoding
+    // ref: https://hsivonen.fi/encoding-menu/
+    if (charset === 'gb2312' || charset === 'gbk') {
+      charset = 'gb18030'
+    }
+  }
+
+  // turn raw buffers into a single utf-8 buffer
+  return convert(
+    buffer,
+    'UTF-8',
+    charset
+  ).toString()
+}
+
+module.exports = Body
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
new file mode 100644
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/fetch-error.js
@@ -0,0 +1,32 @@
+'use strict'
+class FetchError extends Error {
+  constructor (message, type, systemError) {
+    super(message)
+    this.code = 'FETCH_ERROR'
+
+    // pick up code, expected, path, ...
+    if (systemError) {
+      Object.assign(this, systemError)
+    }
+
+    this.errno = this.code
+
+    // override anything the system error might've clobbered
+    this.type = this.code === 'EBADSIZE' && this.found > this.expect
+      ? 'max-size' : type
+    this.message = message
+    Error.captureStackTrace(this, this.constructor)
+  }
+
+  get name () {
+    return 'FetchError'
+  }
+
+  // don't allow name to be overwritten
+  set name (n) {}
+
+  get [Symbol.toStringTag] () {
+    return 'FetchError'
+  }
+}
+module.exports = FetchError
diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
new file mode 100644
index 00000000000000..dd6e854d5ba399
--- /dev/null
+++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/headers.js
@@ -0,0 +1,267 @@
+'use strict'
+const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/
+const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/
+
+const validateName = name => {
+  name = `${name}`
+  if (invalidTokenRegex.test(name) || name === '') {
+    throw new TypeError(`${name} is not a legal HTTP header name`)
+  }
+}
+
+const validateValue = value => {
+  value = `${value}`
+  if (invalidHeaderCharRegex.test(value)) {
+    throw new TypeError(`${value} is not a legal HTTP header value`)
+  }
+}
+
+const find = (map, name) => {
+  name = name.toLowerCase()
+  for (const key in map) {
+    if (key.toLowerCase() === name) {
+      return key
+    }
+  }
+  return undefined
+}
+
+const MAP = Symbol('map')
+class Headers {
+  constructor (init = undefined) {
+    this[MAP] = Object.create(null)
+    if (init instanceof Headers) {
+      const rawHeaders = init.raw()
+      const headerNames = Object.keys(rawHeaders)
+      for (const headerName of headerNames) {
+        for (const value of rawHeaders[headerName]) {
+          this.append(headerName, value)
+        }
+      }
+      return
+    }
+
+    // no-op
+    if (init === undefined || init === null) {
+      return
+    }
+
+    if (typeof init === 'object') {
+      const method = init[Symbol.iterator]
+      if (method !== null && method !== undefined) {
+        if (typeof method !== 'function') {
+          throw new TypeError('Header pairs must be iterable')
+        }
+
+        // sequence<sequence<ByteString>>
+        // Note: per spec we have to first exhaust the lists then process them
+        const pairs = []
+        for (const pair of init) {
+          if (typeof pair !== 'object' ||
+              typeof pair[Symbol.iterator] !== 'function') {
+            throw new
TypeError('Each header pair must be iterable') + } + const arrPair = Array.from(pair) + if (arrPair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple') + } + pairs.push(arrPair) + } + + for (const pair of pairs) { + this.append(pair[0], pair[1]) + } + } else { + // record + for (const key of Object.keys(init)) { + this.append(key, init[key]) + } + } + } else { + throw new TypeError('Provided initializer must be an object') + } + } + + get (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key === undefined) { + return null + } + + return this[MAP][key].join(', ') + } + + forEach (callback, thisArg = undefined) { + let pairs = getHeaders(this) + for (let i = 0; i < pairs.length; i++) { + const [name, value] = pairs[i] + callback.call(thisArg, value, name, this) + // refresh in case the callback added more headers + pairs = getHeaders(this) + } + } + + set (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + this[MAP][key !== undefined ? key : name] = [value] + } + + append (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + if (key !== undefined) { + this[MAP][key].push(value) + } else { + this[MAP][name] = [value] + } + } + + has (name) { + name = `${name}` + validateName(name) + return find(this[MAP], name) !== undefined + } + + delete (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key !== undefined) { + delete this[MAP][key] + } + } + + raw () { + return this[MAP] + } + + keys () { + return new HeadersIterator(this, 'key') + } + + values () { + return new HeadersIterator(this, 'value') + } + + [Symbol.iterator] () { + return new HeadersIterator(this, 'key+value') + } + + entries () { + return new HeadersIterator(this, 'key+value') + } + + get [Symbol.toStringTag] () { + return 'Headers' + } + + static exportNodeCompatibleHeaders (headers) { + const obj = Object.assign(Object.create(null), headers[MAP]) + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host') + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0] + } + + return obj + } + + static createHeadersLenient (obj) { + const headers = new Headers() + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue + } + + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue + } + + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val] + } else { + headers[MAP][name].push(val) + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]] + } + } + return headers + } +} + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true }, +}) + +const getHeaders = (headers, kind = 'key+value') => + Object.keys(headers[MAP]).sort().map( + kind === 'key' ? k => k.toLowerCase() + : kind === 'value' ? 
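+    // keys are sorted and lowercased so iteration order is deterministic
+    // regardless of insertion order: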
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { + throw new TypeError('Value of `this` is not a HeadersIterator') + } + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 00000000000000..da402161670e65 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,377 @@ +'use strict' +const { URL } = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +// XXX this should really be split up and unit-ized for easier testing +// and better DRY implementation of data/http request aborting +const fetch = async (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + // delay 1 promise tick so that the consumer can abort right away + return Promise.resolve().then(() => new Promise((resolve, reject) => { + let type, data + try { + const { pathname, search } = new URL(url) + const split = pathname.split(',') + if (split.length < 2) { + throw new Error('invalid data: URI') + } + const mime = split.shift() + const base64 = /;base64$/.test(mime) + type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime + const rawData = decodeURIComponent(split.join(',') + search) + data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) + } catch (er) { + return reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + + const { signal } = request + if (signal && signal.aborted) { + return reject(new AbortError('The user aborted a request.')) + } + + const headers = { 'Content-Length': data.length } + if (type) { + headers['Content-Type'] = type + } + return resolve(new Response(data, { headers })) + })) + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
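+    // choose the matching core transport; TLS options and the agent are
+    // already folded into `options` by getNodeRequestOptions: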
https : http).request + const { signal } = request + let response = null + const abort = () => { + const error = new AbortError('The user aborted a request.') + reject(error) + if (Minipass.isStream(request.body) && + typeof request.body.destroy === 'function') { + request.body.destroy(error) + } + if (response && response.body) { + response.body.emit('error', error) + } + } + + if (signal && signal.aborted) { + return abort() + } + + const abortAndFinalize = () => { + abort() + finalize() + } + + const finalize = () => { + req.abort() + if (signal) { + signal.removeEventListener('abort', abortAndFinalize) + } + clearTimeout(reqTimeout) + } + + // send request + const req = send(options) + + if (signal) { + signal.addEventListener('abort', abortAndFinalize) + } + + let reqTimeout = null + if (request.timeout) { + req.once('socket', () => { + reqTimeout = setTimeout(() => { + reject(new FetchError(`network timeout at: ${ + request.url}`, 'request-timeout')) + finalize() + }, request.timeout) + }) + } + + req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) { + req.res.emit('error', er) + } + reject(new FetchError(`request to ${request.url} failed, reason: ${ + er.message}`, 'system', er)) + finalize() + }) + + req.on('response', res => { + clearTimeout(reqTimeout) + + const headers = createHeadersLenient(res.headers) + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location') + + // HTTP fetch step 5.3 + let locationURL = null + try { + locationURL = location === null ? null : new URL(location, request.url).toString() + } catch { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + /* eslint-disable-next-line max-len */ + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) + finalize() + return + } + } + + // HTTP fetch step 5.5 + if (request.redirect === 'error') { + reject(new FetchError('uri requested responds with a redirect, ' + + `redirect mode is set to error: ${request.url}`, 'no-redirect')) + finalize() + return + } else if (request.redirect === 'manual') { + // node-fetch-specific step: make manual redirect a bit easier to + // use by setting the Location header value to the resolved URL. 
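+          // e.g. a caller passing { redirect: 'manual' } gets the 3xx
+          // response back and can read headers.get('location') itself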
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + } else if (request.redirect === 'follow' && locationURL !== null) { + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', (new URL(locationURL)).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // if the redirect is to a new hostname, strip the authorization and cookie headers + const parsedOriginal = new URL(request.url) + const parsedRedirect = new URL(locationURL) + if (parsedOriginal.hostname !== parsedRedirect.hostname) { + requestOpts.headers.delete('authorization') + requestOpts.headers.delete('cookie') + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // if an error occurs, either on the response stream itself, on one of the + // decoder streams, or a response length timeout from the Body class, we + // forward the error through to our internal body stream. If we see an + // error event on that, we call finalize to abort the request and ensure + // we don't leave a socket believing a request is in flight. + // this is difficult to test, so lacks specific coverage. + body.on('error', finalize) + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolveTrailer => + res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. 
no content response (204) + // 5. content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings === 'gzip' || codings === 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings === 'deflate' || codings === 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + // for br + if (codings === 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. 
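+        // note the Response wraps the decoder stream, so consumers always
+        // read decompressed bytes: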
+ body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError +fetch.AbortError = AbortError diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 00000000000000..054439e6699107 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,282 @@ +'use strict' +const { URL } = require('url') +const { Minipass } = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? new URL(input.url) + : input && input.href ? new URL(input.href) + : new URL(`${input}`) + + if (isRequest(input)) { + init = { ...input[INTERNALS], ...init } + } else if (!input || typeof input === 'string') { + input = {} + } + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) { + throw new TypeError('Request with GET/HEAD method cannot have body') + } + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + const signal = 'signal' in init ? 
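+      // via the INTERNALS spread above, a signal on a Request passed as
+      // input also appears in init, so clone() preserves it: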
init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { + throw new TypeError('Expected signal must be an instanceof AbortSignal') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method () { + return this[INTERNALS].method + } + + get url () { + return this[INTERNALS].parsedURL.toString() + } + + get headers () { + return this[INTERNALS].headers + } + + get redirect () { + return this[INTERNALS].redirect + } + + get signal () { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*') + } + + // Basic fetch + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported') + } + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue + '') + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', defaultUserAgent) + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate') + } + + const agent = typeof request.agent === 'function' + ? 
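+    // the agent option may be a function invoked with the parsed URL,
+    // e.g. to route http and https requests through different agents: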
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + // we cannot spread parsedURL directly, so we have to read each property one-by-one + // and map them to the equivalent https?.request() method options + const urlProps = { + auth: parsedURL.username || parsedURL.password + ? `${parsedURL.username}:${parsedURL.password}` + : '', + host: parsedURL.host, + hostname: parsedURL.hostname, + path: `${parsedURL.pathname}${parsedURL.search}`, + port: parsedURL.port, + protocol: parsedURL.protocol, + } + + return { + ...urlProps, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + timeout: request.timeout, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 00000000000000..54cb52db3594a7 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,90 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + +module.exports = 
Response + +Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json new file mode 100644 index 00000000000000..d491a7fba126d0 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/minipass-fetch/package.json @@ -0,0 +1,69 @@ +{ + "name": "minipass-fetch", + "version": "3.0.5", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test:tls-fixtures": "./test/fixtures/tls/setup.sh", + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "@ungap/url-search-params": "^0.2.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.7.3", + "encoding": "^0.1.13", + "form-data": "^4.0.0", + "nock": "^13.2.4", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^16.0.0" + }, + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
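For orientation before the nopt files: the minipass-fetch modules above add up to a node-fetch-style API. The sketch below is illustrative only and not part of the vendored files; the registry URL is an arbitrary example, and every call relies solely on behavior visible in the sources above.

```javascript
const fetch = require('minipass-fetch')

async function main () {
  const res = await fetch('https://registry.npmjs.org/nopt', {
    timeout: 30000, // rejects with a 'request-timeout' FetchError
    compress: true, // gzip/deflate/br responses are decoded transparently
  })
  if (!res.ok) {
    throw new Error(`unexpected status ${res.status} ${res.statusText}`)
  }
  // body helpers consume the stream once; a second call rejects (bodyUsed)
  console.log((await res.json())['dist-tags'])
}

main().catch(er => {
  // FetchError carries a machine-readable `type`, e.g. 'max-redirect'
  console.error(er.type || er.name, er.message)
})
```

Note that `timeout` and `compress` are node-fetch-style extensions rather than WHATWG fetch options.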
diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md b/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md new file mode 100644 index 00000000000000..a99531c04655fe --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/README.md @@ -0,0 +1,213 @@ +If you want to write an option parser, and have it be good, there are +two ways to do it. The Right Way, and the Wrong Way. + +The Wrong Way is to sit down and write an option parser. We've all done +that. + +The Right Way is to write some complex configurable program with so many +options that you hit the limit of your frustration just trying to +manage them all, and defer it with duct-tape solutions until you see +exactly to the core of the problem, and finally snap and write an +awesome option parser. + +If you want to write an option parser, don't write an option parser. +Write a package manager, or a source control system, or a service +restarter, or an operating system. You probably won't end up with a +good one of those, but if you don't give up, and you are relentless and +diligent enough in your procrastination, you may just end up with a very +nice option parser. + +## USAGE + +```javascript +// my-program.js +var nopt = require("nopt") + , Stream = require("stream").Stream + , path = require("path") + , knownOpts = { "foo" : [String, null] + , "bar" : [Stream, Number] + , "baz" : path + , "bloo" : [ "big", "medium", "small" ] + , "flag" : Boolean + , "pick" : Boolean + , "many1" : [String, Array] + , "many2" : [path, Array] + } + , shortHands = { "foofoo" : ["--foo", "Mr. Foo"] + , "b7" : ["--bar", "7"] + , "m" : ["--bloo", "medium"] + , "p" : ["--pick"] + , "f" : ["--flag"] + } + // everything is optional. + // knownOpts and shorthands default to {} + // arg list defaults to process.argv + // slice defaults to 2 + , parsed = nopt(knownOpts, shortHands, process.argv, 2) +console.log(parsed) +``` + +This would give you support for any of the following: + +```console +$ node my-program.js --foo "blerp" --no-flag +{ "foo" : "blerp", "flag" : false } + +$ node my-program.js ---bar 7 --foo "Mr. Hand" --flag +{ bar: 7, foo: "Mr. Hand", flag: true } + +$ node my-program.js --foo "blerp" -f -----p +{ foo: "blerp", flag: true, pick: true } + +$ node my-program.js -fp --foofoo +{ foo: "Mr. Foo", flag: true, pick: true } + +$ node my-program.js --foofoo -- -fp # -- stops the flag parsing. +{ foo: "Mr. Foo", argv: { remain: ["-fp"] } } + +$ node my-program.js --blatzk -fp # unknown opts are ok. +{ blatzk: true, flag: true, pick: true } + +$ node my-program.js --blatzk=1000 -fp # but you need to use = if they have a value +{ blatzk: 1000, flag: true, pick: true } + +$ node my-program.js --no-blatzk -fp # unless they start with "no-" +{ blatzk: false, flag: true, pick: true } + +$ node my-program.js --baz b/a/z # known paths are resolved. +{ baz: "/Users/isaacs/b/a/z" } + +# if Array is one of the types, then it can take many +# values, and will always be an array. The other types provided +# specify what types are allowed in the list. + +$ node my-program.js --many1 5 --many1 null --many1 foo +{ many1: ["5", "null", "foo"] } + +$ node my-program.js --many2 foo --many2 bar +{ many2: ["/path/to/foo", "path/to/bar"] } +``` + +Read the tests at the bottom of `lib/nopt.js` for more examples of +what this puppy can do. + +## Types + +The following types are supported, and defined on `nopt.typeDefs` + +* String: A normal string. No parsing is done. +* path: A file system path. 
Gets resolved against cwd if not absolute. +* url: A url. If it doesn't parse, it isn't accepted. +* Number: Must be numeric. +* Date: Must parse as a date. If it does, and `Date` is one of the options, + then it will return a Date object, not a string. +* Boolean: Must be either `true` or `false`. If an option is a boolean, + then it does not need a value, and its presence will imply `true` as + the value. To negate boolean flags, do `--no-whatever` or `--whatever + false` +* NaN: Means that the option is strictly not allowed. Any value will + fail. +* Stream: An object matching the "Stream" class in node. Valuable + for use when validating programmatically. (npm uses this to let you + supply any WriteStream on the `outfd` and `logfd` config options.) +* Array: If `Array` is specified as one of the types, then the value + will be parsed as a list of options. This means that multiple values + can be specified, and that the value will always be an array. + +If a type is an array of values not on this list, then those are +considered valid values. For instance, in the example above, the +`--bloo` option can only be one of `"big"`, `"medium"`, or `"small"`, +and any other value will be rejected. + +When parsing unknown fields, `"true"`, `"false"`, and `"null"` will be +interpreted as their JavaScript equivalents. + +You can also mix types and values, or multiple types, in a list. For +instance `{ blah: [Number, null] }` would allow a value to be set to +either a Number or null. When types are ordered, this implies a +preference, and the first type that can be used to properly interpret +the value will be used. + +To define a new type, add it to `nopt.typeDefs`. Each item in that +hash is an object with a `type` member and a `validate` method. The +`type` member is an object that matches what goes in the type list. The +`validate` method is a function that gets called with `validate(data, +key, val)`. Validate methods should assign `data[key]` to the valid +value of `val` if it can be handled properly, or return boolean +`false` if it cannot. + +You can also call `nopt.clean(data, types, typeDefs)` to clean up a +config object and remove its invalid properties. + +## Error Handling + +By default, nopt outputs a warning to standard error when invalid values for +known options are found. You can change this behavior by assigning a method +to `nopt.invalidHandler`. This method will be called with +the offending `nopt.invalidHandler(key, val, types)`. + +If no `nopt.invalidHandler` is assigned, then it will console.error +its whining. If it is assigned to boolean `false` then the warning is +suppressed. + +## Abbreviations + +Yes, they are supported. If you define options like this: + +```javascript +{ "foolhardyelephants" : Boolean +, "pileofmonkeys" : Boolean } +``` + +Then this will work: + +```bash +node program.js --foolhar --pil +node program.js --no-f --pileofmon +# etc. +``` + +## Shorthands + +Shorthands are a hash of shorter option names to a snippet of args that +they expand to. + +If multiple one-character shorthands are all combined, and the +combination does not unambiguously match any other option or shorthand, +then they will be broken up into their constituent parts. 
For example: + +```json +{ "s" : ["--loglevel", "silent"] +, "g" : "--global" +, "f" : "--force" +, "p" : "--parseable" +, "l" : "--long" +} +``` + +```bash +npm ls -sgflp +# just like doing this: +npm ls --loglevel silent --global --force --long --parseable +``` + +## The Rest of the args + +The config object returned by nopt is given a special member called +`argv`, which is an object with the following fields: + +* `remain`: The remaining args after all the parsing has occurred. +* `original`: The args as they originally appeared. +* `cooked`: The args after flags and shorthands are expanded. + +## Slicing + +Node programs are called with more or less the exact argv as it appears +in C land, after the v8 and node-specific options have been plucked off. +As such, `argv[0]` is always `node` and `argv[1]` is always the +JavaScript program being run. + +That's usually not very useful to you. So they're sliced off by +default. If you want them, then you can pass in `0` as the last +argument, or any other number that you'd like to slice off the start of +the list. diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js new file mode 100755 index 00000000000000..6ed2082064b5ea --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/bin/nopt.js @@ -0,0 +1,29 @@ +#!/usr/bin/env node +const nopt = require('../lib/nopt') +const path = require('path') +console.log('parsed', nopt({ + num: Number, + bool: Boolean, + help: Boolean, + list: Array, + 'num-list': [Number, Array], + 'str-list': [String, Array], + 'bool-list': [Boolean, Array], + str: String, + clear: Boolean, + config: Boolean, + length: Number, + file: path, +}, { + s: ['--str', 'astring'], + b: ['--bool'], + nb: ['--no-bool'], + tft: ['--bool-list', '--no-bool-list', '--bool-list', 'true'], + '?': ['--help'], + h: ['--help'], + H: ['--help'], + n: ['--num', '125'], + c: ['--config'], + l: ['--length'], + f: ['--file'], +}, process.argv, 2)) diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/debug.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/debug.js new file mode 100644 index 00000000000000..544ab382ca85c0 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/debug.js @@ -0,0 +1,5 @@ +/* istanbul ignore next */ +module.exports = process.env.DEBUG_NOPT || process.env.NOPT_DEBUG + // eslint-disable-next-line no-console + ? 
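+  // when either env var is set, log to stderr; otherwise export a no-op
+  // eslint-disable-next-line no-console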
(...a) => console.error(...a) + : () => {} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt-lib.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt-lib.js new file mode 100644 index 00000000000000..d3d1de0255ba9b --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt-lib.js @@ -0,0 +1,479 @@ +const abbrev = require('abbrev') +const debug = require('./debug') +const defaultTypeDefs = require('./type-defs') + +const hasOwn = (o, k) => Object.prototype.hasOwnProperty.call(o, k) + +const getType = (k, { types, dynamicTypes }) => { + let hasType = hasOwn(types, k) + let type = types[k] + if (!hasType && typeof dynamicTypes === 'function') { + const matchedType = dynamicTypes(k) + if (matchedType !== undefined) { + type = matchedType + hasType = true + } + } + return [hasType, type] +} + +const isTypeDef = (type, def) => def && type === def +const hasTypeDef = (type, def) => def && type.indexOf(def) !== -1 +const doesNotHaveTypeDef = (type, def) => def && !hasTypeDef(type, def) + +function nopt (args, { + types, + shorthands, + typeDefs, + invalidHandler, + typeDefault, + dynamicTypes, +} = {}) { + debug(types, shorthands, args, typeDefs) + + const data = {} + const argv = { + remain: [], + cooked: args, + original: args.slice(0), + } + + parse(args, data, argv.remain, { typeDefs, types, dynamicTypes, shorthands }) + + // now data is full + clean(data, { types, dynamicTypes, typeDefs, invalidHandler, typeDefault }) + data.argv = argv + + Object.defineProperty(data.argv, 'toString', { + value: function () { + return this.original.map(JSON.stringify).join(' ') + }, + enumerable: false, + }) + + return data +} + +function clean (data, { + types = {}, + typeDefs = {}, + dynamicTypes, + invalidHandler, + typeDefault, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type + const DateType = typeDefs.Date?.type + + const hasTypeDefault = typeof typeDefault !== 'undefined' + if (!hasTypeDefault) { + typeDefault = [false, true, null] + if (StringType) { + typeDefault.push(StringType) + } + if (ArrayType) { + typeDefault.push(ArrayType) + } + } + + const remove = {} + + Object.keys(data).forEach((k) => { + if (k === 'argv') { + return + } + let val = data[k] + debug('val=%j', val) + const isArray = Array.isArray(val) + let [hasType, rawType] = getType(k, { types, dynamicTypes }) + let type = rawType + if (!isArray) { + val = [val] + } + if (!type) { + type = typeDefault + } + if (isTypeDef(type, ArrayType)) { + type = typeDefault.concat(ArrayType) + } + if (!Array.isArray(type)) { + type = [type] + } + + debug('val=%j', val) + debug('types=', type) + val = val.map((v) => { + // if it's an unknown value, then parse false/true/null/numbers/dates + if (typeof v === 'string') { + debug('string %j', v) + v = v.trim() + if ((v === 'null' && ~type.indexOf(null)) + || (v === 'true' && + (~type.indexOf(true) || hasTypeDef(type, BooleanType))) + || (v === 'false' && + (~type.indexOf(false) || hasTypeDef(type, BooleanType)))) { + v = JSON.parse(v) + debug('jsonable %j', v) + } else if (hasTypeDef(type, NumberType) && !isNaN(v)) { + debug('convert to number', v) + v = +v + } else if (hasTypeDef(type, DateType) && !isNaN(Date.parse(v))) { + debug('convert to date', v) + v = new Date(v) + } + } + + if (!hasType) { + if (!hasTypeDefault) { + return v + } + // if the default type has been passed in then we want to validate the + // unknown 
data key instead of bailing out earlier. we also set the raw + // type which is passed to the invalid handler so that it can be + // determined if during validation if it is unknown vs invalid + rawType = typeDefault + } + + // allow `--no-blah` to set 'blah' to null if null is allowed + if (v === false && ~type.indexOf(null) && + !(~type.indexOf(false) || hasTypeDef(type, BooleanType))) { + v = null + } + + const d = {} + d[k] = v + debug('prevalidated val', d, v, rawType) + if (!validate(d, k, v, rawType, { typeDefs })) { + if (invalidHandler) { + invalidHandler(k, v, rawType, data) + } else if (invalidHandler !== false) { + debug('invalid: ' + k + '=' + v, rawType) + } + return remove + } + debug('validated v', d, v, rawType) + return d[k] + }).filter((v) => v !== remove) + + // if we allow Array specifically, then an empty array is how we + // express 'no value here', not null. Allow it. + if (!val.length && doesNotHaveTypeDef(type, ArrayType)) { + debug('VAL HAS NO LENGTH, DELETE IT', val, k, type.indexOf(ArrayType)) + delete data[k] + } else if (isArray) { + debug(isArray, data[k], val) + data[k] = val + } else { + data[k] = val[0] + } + + debug('k=%s val=%j', k, val, data[k]) + }) +} + +function validate (data, k, val, type, { typeDefs } = {}) { + const ArrayType = typeDefs?.Array?.type + // arrays are lists of types. + if (Array.isArray(type)) { + for (let i = 0, l = type.length; i < l; i++) { + if (isTypeDef(type[i], ArrayType)) { + continue + } + if (validate(data, k, val, type[i], { typeDefs })) { + return true + } + } + delete data[k] + return false + } + + // an array of anything? + if (isTypeDef(type, ArrayType)) { + return true + } + + // Original comment: + // NaN is poisonous. Means that something is not allowed. + // New comment: Changing this to an isNaN check breaks a lot of tests. + // Something is being assumed here that is not actually what happens in + // practice. Fixing it is outside the scope of getting linting to pass in + // this repo. Leaving as-is for now. + /* eslint-disable-next-line no-self-compare */ + if (type !== type) { + debug('Poison NaN', k, val, type) + delete data[k] + return false + } + + // explicit list of values + if (val === type) { + debug('Explicitly allowed %j', val) + data[k] = val + return true + } + + // now go through the list of typeDefs, validate against each one. + let ok = false + const types = Object.keys(typeDefs) + for (let i = 0, l = types.length; i < l; i++) { + debug('test type %j %j %j', k, val, types[i]) + const t = typeDefs[types[i]] + if (t && ( + (type && type.name && t.type && t.type.name) ? + (type.name === t.type.name) : + (type === t.type) + )) { + const d = {} + ok = t.validate(d, k, val) !== false + val = d[k] + if (ok) { + data[k] = val + break + } + } + } + debug('OK? %j (%j %j %j)', ok, k, val, types[types.length - 1]) + + if (!ok) { + delete data[k] + } + return ok +} + +function parse (args, data, remain, { + types = {}, + typeDefs = {}, + shorthands = {}, + dynamicTypes, +} = {}) { + const StringType = typeDefs.String?.type + const NumberType = typeDefs.Number?.type + const ArrayType = typeDefs.Array?.type + const BooleanType = typeDefs.Boolean?.type + + debug('parse', args, data, remain) + + const abbrevs = abbrev(Object.keys(types)) + debug('abbrevs=%j', abbrevs) + const shortAbbr = abbrev(Object.keys(shorthands)) + + for (let i = 0; i < args.length; i++) { + let arg = args[i] + debug('arg', arg) + + if (arg.match(/^-{2,}$/)) { + // done with keys. + // the rest are args. 
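+      // everything after a bare `--` lands in argv.remain unparsed, and
+      // the marker itself is normalized to exactly '--' in argv.cooked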
+ remain.push.apply(remain, args.slice(i + 1)) + args[i] = '--' + break + } + let hadEq = false + if (arg.charAt(0) === '-' && arg.length > 1) { + const at = arg.indexOf('=') + if (at > -1) { + hadEq = true + const v = arg.slice(at + 1) + arg = arg.slice(0, at) + args.splice(i, 1, arg, v) + } + + // see if it's a shorthand + // if so, splice and back up to re-parse it. + const shRes = resolveShort(arg, shortAbbr, abbrevs, { shorthands }) + debug('arg=%j shRes=%j', arg, shRes) + if (shRes) { + args.splice.apply(args, [i, 1].concat(shRes)) + if (arg !== shRes[0]) { + i-- + continue + } + } + arg = arg.replace(/^-+/, '') + let no = null + while (arg.toLowerCase().indexOf('no-') === 0) { + no = !no + arg = arg.slice(3) + } + + if (abbrevs[arg]) { + arg = abbrevs[arg] + } + + let [hasType, argType] = getType(arg, { types, dynamicTypes }) + let isTypeArray = Array.isArray(argType) + if (isTypeArray && argType.length === 1) { + isTypeArray = false + argType = argType[0] + } + + let isArray = isTypeDef(argType, ArrayType) || + isTypeArray && hasTypeDef(argType, ArrayType) + + // allow unknown things to be arrays if specified multiple times. + if (!hasType && hasOwn(data, arg)) { + if (!Array.isArray(data[arg])) { + data[arg] = [data[arg]] + } + isArray = true + } + + let val + let la = args[i + 1] + + const isBool = typeof no === 'boolean' || + isTypeDef(argType, BooleanType) || + isTypeArray && hasTypeDef(argType, BooleanType) || + (typeof argType === 'undefined' && !hadEq) || + (la === 'false' && + (argType === null || + isTypeArray && ~argType.indexOf(null))) + + if (isBool) { + // just set and move along + val = !no + // however, also support --bool true or --bool false + if (la === 'true' || la === 'false') { + val = JSON.parse(la) + la = null + if (no) { + val = !val + } + i++ + } + + // also support "foo":[Boolean, "bar"] and "--foo bar" + if (isTypeArray && la) { + if (~argType.indexOf(la)) { + // an explicit type + val = la + i++ + } else if (la === 'null' && ~argType.indexOf(null)) { + // null allowed + val = null + i++ + } else if (!la.match(/^-{2,}[^-]/) && + !isNaN(la) && + hasTypeDef(argType, NumberType)) { + // number + val = +la + i++ + } else if (!la.match(/^-[^-]/) && hasTypeDef(argType, StringType)) { + // string + val = la + i++ + } + } + + if (isArray) { + (data[arg] = data[arg] || []).push(val) + } else { + data[arg] = val + } + + continue + } + + if (isTypeDef(argType, StringType)) { + if (la === undefined) { + la = '' + } else if (la.match(/^-{1,2}[^-]+/)) { + la = '' + i-- + } + } + + if (la && la.match(/^-{2,}$/)) { + la = undefined + i-- + } + + val = la === undefined ? true : la + if (isArray) { + (data[arg] = data[arg] || []).push(val) + } else { + data[arg] = val + } + + i++ + continue + } + remain.push(arg) + } +} + +const SINGLES = Symbol('singles') +const singleCharacters = (arg, shorthands) => { + let singles = shorthands[SINGLES] + if (!singles) { + singles = Object.keys(shorthands).filter((s) => s.length === 1).reduce((l, r) => { + l[r] = true + return l + }, {}) + shorthands[SINGLES] = singles + debug('shorthand singles', singles) + } + const chrs = arg.split('').filter((c) => singles[c]) + return chrs.join('') === arg ? chrs : null +} + +function resolveShort (arg, ...rest) { + const { types = {}, shorthands = {} } = rest.length ? rest.pop() : {} + const shortAbbr = rest[0] ?? abbrev(Object.keys(shorthands)) + const abbrevs = rest[1] ?? 
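+  // parse() precomputes these abbrev tables and passes them positionally;
+  // standalone callers may omit them and they are rebuilt here: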
abbrev(Object.keys(types)) + + // handle single-char shorthands glommed together, like + // npm ls -glp, but only if there is one dash, and only if + // all of the chars are single-char shorthands, and it's + // not a match to some other abbrev. + arg = arg.replace(/^-+/, '') + + // if it's an exact known option, then don't go any further + if (abbrevs[arg] === arg) { + return null + } + + // if it's an exact known shortopt, same deal + if (shorthands[arg]) { + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) { + shorthands[arg] = shorthands[arg].split(/\s+/) + } + + return shorthands[arg] + } + + // first check to see if this arg is a set of single-char shorthands + const chrs = singleCharacters(arg, shorthands) + if (chrs) { + return chrs.map((c) => shorthands[c]).reduce((l, r) => l.concat(r), []) + } + + // if it's an arg abbrev, and not a literal shorthand, then prefer the arg + if (abbrevs[arg] && !shorthands[arg]) { + return null + } + + // if it's an abbr for a shorthand, then use that + if (shortAbbr[arg]) { + arg = shortAbbr[arg] + } + + // make it an array, if it's a list of words + if (shorthands[arg] && !Array.isArray(shorthands[arg])) { + shorthands[arg] = shorthands[arg].split(/\s+/) + } + + return shorthands[arg] +} + +module.exports = { + nopt, + clean, + parse, + validate, + resolveShort, + typeDefs: defaultTypeDefs, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js new file mode 100644 index 00000000000000..37f01a08783f87 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/nopt.js @@ -0,0 +1,30 @@ +const lib = require('./nopt-lib') +const defaultTypeDefs = require('./type-defs') + +// This is the version of nopt's API that requires setting typeDefs and invalidHandler +// on the required `nopt` object since it is a singleton. To not do a breaking change +// an API that requires all options be passed in is located in `nopt-lib.js` and +// exported here as lib. +// TODO(breaking): make API only work in non-singleton mode + +module.exports = exports = nopt +exports.clean = clean +exports.typeDefs = defaultTypeDefs +exports.lib = lib + +function nopt (types, shorthands, args = process.argv, slice = 2) { + return lib.nopt(args.slice(slice), { + types: types || {}, + shorthands: shorthands || {}, + typeDefs: exports.typeDefs, + invalidHandler: exports.invalidHandler, + }) +} + +function clean (data, types, typeDefs = exports.typeDefs) { + return lib.clean(data, { + types: types || {}, + typeDefs, + invalidHandler: exports.invalidHandler, + }) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/type-defs.js b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/type-defs.js new file mode 100644 index 00000000000000..608352ee248cc4 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/lib/type-defs.js @@ -0,0 +1,91 @@ +const url = require('url') +const path = require('path') +const Stream = require('stream').Stream +const os = require('os') +const debug = require('./debug') + +function validateString (data, k, val) { + data[k] = String(val) +} + +function validatePath (data, k, val) { + if (val === true) { + return false + } + if (val === null) { + return true + } + + val = String(val) + + const isWin = process.platform === 'win32' + const homePattern = isWin ? 
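+  // on Windows both ~/ and ~\ mark a home-relative path: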
/^~(\/|\\)/ : /^~\// + const home = os.homedir() + + if (home && val.match(homePattern)) { + data[k] = path.resolve(home, val.slice(2)) + } else { + data[k] = path.resolve(val) + } + return true +} + +function validateNumber (data, k, val) { + debug('validate Number %j %j %j', k, val, isNaN(val)) + if (isNaN(val)) { + return false + } + data[k] = +val +} + +function validateDate (data, k, val) { + const s = Date.parse(val) + debug('validate Date %j %j %j', k, val, s) + if (isNaN(s)) { + return false + } + data[k] = new Date(val) +} + +function validateBoolean (data, k, val) { + if (typeof val === 'string') { + if (!isNaN(val)) { + val = !!(+val) + } else if (val === 'null' || val === 'false') { + val = false + } else { + val = true + } + } else { + val = !!val + } + data[k] = val +} + +function validateUrl (data, k, val) { + // Changing this would be a breaking change in the npm cli + /* eslint-disable-next-line node/no-deprecated-api */ + val = url.parse(String(val)) + if (!val.host) { + return false + } + data[k] = val.href +} + +function validateStream (data, k, val) { + if (!(val instanceof Stream)) { + return false + } + data[k] = val +} + +module.exports = { + String: { type: String, validate: validateString }, + Boolean: { type: Boolean, validate: validateBoolean }, + url: { type: url, validate: validateUrl }, + Number: { type: Number, validate: validateNumber }, + path: { type: path, validate: validatePath }, + Stream: { type: Stream, validate: validateStream }, + Date: { type: Date, validate: validateDate }, + Array: { type: Array }, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json b/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json new file mode 100644 index 00000000000000..37b770ad487711 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/nopt/package.json @@ -0,0 +1,51 @@ +{ + "name": "nopt", + "version": "7.2.1", + "description": "Option parsing for Node, supporting types, shorthands, etc. Used by npm.", + "author": "GitHub Inc.", + "main": "lib/nopt.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/nopt.git" + }, + "bin": { + "nopt": "bin/nopt.js" + }, + "license": "ISC", + "dependencies": { + "abbrev": "^2.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.3.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": true + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/proc-log/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/proc-log/LICENSE new file mode 100644 index 00000000000000..83837797202b70 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/proc-log/lib/index.js new file mode 100644 index 00000000000000..86d90861078dab --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return 
res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/proc-log/package.json b/deps/npm/node_modules/node-gyp/node_modules/proc-log/package.json new file mode 100644 index 00000000000000..4ab89102ecc9b5 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/proc-log/package.json @@ -0,0 +1,45 @@ +{ + "name": "proc-log", + "version": "4.2.0", + "files": [ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md new file mode 100644 index 00000000000000..e335388869f50f --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js new file mode 100644 index 00000000000000..7d749ed480fb98 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. 
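+// For orientation: a valid strict-mode SRI string is `<algorithm>-<base64 digest>` +// followed by optional `?`-separated options, e.g. the sha256 of empty input: +// sha256-47DEQpj8HBSa+/TImW+5JCeuQeRkm5NMpJWZG3hSuFU= +// The STRICT_SRI_REGEX below encodes exactly that grammar.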
+const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. + if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. 
(${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the foot of the + // letter as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there's multiple standards, and the + // specified is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. 
+ // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash. + merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. 
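+  // For example, an Integrity parsed from a hypothetical +  // 'sha256-<digest> sha512-<digest>' string picks 'sha512' here, since +  // getPrioritizedHash (at the bottom of this file) ranks sha512 above sha256.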
+ pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) + + return integrity + }, + } +} + +const NODE_HASHES = crypto.getHashes() + +// This is a Best Effort™ at a reasonable priority for hash algos +const DEFAULT_PRIORITY = [ + 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + // TODO - it's unclear _which_ of these Node will actually use as its name + // for the algorithm, so we guesswork it based on the OpenSSL names. + 'sha3', + 'sha3-256', 'sha3-384', 'sha3-512', + 'sha3_256', 'sha3_384', 'sha3_512', +].filter(algo => NODE_HASHES.includes(algo)) + +function getPrioritizedHash (algo1, algo2) { + /* eslint-disable-next-line max-len */ + return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) + ? algo1 + : algo2 +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/ssri/package.json b/deps/npm/node_modules/node-gyp/node_modules/ssri/package.json new file mode 100644 index 00000000000000..28395414e4643c --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/ssri/package.json @@ -0,0 +1,65 @@ +{ + "name": "ssri", + "version": "10.0.6", + "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish", + "posttest": "npm run lint", + "test": "tap", + "coverage": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ssri.git" + }, + "keywords": [ + "w3c", + "web", + "security", + "integrity", + "checksum", + "hashing", + "subresource integrity", + "sri", + "sri hash", + "sri string", + "sri generator", + "html" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE new file mode 100644 index 00000000000000..69619c125ea7ef --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/LICENSE @@ -0,0 +1,5 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js new file mode 100644 index 00000000000000..d067d2e709809a --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/lib/index.js @@ -0,0 +1,7 @@ +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-filename/package.json b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/package.json new file mode 100644 index 00000000000000..b2fbf0666489a6 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-filename/package.json @@ -0,0 +1,51 @@ +{ + "name": "unique-filename", + "version": "3.0.0", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-filename.git" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "dependencies": { + "unique-slug": "^4.0.0" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE new file mode 100644 index 00000000000000..7953647e7760b8 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
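For context on the two small helpers above and below: `unique-filename` joins a directory, an optional prefix, and an 8-hex-character slug produced by `unique-slug` (whose implementation follows). With a third argument the slug is a deterministic murmur hash of that string; without one it is random. A minimal usage sketch, assuming both modules resolve as named:

```javascript
const os = require('os')
const uniqueFilename = require('unique-filename')

// random slug, e.g. /tmp/my-tmp-e1a91b5f (the suffix differs on every call)
const randomTmpfile = uniqueFilename(os.tmpdir(), 'my-tmp')

// deterministic slug: the same third argument always yields the same path
const hashedTmpfile = uniqueFilename(os.tmpdir(), 'my-tmp', '/some/input/path')
```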
diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js new file mode 100644 index 00000000000000..1bac84d95d7307 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/lib/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).slice(-8) + } else { + return (Math.random().toString(16) + '0000000').slice(2, 10) + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/unique-slug/package.json b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/package.json new file mode 100644 index 00000000000000..33732cdbb42859 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/unique-slug/package.json @@ -0,0 +1,47 @@ +{ + "name": "unique-slug", + "version": "4.0.0", + "description": "Generate a unique character string suitable for use in files and URLs.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^3.1.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/LICENSE b/deps/npm/node_modules/node-gyp/node_modules/which/LICENSE new file mode 100644 index 00000000000000..19129e315fe593 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/README.md b/deps/npm/node_modules/node-gyp/node_modules/which/README.md new file mode 100644 index 00000000000000..942bf1e0932b89 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/README.md @@ -0,0 +1,51 @@ +# which + +Like the unix `which` utility. + +Finds the first instance of a specified executable in the PATH +environment variable. Does not cache the results, so `hash -r` is not +needed when the PATH changes.
+ +## USAGE + +```javascript +const which = require('which') + +// async usage +// rejects if not found +const resolved = await which('node') + +// if nothrow option is used, returns null if not found +const resolvedOrNull = await which('node', { nothrow: true }) + +// sync usage +// throws if not found +const resolved = which.sync('node') + +// if nothrow option is used, returns null if not found +const resolvedOrNull = which.sync('node', { nothrow: true }) + +// Pass options to override the PATH and PATHEXT environment vars. +await which('node', { path: someOtherPath, pathExt: somePathExt }) +``` + +## CLI USAGE + +Just like the BSD `which(1)` binary but using `node-which`. + +``` +usage: node-which [-as] program ... +``` + +You can learn more about why the binary is `node-which` and not `which` +[here](https://github.com/npm/node-which/pull/67) + +## OPTIONS + +You may pass an options object as the second argument. + +- `path`: Use instead of the `PATH` environment variable. +- `pathExt`: Use instead of the `PATHEXT` environment variable. +- `all`: Return all matches, instead of just the first one. Note that + this means the function returns an array of strings instead of a + single string. diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/bin/which.js b/deps/npm/node_modules/node-gyp/node_modules/which/bin/which.js new file mode 100755 index 00000000000000..6df16f21acf930 --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/bin/which.js @@ -0,0 +1,52 @@ +#!/usr/bin/env node + +const which = require('../lib') +const argv = process.argv.slice(2) + +const usage = (err) => { + if (err) { + console.error(`which: ${err}`) + } + console.error('usage: which [-as] program ...') + process.exit(1) +} + +if (!argv.length) { + return usage() +} + +let dashdash = false +const [commands, flags] = argv.reduce((acc, arg) => { + if (dashdash || arg === '--') { + dashdash = true + return acc + } + + if (!/^-/.test(arg)) { + acc[0].push(arg) + return acc + } + + for (const flag of arg.slice(1).split('')) { + if (flag === 's') { + acc[1].silent = true + } else if (flag === 'a') { + acc[1].all = true + } else { + usage(`illegal option -- ${flag}`) + } + } + + return acc +}, [[], {}]) + +for (const command of commands) { + try { + const res = which.sync(command, { all: flags.all }) + if (!flags.silent) { + console.log([].concat(res).join('\n')) + } + } catch (err) { + process.exitCode = 1 + } +} diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/lib/index.js b/deps/npm/node_modules/node-gyp/node_modules/which/lib/index.js new file mode 100644 index 00000000000000..2fd358baf888fd --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/lib/index.js @@ -0,0 +1,111 @@ +const { isexe, sync: isexeSync } = require('isexe') +const { join, delimiter, sep, posix } = require('path') + +const isWindows = process.platform === 'win32' + +// used to check for slashes in commands passed in. always checks for the posix +// separator on all platforms, and checks for the current separator when not on +// a posix platform. don't use the isWindows check for this since that is mocked +// in tests but we still need the code to actually work when called. that is also +// why it is ignored from coverage. +/* istanbul ignore next */ +const rSlash = new RegExp(`[${posix.sep}${sep === posix.sep ?
'' : sep}]`.replace(/(\\)/g, '\\$1')) +const rRel = new RegExp(`^\\.${rSlash.source}`) + +const getNotFoundError = (cmd) => + Object.assign(new Error(`not found: ${cmd}`), { code: 'ENOENT' }) + +const getPathInfo = (cmd, { + path: optPath = process.env.PATH, + pathExt: optPathExt = process.env.PATHEXT, + delimiter: optDelimiter = delimiter, +}) => { + // If it has a slash, then we don't bother searching the pathenv. + // just check the file itself, and that's it. + const pathEnv = cmd.match(rSlash) ? [''] : [ + // windows always checks the cwd first + ...(isWindows ? [process.cwd()] : []), + ...(optPath || /* istanbul ignore next: very unusual */ '').split(optDelimiter), + ] + + if (isWindows) { + const pathExtExe = optPathExt || + ['.EXE', '.CMD', '.BAT', '.COM'].join(optDelimiter) + const pathExt = pathExtExe.split(optDelimiter).flatMap((item) => [item, item.toLowerCase()]) + if (cmd.includes('.') && pathExt[0] !== '') { + pathExt.unshift('') + } + return { pathEnv, pathExt, pathExtExe } + } + + return { pathEnv, pathExt: [''] } +} + +const getPathPart = (raw, cmd) => { + const pathPart = /^".*"$/.test(raw) ? raw.slice(1, -1) : raw + const prefix = !pathPart && rRel.test(cmd) ? cmd.slice(0, 2) : '' + return prefix + join(pathPart, cmd) +} + +const which = async (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const envPart of pathEnv) { + const p = getPathPart(envPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = await isexe(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +const whichSync = (cmd, opt = {}) => { + const { pathEnv, pathExt, pathExtExe } = getPathInfo(cmd, opt) + const found = [] + + for (const pathEnvPart of pathEnv) { + const p = getPathPart(pathEnvPart, cmd) + + for (const ext of pathExt) { + const withExt = p + ext + const is = isexeSync(withExt, { pathExt: pathExtExe, ignoreErrors: true }) + if (is) { + if (!opt.all) { + return withExt + } + found.push(withExt) + } + } + } + + if (opt.all && found.length) { + return found + } + + if (opt.nothrow) { + return null + } + + throw getNotFoundError(cmd) +} + +module.exports = which +which.sync = whichSync diff --git a/deps/npm/node_modules/node-gyp/node_modules/which/package.json b/deps/npm/node_modules/node-gyp/node_modules/which/package.json new file mode 100644 index 00000000000000..515bfb22ca0e1e --- /dev/null +++ b/deps/npm/node_modules/node-gyp/node_modules/which/package.json @@ -0,0 +1,57 @@ +{ + "author": "GitHub Inc.", + "name": "which", + "description": "Like which(1) unix command. 
Find the first instance of an executable in the PATH.", + "version": "4.0.0", + "repository": { + "type": "git", + "url": "https://github.com/npm/node-which.git" + }, + "main": "lib/index.js", + "bin": { + "node-which": "./bin/which.js" + }, + "license": "ISC", + "dependencies": { + "isexe": "^3.1.1" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.18.0", + "tap": "^16.3.0" + }, + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "files": [ + "bin/", + "lib/" + ], + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "engines": { + "node": "^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "ciVersions": [ + "16.13.0", + "16.x", + "18.0.0", + "18.x" + ], + "version": "4.18.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE b/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE new file mode 100644 index 00000000000000..9bcfa9d7d8d26e --- /dev/null +++ b/deps/npm/node_modules/nopt/node_modules/abbrev/LICENSE @@ -0,0 +1,46 @@ +This software is dual-licensed under the ISC and MIT licenses. +You may use this software under EITHER of the following licenses. + +---------- + +The ISC License + +Copyright (c) Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. + +---------- + +Copyright Isaac Z. Schlueter and Contributors +All rights reserved. + +Permission is hereby granted, free of charge, to any person +obtaining a copy of this software and associated documentation +files (the "Software"), to deal in the Software without +restriction, including without limitation the rights to use, +copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following +conditions: + +The above copyright notice and this permission notice shall be +included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, +EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES +OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND +NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT +HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, +WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING +FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR +OTHER DEALINGS IN THE SOFTWARE. 
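The `abbrev` implementation that follows is what nopt (above) relies on, via `abbrev(Object.keys(types))`, to let unambiguous prefixes of long option names resolve to the full name. A quick sketch of its output shape:

```javascript
const abbrev = require('abbrev')

abbrev('ruby', 'rules')
// => {
//   rub: 'ruby', ruby: 'ruby',
//   rul: 'rules', rule: 'rules', rules: 'rules'
// }
// 'ru' is omitted because it is ambiguous between the two words
```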
diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js b/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js new file mode 100644 index 00000000000000..9f48801f049c9e --- /dev/null +++ b/deps/npm/node_modules/nopt/node_modules/abbrev/lib/index.js @@ -0,0 +1,50 @@ +module.exports = abbrev + +function abbrev (...args) { + let list = args.length === 1 || Array.isArray(args[0]) ? args[0] : args + + for (let i = 0, l = list.length; i < l; i++) { + list[i] = typeof list[i] === 'string' ? list[i] : String(list[i]) + } + + // sort them lexicographically, so that they're next to their nearest kin + list = list.sort(lexSort) + + // walk through each, seeing how much it has in common with the next and previous + const abbrevs = {} + let prev = '' + for (let ii = 0, ll = list.length; ii < ll; ii++) { + const current = list[ii] + const next = list[ii + 1] || '' + let nextMatches = true + let prevMatches = true + if (current === next) { + continue + } + let j = 0 + const cl = current.length + for (; j < cl; j++) { + const curChar = current.charAt(j) + nextMatches = nextMatches && curChar === next.charAt(j) + prevMatches = prevMatches && curChar === prev.charAt(j) + if (!nextMatches && !prevMatches) { + j++ + break + } + } + prev = current + if (j === cl) { + abbrevs[current] = current + continue + } + for (let a = current.slice(0, j); j <= cl; j++) { + abbrevs[a] = current + a += current.charAt(j) + } + } + return abbrevs +} + +function lexSort (a, b) { + return a === b ? 0 : a > b ? 1 : -1 +} diff --git a/deps/npm/node_modules/nopt/node_modules/abbrev/package.json b/deps/npm/node_modules/nopt/node_modules/abbrev/package.json new file mode 100644 index 00000000000000..e26400445631ad --- /dev/null +++ b/deps/npm/node_modules/nopt/node_modules/abbrev/package.json @@ -0,0 +1,43 @@ +{ + "name": "abbrev", + "version": "2.0.0", + "description": "Like ruby's abbrev module, but in js", + "author": "GitHub Inc.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/abbrev-js.git" + }, + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.8.0", + "tap": "^16.3.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.8.0" + } +} diff --git a/deps/npm/node_modules/nopt/package.json b/deps/npm/node_modules/nopt/package.json index 37b770ad487711..508b8e28b59f70 100644 --- a/deps/npm/node_modules/nopt/package.json +++ b/deps/npm/node_modules/nopt/package.json @@ -1,17 +1,18 @@ { "name": "nopt", - "version": "7.2.1", + "version": "8.0.0", "description": "Option parsing for Node, supporting types, shorthands, etc. 
Used by npm.", "author": "GitHub Inc.", "main": "lib/nopt.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -25,8 +26,8 @@ "abbrev": "^2.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "tap": { @@ -40,12 +41,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/normalize-package-data/package.json b/deps/npm/node_modules/normalize-package-data/package.json index 04a7647abe65c6..a849ea3a848397 100644 --- a/deps/npm/node_modules/normalize-package-data/package.json +++ b/deps/npm/node_modules/normalize-package-data/package.json @@ -1,6 +1,6 @@ { "name": "normalize-package-data", - "version": "6.0.2", + "version": "7.0.0", "author": "GitHub Inc.", "description": "Normalizes data that can be found in package.json files.", "license": "BSD-2-Clause", @@ -12,22 +12,23 @@ "scripts": { "test": "tap", "npmclilint": "npmcli-lint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", "postsnap": "npm run lintfix --", "postlint": "template-oss-check", "snap": "tap", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "dependencies": { - "hosted-git-info": "^7.0.0", + "hosted-git-info": "^8.0.0", "semver": "^7.3.5", "validate-npm-package-license": "^3.0.4" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "files": [ @@ -35,11 +36,11 @@ "lib/" ], "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/npm-audit-report/package.json b/deps/npm/node_modules/npm-audit-report/package.json index 492071c1faf902..22b16099e23d66 100644 --- a/deps/npm/node_modules/npm-audit-report/package.json +++ b/deps/npm/node_modules/npm-audit-report/package.json @@ -1,16 +1,17 @@ { "name": "npm-audit-report", - "version": "5.0.0", + "version": "6.0.0", "description": "Given a response from the npm security api, render it into a variety of security reports", "main": "lib/index.js", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -29,8 +30,8 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "chalk": "^5.2.0", "tap": "^16.0.0" }, @@ -40,7 +41,7 @@ }, "repository": { "type": "git", - "url": "https://github.com/npm/npm-audit-report.git" + "url": "git+https://github.com/npm/npm-audit-report.git" }, "bugs": { "url": "https://github.com/npm/npm-audit-report/issues" @@ -51,10 +52,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/npm-bundled/package.json b/deps/npm/node_modules/npm-bundled/package.json index 2744ca6af67fc7..c5daf35dbaa841 100644 --- a/deps/npm/node_modules/npm-bundled/package.json +++ b/deps/npm/node_modules/npm-bundled/package.json @@ -1,6 +1,6 @@ { "name": "npm-bundled", - "version": "3.0.1", + "version": "4.0.0", "description": "list things in node_modules that are bundledDependencies, or transitive dependencies thereof", "main": "lib/index.js", "repository": { @@ -10,33 +10,34 @@ "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "mutate-fs": "^2.1.1", "tap": "^16.3.0" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "files": [ "bin/", "lib/" ], "dependencies": { - "npm-normalize-package-bin": "^3.0.0" + "npm-normalize-package-bin": "^4.0.0" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/npm-install-checks/lib/current-env.js b/deps/npm/node_modules/npm-install-checks/lib/current-env.js new file mode 100644 index 00000000000000..9babde1f277ff1 --- /dev/null +++ b/deps/npm/node_modules/npm-install-checks/lib/current-env.js @@ -0,0 +1,63 @@ +const process = require('node:process') +const nodeOs = require('node:os') + +function isMusl (file) { + return file.includes('libc.musl-') || file.includes('ld-musl-') +} + +function os () { + return process.platform +} + +function cpu () { + return process.arch +} + +function libc (osName) { + // this is to make it faster on non linux machines + if (osName !== 'linux') { + return undefined + } + let family + const originalExclude = process.report.excludeNetwork + process.report.excludeNetwork = true + const report = process.report.getReport() + process.report.excludeNetwork = originalExclude + if (report.header?.glibcVersionRuntime) { + family = 'glibc' + } else if (Array.isArray(report.sharedObjects) && report.sharedObjects.some(isMusl)) { + family = 'musl' + } + return family +} + +function devEngines (env = {}) { + const osName = env.os || os() + return { + cpu: { + name: env.cpu || cpu(), + }, + libc: { + name: env.libc || libc(osName), + }, + os: { + name: osName, + version: env.osVersion || nodeOs.release(), + }, + packageManager: { + name: 'npm', + version: env.npmVersion, + }, + runtime: { + name: 'node', + version: env.nodeVersion || process.version, + }, + } +} + +module.exports = { + cpu, + libc, + os, + devEngines, +} diff --git a/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js b/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js new file mode 100644 index 00000000000000..ac5a182330d3b2 --- /dev/null +++ b/deps/npm/node_modules/npm-install-checks/lib/dev-engines.js @@ -0,0 +1,145 @@ +const satisfies = require('semver/functions/satisfies') +const validRange = require('semver/ranges/valid') + +const recognizedOnFail = [ + 'ignore', + 'warn', + 'error', + 'download', +] + +const recognizedProperties = [ + 'name', + 'version', + 'onFail', +] + +const recognizedEngines = [ + 'packageManager', + 'runtime', + 'cpu', + 'libc', + 'os', +] + +/** checks a devEngine dependency */ +function checkDependency (wanted, current, opts) { + const { engine } = opts + + if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) { + throw new Error(`Invalid non-object value for "${engine}"`) + } + + const properties = Object.keys(wanted) + + for (const prop of properties) { + if (!recognizedProperties.includes(prop)) { + throw new Error(`Invalid property "${prop}" for "${engine}"`) + } + } + + if (!properties.includes('name')) { + throw new Error(`Missing "name" property for "${engine}"`) + } + + if (typeof wanted.name !== 'string') { + throw new Error(`Invalid non-string value for "name" within "${engine}"`) + } + + if (typeof current.name !== 'string' || current.name === '') { + throw new Error(`Unable to determine "name" for "${engine}"`) + } + + if (properties.includes('onFail')) { + if (typeof wanted.onFail !== 'string') { + throw new Error(`Invalid non-string value for "onFail" within "${engine}"`) + } + if (!recognizedOnFail.includes(wanted.onFail)) { + throw new Error(`Invalid onFail value "${wanted.onFail}" for "${engine}"`) + } + } + + if (wanted.name !== current.name) { + return new Error( + `Invalid name "${wanted.name}" does not match "${current.name}" for 
"${engine}"` + ) + } + + if (properties.includes('version')) { + if (typeof wanted.version !== 'string') { + throw new Error(`Invalid non-string value for "version" within "${engine}"`) + } + if (typeof current.version !== 'string' || current.version === '') { + throw new Error(`Unable to determine "version" for "${engine}" "${wanted.name}"`) + } + if (validRange(wanted.version)) { + if (!satisfies(current.version, wanted.version, opts.semver)) { + return new Error( + // eslint-disable-next-line max-len + `Invalid semver version "${wanted.version}" does not match "${current.version}" for "${engine}"` + ) + } + } else if (wanted.version !== current.version) { + return new Error( + `Invalid version "${wanted.version}" does not match "${current.version}" for "${engine}"` + ) + } + } +} + +/** checks devEngines package property and returns array of warnings / errors */ +function checkDevEngines (wanted, current = {}, opts = {}) { + if ((typeof wanted !== 'object' || wanted === null) || Array.isArray(wanted)) { + throw new Error(`Invalid non-object value for devEngines`) + } + + const errors = [] + + for (const engine of Object.keys(wanted)) { + if (!recognizedEngines.includes(engine)) { + throw new Error(`Invalid property "${engine}"`) + } + const dependencyAsAuthored = wanted[engine] + const dependencies = [dependencyAsAuthored].flat() + const currentEngine = current[engine] || {} + + // this accounts for empty array eg { runtime: [] } and ignores it + if (dependencies.length === 0) { + continue + } + + const depErrors = [] + for (const dep of dependencies) { + const result = checkDependency(dep, currentEngine, { ...opts, engine }) + if (result) { + depErrors.push(result) + } + } + + const invalid = depErrors.length === dependencies.length + + if (invalid) { + const lastDependency = dependencies[dependencies.length - 1] + let onFail = lastDependency.onFail || 'error' + if (onFail === 'download') { + onFail = 'error' + } + + const err = Object.assign(new Error(`Invalid engine "${engine}"`), { + errors: depErrors, + engine, + isWarn: onFail === 'warn', + isError: onFail === 'error', + current: currentEngine, + required: dependencyAsAuthored, + }) + + errors.push(err) + } + } + return errors +} + +module.exports = { + checkDevEngines, +} diff --git a/deps/npm/node_modules/npm-install-checks/lib/index.js b/deps/npm/node_modules/npm-install-checks/lib/index.js index 545472b61dc604..71702920873080 100644 --- a/deps/npm/node_modules/npm-install-checks/lib/index.js +++ b/deps/npm/node_modules/npm-install-checks/lib/index.js @@ -1,4 +1,6 @@ const semver = require('semver') +const currentEnv = require('./current-env') +const { checkDevEngines } = require('./dev-engines') const checkEngine = (target, npmVer, nodeVer, force = false) => { const nodev = force ? null : nodeVer @@ -20,44 +22,29 @@ const checkEngine = (target, npmVer, nodeVer, force = false) => { } } -const isMusl = (file) => file.includes('libc.musl-') || file.includes('ld-musl-') - const checkPlatform = (target, force = false, environment = {}) => { if (force) { return } - const platform = environment.os || process.platform - const arch = environment.cpu || process.arch - const osOk = target.os ? checkList(platform, target.os) : true - const cpuOk = target.cpu ? 
checkList(arch, target.cpu) : true + const os = environment.os || currentEnv.os() + const cpu = environment.cpu || currentEnv.cpu() + const libc = environment.libc || currentEnv.libc(os) - let libcOk = true - let libcFamily = null - if (target.libc) { - // libc checks only work in linux, any value is a failure if we aren't - if (environment.libc) { - libcOk = checkList(environment.libc, target.libc) - } else if (platform !== 'linux') { - libcOk = false - } else { - const report = process.report.getReport() - if (report.header?.glibcVersionRuntime) { - libcFamily = 'glibc' - } else if (Array.isArray(report.sharedObjects) && report.sharedObjects.some(isMusl)) { - libcFamily = 'musl' - } - libcOk = libcFamily ? checkList(libcFamily, target.libc) : false - } + const osOk = target.os ? checkList(os, target.os) : true + const cpuOk = target.cpu ? checkList(cpu, target.cpu) : true + let libcOk = target.libc ? checkList(libc, target.libc) : true + if (target.libc && !libc) { + libcOk = false } if (!osOk || !cpuOk || !libcOk) { throw Object.assign(new Error('Unsupported platform'), { pkgid: target._id, current: { - os: platform, - cpu: arch, - libc: libcFamily, + os, + cpu, + libc, }, required: { os: target.os, @@ -98,4 +85,6 @@ const checkList = (value, list) => { module.exports = { checkEngine, checkPlatform, + checkDevEngines, + currentEnv, } diff --git a/deps/npm/node_modules/npm-install-checks/package.json b/deps/npm/node_modules/npm-install-checks/package.json index 11a3b87750e25a..e9e69575a6dc6d 100644 --- a/deps/npm/node_modules/npm-install-checks/package.json +++ b/deps/npm/node_modules/npm-install-checks/package.json @@ -1,28 +1,29 @@ { "name": "npm-install-checks", - "version": "6.3.0", + "version": "7.1.0", "description": "Check the engines and platform fields in package.json", "main": "lib/index.js", "dependencies": { "semver": "^7.1.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.19.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/npm-install-checks.git" + "url": "git+https://github.com/npm/npm-install-checks.git" }, "keywords": [ "npm,", @@ -34,12 +35,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.19.0", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/npm-normalize-package-bin/package.json b/deps/npm/node_modules/npm-normalize-package-bin/package.json index 5ea50dc2a47ba1..a1aeef0e1e7512 100644 --- a/deps/npm/node_modules/npm-normalize-package-bin/package.json +++ b/deps/npm/node_modules/npm-normalize-package-bin/package.json @@ -1,26 +1,27 @@ { "name": "npm-normalize-package-bin", - "version": "3.0.1", + "version": "4.0.0", "description": "Turn any flavor of allowable package.json bin into a normalized object", "main": "lib/index.js", "repository": { "type": "git", - "url": "https://github.com/npm/npm-normalize-package-bin.git" + "url": "git+https://github.com/npm/npm-normalize-package-bin.git" }, "author": "GitHub Inc.", "license": "ISC", "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "files": [ @@ -28,11 +29,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.14.1", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/node_modules/npm-package-arg/package.json b/deps/npm/node_modules/npm-package-arg/package.json index d3f6fd7cf0a05d..80baa3d32a52fe 100644 --- a/deps/npm/node_modules/npm-package-arg/package.json +++ b/deps/npm/node_modules/npm-package-arg/package.json @@ -1,6 +1,6 @@ { "name": "npm-package-arg", - "version": "11.0.3", + "version": "12.0.0", "description": "Parse the things that can be arguments to `npm install`", "main": "./lib/npa.js", "directories": { @@ -11,14 +11,14 @@ "lib/" ], "dependencies": { - "hosted-git-info": "^7.0.0", - "proc-log": "^4.0.0", + "hosted-git-info": "^8.0.0", + "proc-log": "^5.0.0", "semver": "^7.3.5", - "validate-npm-package-name": "^5.0.0" + "validate-npm-package-name": "^6.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.23.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "scripts": { @@ -44,7 +44,7 @@ }, "homepage": "https://github.com/npm/npm-package-arg", "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "tap": { "branches": 97, @@ -55,7 +55,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.23.1", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-packlist/package.json b/deps/npm/node_modules/npm-packlist/package.json index 8c3a16e741ad30..d7e0a4fd5a8452 100644 --- a/deps/npm/node_modules/npm-packlist/package.json +++ b/deps/npm/node_modules/npm-packlist/package.json @@ -1,13 +1,13 @@ { "name": "npm-packlist", - "version": "8.0.2", + "version": "9.0.0", "description": "Get a list of the files to add from a folder into an npm package", "directories": { "test": "test" }, "main": "lib/index.js", "dependencies": { - "ignore-walk": "^6.0.4" + "ignore-walk": "^7.0.0" }, "author": "GitHub Inc.", "license": "ISC", @@ -16,9 +16,9 @@ "lib/" ], "devDependencies": { - "@npmcli/arborist": "^6.0.0 || ^6.0.0-pre.0", + "@npmcli/arborist": "^7.5.4", "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.2", + "@npmcli/template-oss": "4.23.3", "mutate-fs": "^2.1.1", "tap": "^16.0.1" }, @@ -27,16 +27,16 @@ "posttest": "npm run lint", "snap": "tap", "postsnap": "npm run lintfix --", - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "npmclilint": "npmcli-lint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force" }, "repository": { "type": "git", - "url": "https://github.com/npm/npm-packlist.git" + "url": "git+https://github.com/npm/npm-packlist.git" }, "tap": { "test-env": [ @@ -51,11 +51,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.21.2", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-pick-manifest/package.json b/deps/npm/node_modules/npm-pick-manifest/package.json index 4c0dd50630def5..5763088c250b69 100644 --- a/deps/npm/node_modules/npm-pick-manifest/package.json +++ b/deps/npm/node_modules/npm-pick-manifest/package.json @@ -1,6 +1,6 @@ { "name": "npm-pick-manifest", - "version": "9.1.0", + "version": "10.0.0", "description": "Resolves a matching manifest from a package metadata document according to standard npm semver resolution rules.", "main": "./lib", "files": [ @@ -9,13 +9,14 @@ ], "scripts": { "coverage": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "test": "tap", "posttest": "npm run lint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -29,14 +30,14 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "npm-install-checks": "^6.0.0", - "npm-normalize-package-bin": "^3.0.0", - "npm-package-arg": "^11.0.0", + "npm-install-checks": "^7.1.0", + "npm-normalize-package-bin": "^4.0.0", + "npm-package-arg": "^12.0.0", "semver": "^7.3.5" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "tap": { @@ -47,11 +48,11 @@ ] }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-profile/lib/index.js b/deps/npm/node_modules/npm-profile/lib/index.js index 69e4e49b1fcddf..83ab5e1b46b68e 100644 --- a/deps/npm/node_modules/npm-profile/lib/index.js +++ b/deps/npm/node_modules/npm-profile/lib/index.js @@ -119,7 +119,7 @@ const webAuthCheckLogin = async (doneUrl, opts, { signal } = {}) => { if (res.status === 202) { const retry = +res.headers.get('retry-after') * 1000 if (retry > 0) { - await timers.setTimeout(retry, null, { ref: false, signal }) + await timers.setTimeout(retry, null, { signal }) } return webAuthCheckLogin(doneUrl, opts, { signal }) } diff --git a/deps/npm/node_modules/npm-profile/package.json b/deps/npm/node_modules/npm-profile/package.json index ff93911716fa7e..72a19a08231e2b 100644 --- a/deps/npm/node_modules/npm-profile/package.json +++ b/deps/npm/node_modules/npm-profile/package.json @@ -1,26 +1,26 @@ { "name": "npm-profile", - "version": "10.0.0", + "version": "11.0.1", "description": "Library for updating an npmjs.com profile", "keywords": [], "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "npm-registry-fetch": "^17.0.1", - "proc-log": "^4.0.0" + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0" }, "main": "./lib/index.js", "repository": { "type": "git", - "url": "https://github.com/npm/npm-profile.git" + "url": "git+https://github.com/npm/npm-profile.git" }, "files": [ "bin/", "lib/" ], "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.4", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "nock": "^13.2.4", "tap": "^16.0.1" }, @@ -28,10 +28,11 @@ "posttest": "npm run lint", "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -41,11 +42,11 @@ ] }, "engines": { - "node": ">=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.4", + "version": "4.23.3", "publish": true } } diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE new file mode 100644 index 00000000000000..49f7efe431c9ea --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/LICENSE @@ -0,0 +1,26 @@ +Minizlib was created by Isaac Z. Schlueter. +It is a derivative work of the Node.js project. + +""" +Copyright (c) 2017-2023 Isaac Z. Schlueter and Contributors +Copyright (c) 2017-2023 Node.js contributors. All rights reserved. +Copyright (c) 2017-2023 Joyent, Inc. and other Node contributors. All rights reserved. 
+ +Permission is hereby granted, free of charge, to any person obtaining a +copy of this software and associated documentation files (the "Software"), +to deal in the Software without restriction, including without limitation +the rights to use, copy, modify, merge, publish, distribute, sublicense, +and/or sell copies of the Software, and to permit persons to whom the +Software is furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS +OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF +MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. +IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY +CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, +TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE +SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. +""" diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js new file mode 100644 index 00000000000000..dfc2c1957bfc99 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/constants.js @@ -0,0 +1,123 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.constants = void 0; +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
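// Usage sketch (illustrative, not part of the patch): because the hard-coded
// table is merged under the real zlib.constants, lookups prefer the runtime's
// own values but never come back undefined on a runtime whose zlib predates a
// given constant:
//
//   const { constants } = require('minizlib')
//   constants.BROTLI_MAX_QUALITY // 11, from the table even on an old zlib
//   constants.Z_SYNC_FLUSH       // 2, the real zlib value wins when present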
+const zlib_1 = __importDefault(require("zlib")); +/* c8 ignore start */ +const realZlibConstants = zlib_1.default.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +exports.constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# 
sourceMappingURL=constants.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js new file mode 100644 index 00000000000000..ad65eef0495076 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/index.js @@ -0,0 +1,352 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.BrotliDecompress = exports.BrotliCompress = exports.Brotli = exports.Unzip = exports.InflateRaw = exports.DeflateRaw = exports.Gunzip = exports.Gzip = exports.Inflate = exports.Deflate = exports.Zlib = exports.ZlibError = exports.constants = void 0; +const assert_1 = __importDefault(require("assert")); +const buffer_1 = require("buffer"); +const minipass_1 = require("minipass"); +const zlib_1 = __importDefault(require("zlib")); +const constants_js_1 = require("./constants.js"); +var constants_js_2 = require("./constants.js"); +Object.defineProperty(exports, "constants", { enumerable: true, get: function () { return constants_js_2.constants; } }); +const OriginalBufferConcat = buffer_1.Buffer.concat; +const _superWrite = Symbol('_superWrite'); +class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +exports.ZlibError = ZlibError; +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends minipass_1.Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new zlib_1.default[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
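            // (illustrative note, not part of the patch) aborting on the first
            // error means a corrupt stream surfaces as exactly one 'error'
            // event, e.g. a truncated gzip input yields a single
            //   ZlibError: zlib: unexpected end of file
            // rather than a cascade of follow-on errors from the dead handle.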
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(buffer_1.Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = buffer_1.Buffer.from(chunk, encoding); + if (this.#sawError) + return; + (0, assert_1.default)(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + buffer_1.Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + buffer_1.Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + buffer_1.Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
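                // (illustrative note, not part of the patch) without this
                // cleanup, a long-lived stream would accumulate one 'error'
                // listener per write() call and eventually trip Node's
                // MaxListenersExceededWarning.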
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](buffer_1.Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](buffer_1.Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants_js_1.constants.Z_FINISH; + opts.fullFlushFlag = constants_js_1.constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants_js_1.constants.Z_SYNC_FLUSH); + (0, assert_1.default)(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +exports.Zlib = Zlib; +// minimal 2-byte header +class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +exports.Deflate = Deflate; +class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +exports.Inflate = Inflate; +class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +exports.Gzip = Gzip; +class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +exports.Gunzip = Gunzip; +// raw - no header +class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +exports.DeflateRaw = DeflateRaw; +class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +exports.InflateRaw = InflateRaw; +// auto-detect header. 
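// Usage sketch (illustrative, not part of the patch): because the underlying
// zlib.Unzip sniffs the 2-byte header, a single class decompresses both gzip
// and zlib-wrapped deflate input:
//
//   const { Unzip } = require('minizlib')
//   const chunks = []
//   const unzip = new Unzip({})
//   unzip.on('data', c => chunks.push(c))
//   unzip.end(compressedBuffer) // placeholder; gzip or deflate, either works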
+class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +exports.Unzip = Unzip; +class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants_js_1.constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants_js_1.constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants_js_1.constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +exports.Brotli = Brotli; +class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +exports.BrotliCompress = BrotliCompress; +class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +exports.BrotliDecompress = BrotliDecompress; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js new file mode 100644 index 00000000000000..7faf40be5068d0 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/constants.js @@ -0,0 +1,117 @@ +// Update with any zlib constants that are added or changed in the future. +// Node v6 didn't export this, so we just hard code the version and rely +// on all the other hard-coded values from zlib v4736. When node v6 +// support drops, we can just export the realZlibConstants object. 
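// (illustrative note, not part of the patch) 4736 is 0x1280 in zlib's
// ZLIB_VERNUM encoding, i.e. the zlib 1.2.8 vintage bundled with Node v6;
// the hard-coded values below all match that release.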
+import realZlib from 'zlib'; +/* c8 ignore start */ +const realZlibConstants = realZlib.constants || { ZLIB_VERNUM: 4736 }; +/* c8 ignore stop */ +export const constants = Object.freeze(Object.assign(Object.create(null), { + Z_NO_FLUSH: 0, + Z_PARTIAL_FLUSH: 1, + Z_SYNC_FLUSH: 2, + Z_FULL_FLUSH: 3, + Z_FINISH: 4, + Z_BLOCK: 5, + Z_OK: 0, + Z_STREAM_END: 1, + Z_NEED_DICT: 2, + Z_ERRNO: -1, + Z_STREAM_ERROR: -2, + Z_DATA_ERROR: -3, + Z_MEM_ERROR: -4, + Z_BUF_ERROR: -5, + Z_VERSION_ERROR: -6, + Z_NO_COMPRESSION: 0, + Z_BEST_SPEED: 1, + Z_BEST_COMPRESSION: 9, + Z_DEFAULT_COMPRESSION: -1, + Z_FILTERED: 1, + Z_HUFFMAN_ONLY: 2, + Z_RLE: 3, + Z_FIXED: 4, + Z_DEFAULT_STRATEGY: 0, + DEFLATE: 1, + INFLATE: 2, + GZIP: 3, + GUNZIP: 4, + DEFLATERAW: 5, + INFLATERAW: 6, + UNZIP: 7, + BROTLI_DECODE: 8, + BROTLI_ENCODE: 9, + Z_MIN_WINDOWBITS: 8, + Z_MAX_WINDOWBITS: 15, + Z_DEFAULT_WINDOWBITS: 15, + Z_MIN_CHUNK: 64, + Z_MAX_CHUNK: Infinity, + Z_DEFAULT_CHUNK: 16384, + Z_MIN_MEMLEVEL: 1, + Z_MAX_MEMLEVEL: 9, + Z_DEFAULT_MEMLEVEL: 8, + Z_MIN_LEVEL: -1, + Z_MAX_LEVEL: 9, + Z_DEFAULT_LEVEL: -1, + BROTLI_OPERATION_PROCESS: 0, + BROTLI_OPERATION_FLUSH: 1, + BROTLI_OPERATION_FINISH: 2, + BROTLI_OPERATION_EMIT_METADATA: 3, + BROTLI_MODE_GENERIC: 0, + BROTLI_MODE_TEXT: 1, + BROTLI_MODE_FONT: 2, + BROTLI_DEFAULT_MODE: 0, + BROTLI_MIN_QUALITY: 0, + BROTLI_MAX_QUALITY: 11, + BROTLI_DEFAULT_QUALITY: 11, + BROTLI_MIN_WINDOW_BITS: 10, + BROTLI_MAX_WINDOW_BITS: 24, + BROTLI_LARGE_MAX_WINDOW_BITS: 30, + BROTLI_DEFAULT_WINDOW: 22, + BROTLI_MIN_INPUT_BLOCK_BITS: 16, + BROTLI_MAX_INPUT_BLOCK_BITS: 24, + BROTLI_PARAM_MODE: 0, + BROTLI_PARAM_QUALITY: 1, + BROTLI_PARAM_LGWIN: 2, + BROTLI_PARAM_LGBLOCK: 3, + BROTLI_PARAM_DISABLE_LITERAL_CONTEXT_MODELING: 4, + BROTLI_PARAM_SIZE_HINT: 5, + BROTLI_PARAM_LARGE_WINDOW: 6, + BROTLI_PARAM_NPOSTFIX: 7, + BROTLI_PARAM_NDIRECT: 8, + BROTLI_DECODER_RESULT_ERROR: 0, + BROTLI_DECODER_RESULT_SUCCESS: 1, + BROTLI_DECODER_RESULT_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_RESULT_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_PARAM_DISABLE_RING_BUFFER_REALLOCATION: 0, + BROTLI_DECODER_PARAM_LARGE_WINDOW: 1, + BROTLI_DECODER_NO_ERROR: 0, + BROTLI_DECODER_SUCCESS: 1, + BROTLI_DECODER_NEEDS_MORE_INPUT: 2, + BROTLI_DECODER_NEEDS_MORE_OUTPUT: 3, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_NIBBLE: -1, + BROTLI_DECODER_ERROR_FORMAT_RESERVED: -2, + BROTLI_DECODER_ERROR_FORMAT_EXUBERANT_META_NIBBLE: -3, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_ALPHABET: -4, + BROTLI_DECODER_ERROR_FORMAT_SIMPLE_HUFFMAN_SAME: -5, + BROTLI_DECODER_ERROR_FORMAT_CL_SPACE: -6, + BROTLI_DECODER_ERROR_FORMAT_HUFFMAN_SPACE: -7, + BROTLI_DECODER_ERROR_FORMAT_CONTEXT_MAP_REPEAT: -8, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_1: -9, + BROTLI_DECODER_ERROR_FORMAT_BLOCK_LENGTH_2: -10, + BROTLI_DECODER_ERROR_FORMAT_TRANSFORM: -11, + BROTLI_DECODER_ERROR_FORMAT_DICTIONARY: -12, + BROTLI_DECODER_ERROR_FORMAT_WINDOW_BITS: -13, + BROTLI_DECODER_ERROR_FORMAT_PADDING_1: -14, + BROTLI_DECODER_ERROR_FORMAT_PADDING_2: -15, + BROTLI_DECODER_ERROR_FORMAT_DISTANCE: -16, + BROTLI_DECODER_ERROR_DICTIONARY_NOT_SET: -19, + BROTLI_DECODER_ERROR_INVALID_ARGUMENTS: -20, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MODES: -21, + BROTLI_DECODER_ERROR_ALLOC_TREE_GROUPS: -22, + BROTLI_DECODER_ERROR_ALLOC_CONTEXT_MAP: -25, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_1: -26, + BROTLI_DECODER_ERROR_ALLOC_RING_BUFFER_2: -27, + BROTLI_DECODER_ERROR_ALLOC_BLOCK_TYPE_TREES: -30, + BROTLI_DECODER_ERROR_UNREACHABLE: -31, +}, realZlibConstants)); +//# sourceMappingURL=constants.js.map \ No 
newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js new file mode 100644 index 00000000000000..a6269b505f47cc --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/index.js @@ -0,0 +1,333 @@ +import assert from 'assert'; +import { Buffer } from 'buffer'; +import { Minipass } from 'minipass'; +import realZlib from 'zlib'; +import { constants } from './constants.js'; +export { constants } from './constants.js'; +const OriginalBufferConcat = Buffer.concat; +const _superWrite = Symbol('_superWrite'); +export class ZlibError extends Error { + code; + errno; + constructor(err) { + super('zlib: ' + err.message); + this.code = err.code; + this.errno = err.errno; + /* c8 ignore next */ + if (!this.code) + this.code = 'ZLIB_ERROR'; + this.message = 'zlib: ' + err.message; + Error.captureStackTrace(this, this.constructor); + } + get name() { + return 'ZlibError'; + } +} +// the Zlib class they all inherit from +// This thing manages the queue of requests, and returns +// true or false if there is anything in the queue when +// you call the .write() method. +const _flushFlag = Symbol('flushFlag'); +class ZlibBase extends Minipass { + #sawError = false; + #ended = false; + #flushFlag; + #finishFlushFlag; + #fullFlushFlag; + #handle; + #onError; + get sawError() { + return this.#sawError; + } + get handle() { + return this.#handle; + } + /* c8 ignore start */ + get flushFlag() { + return this.#flushFlag; + } + /* c8 ignore stop */ + constructor(opts, mode) { + if (!opts || typeof opts !== 'object') + throw new TypeError('invalid options for ZlibBase constructor'); + //@ts-ignore + super(opts); + /* c8 ignore start */ + this.#flushFlag = opts.flush ?? 0; + this.#finishFlushFlag = opts.finishFlush ?? 0; + this.#fullFlushFlag = opts.fullFlushFlag ?? 0; + /* c8 ignore stop */ + // this will throw if any options are invalid for the class selected + try { + // @types/node doesn't know that it exports the classes, but they're there + //@ts-ignore + this.#handle = new realZlib[mode](opts); + } + catch (er) { + // make sure that all errors get decorated properly + throw new ZlibError(er); + } + this.#onError = err => { + // no sense raising multiple errors, since we abort on the first one. + if (this.#sawError) + return; + this.#sawError = true; + // there is no way to cleanly recover. + // continuing only obscures problems. 
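            // (illustrative note, not part of the patch) closing eagerly also
            // releases the native zlib handle immediately; a handle that has
            // errored cannot be resumed, so keeping it open would only pin
            // native memory until garbage collection.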
+ this.close(); + this.emit('error', err); + }; + this.#handle?.on('error', er => this.#onError(new ZlibError(er))); + this.once('end', () => this.close); + } + close() { + if (this.#handle) { + this.#handle.close(); + this.#handle = undefined; + this.emit('close'); + } + } + reset() { + if (!this.#sawError) { + assert(this.#handle, 'zlib binding closed'); + //@ts-ignore + return this.#handle.reset?.(); + } + } + flush(flushFlag) { + if (this.ended) + return; + if (typeof flushFlag !== 'number') + flushFlag = this.#fullFlushFlag; + this.write(Object.assign(Buffer.alloc(0), { [_flushFlag]: flushFlag })); + } + end(chunk, encoding, cb) { + /* c8 ignore start */ + if (typeof chunk === 'function') { + cb = chunk; + encoding = undefined; + chunk = undefined; + } + if (typeof encoding === 'function') { + cb = encoding; + encoding = undefined; + } + /* c8 ignore stop */ + if (chunk) { + if (encoding) + this.write(chunk, encoding); + else + this.write(chunk); + } + this.flush(this.#finishFlushFlag); + this.#ended = true; + return super.end(cb); + } + get ended() { + return this.#ended; + } + // overridden in the gzip classes to do portable writes + [_superWrite](data) { + return super.write(data); + } + write(chunk, encoding, cb) { + // process the chunk using the sync process + // then super.write() all the outputted chunks + if (typeof encoding === 'function') + (cb = encoding), (encoding = 'utf8'); + if (typeof chunk === 'string') + chunk = Buffer.from(chunk, encoding); + if (this.#sawError) + return; + assert(this.#handle, 'zlib binding closed'); + // _processChunk tries to .close() the native handle after it's done, so we + // intercept that by temporarily making it a no-op. + // diving into the node:zlib internals a bit here + const nativeHandle = this.#handle + ._handle; + const originalNativeClose = nativeHandle.close; + nativeHandle.close = () => { }; + const originalClose = this.#handle.close; + this.#handle.close = () => { }; + // It also calls `Buffer.concat()` at the end, which may be convenient + // for some, but which we are not interested in as it slows us down. + Buffer.concat = args => args; + let result = undefined; + try { + const flushFlag = typeof chunk[_flushFlag] === 'number' + ? chunk[_flushFlag] + : this.#flushFlag; + result = this.#handle._processChunk(chunk, flushFlag); + // if we don't throw, reset it back how it was + Buffer.concat = OriginalBufferConcat; + } + catch (err) { + // or if we do, put Buffer.concat() back before we emit error + // Error events call into user code, which may call Buffer.concat() + Buffer.concat = OriginalBufferConcat; + this.#onError(new ZlibError(err)); + } + finally { + if (this.#handle) { + // Core zlib resets `_handle` to null after attempting to close the + // native handle. Our no-op handler prevented actual closure, but we + // need to restore the `._handle` property. + ; + this.#handle._handle = + nativeHandle; + nativeHandle.close = originalNativeClose; + this.#handle.close = originalClose; + // `_processChunk()` adds an 'error' listener. If we don't remove it + // after each call, these handlers start piling up. 
+ this.#handle.removeAllListeners('error'); + // make sure OUR error listener is still attached tho + } + } + if (this.#handle) + this.#handle.on('error', er => this.#onError(new ZlibError(er))); + let writeReturn; + if (result) { + if (Array.isArray(result) && result.length > 0) { + const r = result[0]; + // The first buffer is always `handle._outBuffer`, which would be + // re-used for later invocations; so, we always have to copy that one. + writeReturn = this[_superWrite](Buffer.from(r)); + for (let i = 1; i < result.length; i++) { + writeReturn = this[_superWrite](result[i]); + } + } + else { + // either a single Buffer or an empty array + writeReturn = this[_superWrite](Buffer.from(result)); + } + } + if (cb) + cb(); + return writeReturn; + } +} +export class Zlib extends ZlibBase { + #level; + #strategy; + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.Z_NO_FLUSH; + opts.finishFlush = opts.finishFlush || constants.Z_FINISH; + opts.fullFlushFlag = constants.Z_FULL_FLUSH; + super(opts, mode); + this.#level = opts.level; + this.#strategy = opts.strategy; + } + params(level, strategy) { + if (this.sawError) + return; + if (!this.handle) + throw new Error('cannot switch params when binding is closed'); + // no way to test this without also not supporting params at all + /* c8 ignore start */ + if (!this.handle.params) + throw new Error('not supported in this implementation'); + /* c8 ignore stop */ + if (this.#level !== level || this.#strategy !== strategy) { + this.flush(constants.Z_SYNC_FLUSH); + assert(this.handle, 'zlib binding closed'); + // .params() calls .flush(), but the latter is always async in the + // core zlib. We override .flush() temporarily to intercept that and + // flush synchronously. + const origFlush = this.handle.flush; + this.handle.flush = (flushFlag, cb) => { + /* c8 ignore start */ + if (typeof flushFlag === 'function') { + cb = flushFlag; + flushFlag = this.flushFlag; + } + /* c8 ignore stop */ + this.flush(flushFlag); + cb?.(); + }; + try { + ; + this.handle.params(level, strategy); + } + finally { + this.handle.flush = origFlush; + } + /* c8 ignore start */ + if (this.handle) { + this.#level = level; + this.#strategy = strategy; + } + /* c8 ignore stop */ + } + } +} +// minimal 2-byte header +export class Deflate extends Zlib { + constructor(opts) { + super(opts, 'Deflate'); + } +} +export class Inflate extends Zlib { + constructor(opts) { + super(opts, 'Inflate'); + } +} +export class Gzip extends Zlib { + #portable; + constructor(opts) { + super(opts, 'Gzip'); + this.#portable = opts && !!opts.portable; + } + [_superWrite](data) { + if (!this.#portable) + return super[_superWrite](data); + // we'll always get the header emitted in one first chunk + // overwrite the OS indicator byte with 0xFF + this.#portable = false; + data[9] = 255; + return super[_superWrite](data); + } +} +export class Gunzip extends Zlib { + constructor(opts) { + super(opts, 'Gunzip'); + } +} +// raw - no header +export class DeflateRaw extends Zlib { + constructor(opts) { + super(opts, 'DeflateRaw'); + } +} +export class InflateRaw extends Zlib { + constructor(opts) { + super(opts, 'InflateRaw'); + } +} +// auto-detect header. 
+export class Unzip extends Zlib { + constructor(opts) { + super(opts, 'Unzip'); + } +} +export class Brotli extends ZlibBase { + constructor(opts, mode) { + opts = opts || {}; + opts.flush = opts.flush || constants.BROTLI_OPERATION_PROCESS; + opts.finishFlush = + opts.finishFlush || constants.BROTLI_OPERATION_FINISH; + opts.fullFlushFlag = constants.BROTLI_OPERATION_FLUSH; + super(opts, mode); + } +} +export class BrotliCompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliCompress'); + } +} +export class BrotliDecompress extends Brotli { + constructor(opts) { + super(opts, 'BrotliDecompress'); + } +} +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json new file mode 100644 index 00000000000000..e94623ff43d353 --- /dev/null +++ b/deps/npm/node_modules/npm-registry-fetch/node_modules/minizlib/package.json @@ -0,0 +1,81 @@ +{ + "name": "minizlib", + "version": "3.0.1", + "description": "A small fast zlib stream built on [minipass](http://npm.im/minipass) and Node.js's zlib binding.", + "main": "./dist/commonjs/index.js", + "dependencies": { + "minipass": "^7.0.4", + "rimraf": "^5.0.5" + }, + "scripts": { + "prepare": "tshy", + "pretest": "npm run prepare", + "test": "tap", + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "format": "prettier --write . --loglevel warn", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/isaacs/minizlib.git" + }, + "keywords": [ + "zlib", + "gzip", + "gunzip", + "deflate", + "inflate", + "compression", + "zip", + "unzip" + ], + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "MIT", + "devDependencies": { + "@types/node": "^20.11.29", + "mkdirp": "^3.0.1", + "tap": "^18.7.1", + "tshy": "^1.12.0", + "typedoc": "^0.25.12" + }, + "files": [ + "dist" + ], + "engines": { + "node": ">= 18" + }, + "tshy": { + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "types": "./dist/commonjs/index.d.ts", + "type": "module", + "prettier": { + "semi": false, + "printWidth": 75, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + } +} diff --git a/deps/npm/node_modules/npm-registry-fetch/package.json b/deps/npm/node_modules/npm-registry-fetch/package.json index 07ea620d153172..559473b964aaa5 100644 --- a/deps/npm/node_modules/npm-registry-fetch/package.json +++ b/deps/npm/node_modules/npm-registry-fetch/package.json @@ -1,6 +1,6 @@ { "name": "npm-registry-fetch", - "version": "17.1.0", + "version": "18.0.1", "description": "Fetch-based http client for use with npm registry APIs", "main": "lib", "files": [ @@ -8,9 +8,9 @@ "lib/" ], "scripts": { - "eslint": "eslint", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", - "lintfix": "npm run lint -- --fix", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", + "lintfix": "npm run eslint -- --fix", "test": "tap", "posttest": "npm run lint", "npmclilint": "npmcli-lint", @@ -31,22 +31,22 @@ "author": "GitHub Inc.", "license": "ISC", "dependencies": { - "@npmcli/redact": "^2.0.0", + "@npmcli/redact": "^3.0.0", "jsonparse": "^1.3.1", - "make-fetch-happen": "^13.0.0", + "make-fetch-happen": "^14.0.0", "minipass": "^7.0.2", - "minipass-fetch": "^3.0.0", - "minizlib": "^2.1.2", - "npm-package-arg": "^11.0.0", - "proc-log": "^4.0.0" + "minipass-fetch": "^4.0.0", + "minizlib": "^3.0.1", + "npm-package-arg": "^12.0.0", + "proc-log": "^5.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "cacache": "^18.0.0", "nock": "^13.2.4", "require-inject": "^1.4.4", - "ssri": "^10.0.0", + "ssri": "^12.0.0", "tap": "^16.0.1" }, "tap": { @@ -58,11 +58,11 @@ ] }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/npm-user-validate/package.json b/deps/npm/node_modules/npm-user-validate/package.json index 15a1305356ef3b..8c66f8f3e47b9b 100644 --- a/deps/npm/node_modules/npm-user-validate/package.json +++ b/deps/npm/node_modules/npm-user-validate/package.json @@ -1,21 +1,22 @@ { "name": "npm-user-validate", - "version": "2.0.1", + "version": "3.0.0", "description": "User validations for npm", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^4.0.1", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.2" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -33,11 +34,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/pacote/package.json b/deps/npm/node_modules/pacote/package.json index caadaf2db50c81..0eb8261af96e0c 100644 --- a/deps/npm/node_modules/pacote/package.json +++ b/deps/npm/node_modules/pacote/package.json @@ -1,6 +1,6 @@ { "name": "pacote", - "version": "18.0.6", + "version": "19.0.0", "description": "JavaScript package downloader", "author": "GitHub Inc.", "bin": { @@ -11,11 +11,12 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "timeout": 300, @@ -26,9 +27,9 @@ }, "devDependencies": { "@npmcli/arborist": "^7.1.0", - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", - "hosted-git-info": "^7.0.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "hosted-git-info": "^8.0.0", "mutate-fs": "^2.1.1", "nock": "^13.2.4", "npm-registry-mock": "^1.3.2", @@ -44,26 +45,26 @@ "git" ], "dependencies": { - "@npmcli/git": "^5.0.0", - "@npmcli/installed-package-contents": "^2.0.1", - "@npmcli/package-json": "^5.1.0", - "@npmcli/promise-spawn": "^7.0.0", - "@npmcli/run-script": "^8.0.0", - "cacache": "^18.0.0", + "@npmcli/git": "^6.0.0", + "@npmcli/installed-package-contents": "^3.0.0", + "@npmcli/package-json": "^6.0.0", + "@npmcli/promise-spawn": "^8.0.0", + "@npmcli/run-script": "^9.0.0", + "cacache": "^19.0.0", "fs-minipass": "^3.0.0", "minipass": "^7.0.2", - "npm-package-arg": "^11.0.0", - "npm-packlist": "^8.0.0", - "npm-pick-manifest": "^9.0.0", - "npm-registry-fetch": "^17.0.0", - "proc-log": "^4.0.0", + "npm-package-arg": "^12.0.0", + "npm-packlist": "^9.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-registry-fetch": "^18.0.0", + "proc-log": "^5.0.0", "promise-retry": 
"^2.0.1", "sigstore": "^2.2.0", - "ssri": "^10.0.0", + "ssri": "^12.0.0", "tar": "^6.1.11" }, "engines": { - "node": "^16.14.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "repository": { "type": "git", @@ -71,7 +72,7 @@ }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "windowsCI": false, "publish": "true" } diff --git a/deps/npm/node_modules/parse-conflict-json/package.json b/deps/npm/node_modules/parse-conflict-json/package.json index 32584d3e6401b3..824ca8ed2bd145 100644 --- a/deps/npm/node_modules/parse-conflict-json/package.json +++ b/deps/npm/node_modules/parse-conflict-json/package.json @@ -1,6 +1,6 @@ { "name": "parse-conflict-json", - "version": "3.0.1", + "version": "4.0.0", "description": "Parse a JSON string that has git merge conflicts, resolving if possible", "author": "GitHub Inc.", "license": "ISC", @@ -8,11 +8,12 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "posttest": "npm run lint", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -22,28 +23,29 @@ ] }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.12.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "dependencies": { - "json-parse-even-better-errors": "^3.0.0", + "json-parse-even-better-errors": "^4.0.0", "just-diff": "^6.0.0", "just-diff-apply": "^5.2.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/parse-conflict-json.git" + "url": "git+https://github.com/npm/parse-conflict-json.git" }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.12.0" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/proc-log/package.json b/deps/npm/node_modules/proc-log/package.json index 4ab89102ecc9b5..957209d3954e53 100644 --- a/deps/npm/node_modules/proc-log/package.json +++ b/deps/npm/node_modules/proc-log/package.json @@ -1,6 +1,6 @@ { "name": "proc-log", - "version": "4.2.0", + "version": "5.0.0", "files": [ "bin/", "lib/" @@ -9,7 +9,7 @@ "description": "just emit 'log' events on the process object", "repository": { "type": "git", - "url": "https://github.com/npm/proc-log.git" + "url": "git+https://github.com/npm/proc-log.git" }, "author": "GitHub Inc.", "license": "ISC", @@ -18,22 +18,23 @@ "snap": "tap", "posttest": "npm run lint", "postsnap": "eslint index.js test/*.js --fix", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.21.3", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.21.3", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/proggy/package.json b/deps/npm/node_modules/proggy/package.json index 4940fc9d002a64..01ac44b4af18f8 100644 --- a/deps/npm/node_modules/proggy/package.json +++ b/deps/npm/node_modules/proggy/package.json @@ -1,6 +1,6 @@ { "name": "proggy", - "version": "2.0.0", + "version": "3.0.0", "files": [ "bin/", "lib/" @@ -9,7 +9,7 @@ "description": "Progress bar updates at a distance", "repository": { "type": "git", - "url": "https://github.com/npm/proggy.git" + "url": "git+https://github.com/npm/proggy.git" }, "author": "GitHub Inc.", "license": "ISC", @@ -18,17 +18,18 @@ "posttest": "npm run lint", "snap": "tap", "postsnap": "eslint lib test --fix", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", - "template-oss-apply": "template-oss-apply --force" + "lintfix": "npm run eslint -- --fix", + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "chalk": "^4.1.2", "cli-progress": "^3.10.0", - "npmlog": "^6.0.1", + "npmlog": "^7.0.0", "tap": "^16.0.1" }, "tap": { @@ -39,10 +40,11 @@ ] }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/promzard/package.json b/deps/npm/node_modules/promzard/package.json index 71b0ed439a4402..c224a4b458e877 100644 --- a/deps/npm/node_modules/promzard/package.json +++ b/deps/npm/node_modules/promzard/package.json @@ -2,28 +2,29 @@ "author": "GitHub Inc.", "name": "promzard", "description": "prompting wizardly", - "version": "1.0.2", + "version": "2.0.0", "repository": { "url": "git+https://github.com/npm/promzard.git", "type": "git" }, "dependencies": { - "read": "^3.0.1" + "read": "^4.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "license": "ISC", "files": [ @@ -31,11 +32,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/read-cmd-shim/package.json b/deps/npm/node_modules/read-cmd-shim/package.json index 401ee3d7101f10..3b16802df3ce21 100644 --- a/deps/npm/node_modules/read-cmd-shim/package.json +++ b/deps/npm/node_modules/read-cmd-shim/package.json @@ -1,23 +1,23 @@ { "name": "read-cmd-shim", - "version": "4.0.0", + "version": "5.0.0", "description": "Figure out what a cmd-shim is pointing at. This acts as the equivalent of fs.readlink.", "main": "lib/index.js", "devDependencies": { - "@npmcli/eslint-config": "^3.0.1", - "@npmcli/template-oss": "4.5.1", - "cmd-shim": "^5.0.0", - "rimraf": "^3.0.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", + "cmd-shim": "^7.0.0", "tap": "^16.0.1" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -28,7 +28,7 @@ }, "repository": { "type": "git", - "url": "https://github.com/npm/read-cmd-shim.git" + "url": "git+https://github.com/npm/read-cmd-shim.git" }, "license": "ISC", "homepage": "https://github.com/npm/read-cmd-shim#readme", @@ -38,10 +38,11 @@ ], "author": "GitHub Inc.", "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true } } diff --git a/deps/npm/node_modules/read-package-json-fast/package.json b/deps/npm/node_modules/read-package-json-fast/package.json index 4964bb0a934cb8..20208329e24be6 100644 --- a/deps/npm/node_modules/read-package-json-fast/package.json +++ b/deps/npm/node_modules/read-package-json-fast/package.json @@ -1,6 +1,6 @@ { "name": "read-package-json-fast", - "version": "3.0.2", + "version": "4.0.0", "description": "Like read-package-json, but faster", "main": "lib/index.js", "author": "GitHub Inc.", @@ -8,27 +8,28 @@ "scripts": { "test": "tap", "snap": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "posttest": "npm run lint" + "lintfix": "npm run eslint -- --fix", + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.11.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "dependencies": { - "json-parse-even-better-errors": "^3.0.0", - "npm-normalize-package-bin": "^3.0.0" + "json-parse-even-better-errors": "^4.0.0", + "npm-normalize-package-bin": "^4.0.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/read-package-json-fast.git" + "url": "git+https://github.com/npm/read-package-json-fast.git" }, "files": [ "bin/", @@ -36,7 +37,8 @@ ], "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.11.0" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/read/dist/commonjs/read.js b/deps/npm/node_modules/read/dist/commonjs/read.js index bab433d8a1155f..c0600d2b4e8cae 100644 --- a/deps/npm/node_modules/read/dist/commonjs/read.js +++ b/deps/npm/node_modules/read/dist/commonjs/read.js @@ -3,7 +3,7 @@ var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? 
mod : { "default": mod }; }; Object.defineProperty(exports, "__esModule", { value: true }); -exports.read = void 0; +exports.read = read; const mute_stream_1 = __importDefault(require("mute-stream")); const readline_1 = require("readline"); async function read({ default: def, input = process.stdin, output = process.stdout, completer, prompt = '', silent, timeout, edit, terminal, replace, }) { @@ -91,5 +91,4 @@ async function read({ default: def, input = process.stdin, output = process.stdo /* c8 ignore stop */ }); } -exports.read = read; //# sourceMappingURL=read.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/read/package.json b/deps/npm/node_modules/read/package.json index 799362f40f1529..337f7d26d4dd94 100644 --- a/deps/npm/node_modules/read/package.json +++ b/deps/npm/node_modules/read/package.json @@ -1,6 +1,6 @@ { "name": "read", - "version": "3.0.1", + "version": "4.0.0", "exports": { "./package.json": "./package.json", ".": { @@ -22,29 +22,29 @@ } }, "dependencies": { - "mute-stream": "^1.0.0" + "mute-stream": "^2.0.0" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.2", - "@npmcli/template-oss": "4.20.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "@types/mute-stream": "^0.0.4", "@types/tap": "^15.0.11", - "@typescript-eslint/parser": "^6.11.0", - "c8": "^8.0.1", + "@typescript-eslint/parser": "^8.0.1", + "c8": "^10.1.2", "eslint-import-resolver-typescript": "^3.6.1", "tap": "^16.3.9", "ts-node": "^10.9.1", - "tshy": "^1.8.0", + "tshy": "^3.0.2", "typescript": "^5.2.2" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "author": "GitHub Inc.", "description": "read(1) for node programs", "repository": { "type": "git", - "url": "https://github.com/npm/read.git" + "url": "git+https://github.com/npm/read.git" }, "license": "ISC", "scripts": { @@ -52,19 +52,19 @@ "pretest": "npm run prepare", "presnap": "npm run prepare", "test": "c8 tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "c8 tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.20.0", + "version": "4.23.3", "publish": true, - "typescript": true, - "content": "./scripts/template-oss" + "typescript": true }, "main": "./dist/commonjs/read.js", "types": "./dist/commonjs/read.d.ts", @@ -83,5 +83,6 @@ }, "files": [ "dist/" - ] + ], + "module": "./dist/esm/read.js" } diff --git a/deps/npm/node_modules/rimraf/LICENSE b/deps/npm/node_modules/rimraf/LICENSE new file mode 100644 index 00000000000000..1493534e60dce4 --- /dev/null +++ b/deps/npm/node_modules/rimraf/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) 2011-2023 Isaac Z. Schlueter and Contributors + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
+ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
+WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
+ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR
+IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
diff --git a/deps/npm/node_modules/rimraf/README.md b/deps/npm/node_modules/rimraf/README.md
new file mode 100644
index 00000000000000..7ab1a5d6d77ffa
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/README.md
@@ -0,0 +1,220 @@
+The [UNIX command](<https://en.wikipedia.org/wiki/Rm_(Unix)>) `rm -rf` for node
+in a cross-platform implementation.
+
+Install with `npm install rimraf`.
+
+## Major Changes
+
+### v4 to v5
+
+- There is no default export anymore. Import the functions directly
+  using, e.g., `import { rimrafSync } from 'rimraf'`.
+
+### v3 to v4
+
+- The function returns a `Promise` instead of taking a callback.
+- Globbing requires the `--glob` CLI option or `glob` option property
+  to be set. (Removed in 4.0 and 4.1, opt-in support added in 4.2.)
+- Functions take arrays of paths, as well as a single path.
+- Native implementation used by default when available, except on
+  Windows, where this implementation is faster and more reliable.
+- New implementation on Windows, falling back to "move then
+  remove" strategy when exponential backoff for `EBUSY` fails to
+  resolve the situation.
+- Simplified implementation on POSIX, since the Windows
+  affordances are not necessary there.
+- As of 4.3, return/resolve value is boolean instead of undefined.
+
+## API
+
+Hybrid module, load either with `import` or `require()`.
+
+```js
+// 'rimraf' export is the one you probably want, but other
+// strategies exported as well.
+import { rimraf, rimrafSync, native, nativeSync } from 'rimraf'
+// or
+const { rimraf, rimrafSync, native, nativeSync } = require('rimraf')
+```
+
+All removal functions return a boolean indicating that all
+entries were successfully removed.
+
+The only case in which this will not return `true` is if
+something was omitted from the removal via a `filter` option.
+
+### `rimraf(f, [opts]) -> Promise<boolean>`
+
+The first parameter is a path or array of paths. The second
+argument is an options object.
+
+Options:
+
+- `preserveRoot`: If set to boolean `false`, then allow the
+  recursive removal of the root directory. Otherwise, this is
+  not allowed.
+- `tmp`: Windows only. Temp folder to place files and
+  folders for the "move then remove" fallback. Must be on the
+  same physical device as the path being deleted. Defaults to
+  `os.tmpdir()` when that is on the same drive letter as the path
+  being deleted, or `${drive}:\temp` if present, or `${drive}:\`
+  if not.
+- `maxRetries`: Windows and Native only. Maximum number of
+  retry attempts in case of `EBUSY`, `EMFILE`, and `ENFILE`
+  errors. Default `10` for Windows implementation, `0` for Native
+  implementation.
+- `backoff`: Windows only. Rate of exponential backoff for async
+  removal in case of `EBUSY`, `EMFILE`, and `ENFILE` errors.
+  Should be a number greater than 1. Default `1.2`
+- `maxBackoff`: Windows only. Maximum total backoff time in ms to
+  attempt asynchronous retries in case of `EBUSY`, `EMFILE`, and
+  `ENFILE` errors. Default `200`. With the default `1.2` backoff
+  rate, this results in 14 retries, with the final retry being
+  delayed 33ms.
+- `retryDelay`: Native only. Time to wait between retries, using
+  linear backoff. Default `100`.
+- `signal` Pass in an AbortSignal to cancel the directory
+  removal. This is useful when removing large folder structures,
+  if you'd like to limit the time spent.
+
+  Using a `signal` option prevents the use of Node's built-in
+  `fs.rm` because that implementation does not support abort
+  signals.
+
+- `glob` Boolean flag to treat path as glob pattern, or an object
+  specifying [`glob` options](https://github.com/isaacs/node-glob).
+- `filter` Method that returns a boolean indicating whether that
+  path should be deleted. With async `rimraf` methods, this may
+  return a Promise that resolves to a boolean. (Since Promises
+  are truthy, returning a Promise from a sync filter is the same
+  as just not filtering anything.)
+
+  The first argument to the filter is the path string. The
+  second argument is either a `Dirent` or `Stats` object for that
+  path. (The first path explored will be a `Stats`, the rest
+  will be `Dirent`.)
+
+  If a filter method is provided, it will _only_ remove entries
+  if the filter returns (or resolves to) a truthy value. Omitting
+  a directory will still allow its children to be removed, unless
+  they are also filtered out, but any parents of a filtered entry
+  will not be removed, since the directory will not be empty in
+  that case.
+
+  Using a filter method prevents the use of Node's built-in
+  `fs.rm` because that implementation does not support filtering.
+
+Any other options are provided to the native Node.js `fs.rm` implementation
+when that is used.
+
+This will attempt to choose the best implementation, based on the Node.js
+version and `process.platform`. To force a specific implementation, use
+one of the other functions provided.
+
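+For a concrete feel, here is a minimal usage sketch (the paths and the
+`keep.log` name are hypothetical, purely illustrative):
+
+```js
+import { rimraf } from 'rimraf'
+
+// a single path, or an array of paths
+await rimraf('./build')
+await rimraf(['./build', './coverage'])
+
+// globbing is opt-in; the filter is consulted for every entry
+const ok = await rimraf('./temp/**/*.log', {
+  glob: true,
+  filter: path => !path.endsWith('keep.log'),
+})
+// per the note above, `ok` is false only if the filter omitted entries
+```
+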
+### `rimraf.sync(f, [opts])` <br> `rimraf.rimrafSync(f, [opts])`
+
+Synchronous form of `rimraf()`
+
+Note that, unlike many file system operations, the synchronous form will
+typically be significantly _slower_ than the async form, because recursive
+deletion is extremely parallelizable.
+
+### `rimraf.native(f, [opts])`
+
+Uses the built-in `fs.rm` implementation that Node.js provides. This is
+used by default on Node.js versions greater than or equal to `14.14.0`.
+
+### `rimraf.native.sync(f, [opts])` <br> `rimraf.nativeSync(f, [opts])`
+
+Synchronous form of `rimraf.native`
+
+### `rimraf.manual(f, [opts])`
+
+Use the JavaScript implementation appropriate for your operating system.
+
+### `rimraf.manual.sync(f, [opts])` <br> `rimraf.manualSync(f, [opts])`
+
+Synchronous form of `rimraf.manual()`
+
+### `rimraf.windows(f, [opts])`
+
+JavaScript implementation of file removal appropriate for Windows
+platforms. Works around `unlink` and `rmdir` not being atomic
+operations, and `EPERM` when deleting files with certain
+permission modes.
+
+First deletes all non-directory files within the tree, and then
+removes all directories, which should ideally be empty by that
+time. When an `ENOTEMPTY` is raised in the second pass, falls
+back to the `rimraf.moveRemove` strategy as needed.
+
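+To force this strategy explicitly, a small sketch (the path is
+hypothetical, and the retry values shown simply mirror the documented
+defaults):
+
+```js
+import { windows } from 'rimraf'
+
+// retry EBUSY/EMFILE/ENFILE with exponential backoff, falling back
+// to move-remove when a directory still reads as non-empty
+await windows('./some-locked-dir', {
+  maxRetries: 10,
+  backoff: 1.2,
+  maxBackoff: 200,
+})
+```
+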
+### `rimraf.windows.sync(path, [opts])` <br> `rimraf.windowsSync(path, [opts])`
+
+Synchronous form of `rimraf.windows()`
+
+### `rimraf.moveRemove(path, [opts])`
+
+Moves all files and folders to the parent directory of `path`
+with a temporary filename prior to attempting to remove them.
+
+Note that, in cases where the operation fails, this _may_ leave
+files lying around in the parent directory with names like
+`.file-basename.txt.0.123412341`. Until the Windows kernel
+provides a way to perform atomic `unlink` and `rmdir` operations,
+this is, unfortunately, unavoidable.
+
+To move files to a temporary directory other than the parent,
+provide `opts.tmp`. Note that this _must_ be on the same
+physical device as the folder being deleted, or else the
+operation will fail.
+
+This is the slowest strategy, but most reliable on Windows
+platforms. Used as a last-ditch fallback by `rimraf.windows()`.
+
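+For example, a brief sketch with an explicit temp folder (the
+drive-letter paths are hypothetical):
+
+```js
+import { moveRemove } from 'rimraf'
+
+// stage entries under D:\tmp (same physical device) before unlinking
+await moveRemove('D:\\projects\\stale-build', { tmp: 'D:\\tmp' })
+```
+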
+### `rimraf.moveRemove.sync(path, [opts])` <br> `rimraf.moveRemoveSync(path, [opts])`
+
+Synchronous form of `rimraf.moveRemove()`
+
+### Command Line Interface
+
+```
+rimraf version 4.3.0
+
+Usage: rimraf <path> [<path> ...]
+Deletes all files and folders at "path", recursively.
+
+Options:
+  --                   Treat all subsequent arguments as paths
+  -h --help            Display this usage info
+  --preserve-root      Do not remove '/' recursively (default)
+  --no-preserve-root   Do not treat '/' specially
+  -G --no-glob         Treat arguments as literal paths, not globs (default)
+  -g --glob            Treat arguments as glob patterns
+  -v --verbose         Be verbose when deleting files, showing them as
+                       they are removed. Not compatible with --impl=native
+  -V --no-verbose      Be silent when deleting files, showing nothing as
+                       they are removed (default)
+  -i --interactive     Ask for confirmation before deleting anything
+                       Not compatible with --impl=native
+  -I --no-interactive  Do not ask for confirmation before deleting
+
+  --impl=<type>        Specify the implementation to use:
+                       rimraf: choose the best option (default)
+                       native: the built-in implementation in Node.js
+                       manual: the platform-specific JS implementation
+                       posix: the Posix JS implementation
+                       windows: the Windows JS implementation (falls back to
+                       move-remove on ENOTEMPTY)
+                       move-remove: a slow reliable Windows fallback
+
+Implementation-specific options:
+  --tmp=<path>         Temp file folder for 'move-remove' implementation
+  --max-retries=<n>    maxRetries for 'native' and 'windows' implementations
+  --retry-delay=<n>    retryDelay for 'native' implementation, default 100
+  --backoff=<n>        Exponential backoff factor for retries (default: 1.2)
+```
+
+## mkdirp
+
+If you need to _create_ a directory recursively, check out
+[mkdirp](https://github.com/isaacs/node-mkdirp).
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts
new file mode 100644
index 00000000000000..a68e925b249a8d
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts
@@ -0,0 +1,3 @@
+export declare const defaultTmp: (path: string) => Promise<string>;
+export declare const defaultTmpSync: (path: string) => string;
+//# sourceMappingURL=default-tmp.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map
new file mode 100644
index 00000000000000..d0b35f2786233b
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.d.ts.map
@@ -0,0 +1 @@
+{"version":3,"file":"default-tmp.d.ts","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAiEA,eAAO,MAAM,UAAU,SAnCc,MAAM,oBAoCe,CAAA;AAC1D,eAAO,MAAM,cAAc,SArBQ,MAAM,WAsByB,CAAA"}
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js
new file mode 100644
index 00000000000000..ae9087881962da
--- /dev/null
+++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js
@@ -0,0 +1,61 @@
+"use strict";
+var __importDefault = (this && this.__importDefault) || function (mod) {
+    return (mod && mod.__esModule) ? mod : { "default": mod };
+};
+Object.defineProperty(exports, "__esModule", { value: true });
+exports.defaultTmpSync = exports.defaultTmp = void 0;
+// The default temporary folder location for use in the windows algorithm.
+// It's TEMPting to use dirname(path), since that's guaranteed to be on the
+// same device.
However, this means that: +// rimraf(path).then(() => rimraf(dirname(path))) +// will often fail with EBUSY, because the parent dir contains +// marked-for-deletion directory entries (which do not show up in readdir). +// The approach here is to use os.tmpdir() if it's on the same drive letter, +// or resolve(path, '\\temp') if it exists, or the root of the drive if not. +// On Posix (not that you'd be likely to use the windows algorithm there), +// it uses os.tmpdir() always. +const os_1 = require("os"); +const path_1 = require("path"); +const fs_js_1 = require("./fs.js"); +const platform_js_1 = __importDefault(require("./platform.js")); +const { stat } = fs_js_1.promises; +const isDirSync = (path) => { + try { + return (0, fs_js_1.statSync)(path).isDirectory(); + } + catch (er) { + return false; + } +}; +const isDir = (path) => stat(path).then(st => st.isDirectory(), () => false); +const win32DefaultTmp = async (path) => { + const { root } = (0, path_1.parse)(path); + const tmp = (0, os_1.tmpdir)(); + const { root: tmpRoot } = (0, path_1.parse)(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = (0, path_1.resolve)(root, '/temp'); + if (await isDir(driveTmp)) { + return driveTmp; + } + return root; +}; +const win32DefaultTmpSync = (path) => { + const { root } = (0, path_1.parse)(path); + const tmp = (0, os_1.tmpdir)(); + const { root: tmpRoot } = (0, path_1.parse)(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = (0, path_1.resolve)(root, '/temp'); + if (isDirSync(driveTmp)) { + return driveTmp; + } + return root; +}; +const posixDefaultTmp = async () => (0, os_1.tmpdir)(); +const posixDefaultTmpSync = () => (0, os_1.tmpdir)(); +exports.defaultTmp = platform_js_1.default === 'win32' ? win32DefaultTmp : posixDefaultTmp; +exports.defaultTmpSync = platform_js_1.default === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync; +//# sourceMappingURL=default-tmp.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map new file mode 100644 index 00000000000000..4984afb1b21290 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/default-tmp.js.map @@ -0,0 +1 @@ +{"version":3,"file":"default-tmp.js","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":";;;;;;AAAA,0EAA0E;AAC1E,2EAA2E;AAC3E,0CAA0C;AAC1C,iDAAiD;AACjD,8DAA8D;AAC9D,2EAA2E;AAC3E,4EAA4E;AAC5E,4EAA4E;AAC5E,0EAA0E;AAC1E,8BAA8B;AAC9B,2BAA2B;AAC3B,+BAAqC;AACrC,mCAA4C;AAC5C,gEAAoC;AACpC,MAAM,EAAE,IAAI,EAAE,GAAG,gBAAQ,CAAA;AAEzB,MAAM,SAAS,GAAG,CAAC,IAAY,EAAE,EAAE;IACjC,IAAI,CAAC;QACH,OAAO,IAAA,gBAAQ,EAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAA;IACrC,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7B,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CACb,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,EAAE,EACtB,GAAG,EAAE,CAAC,KAAK,CACZ,CAAA;AAEH,MAAM,eAAe,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7C,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,IAAA,WAAM,GAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAA,YAAK,EAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,MAAM,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC1B,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,IAAA,WAAM,GAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,IAAA,YAAK,EAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC;QACxB,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE,CAAC,IAAA,WAAM,GAAE,CAAA;AAC5C,MAAM,mBAAmB,GAAG,GAAG,EAAE,CAAC,IAAA,WAAM,GAAE,CAAA;AAE7B,QAAA,UAAU,GACrB,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,eAAe,CAAA;AAC7C,QAAA,cAAc,GACzB,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAA","sourcesContent":["// The default temporary folder location for use in the windows algorithm.\n// It's TEMPting to use dirname(path), since that's guaranteed to be on the\n// same device. 
However, this means that:\n// rimraf(path).then(() => rimraf(dirname(path)))\n// will often fail with EBUSY, because the parent dir contains\n// marked-for-deletion directory entries (which do not show up in readdir).\n// The approach here is to use os.tmpdir() if it's on the same drive letter,\n// or resolve(path, '\\\\temp') if it exists, or the root of the drive if not.\n// On Posix (not that you'd be likely to use the windows algorithm there),\n// it uses os.tmpdir() always.\nimport { tmpdir } from 'os'\nimport { parse, resolve } from 'path'\nimport { promises, statSync } from './fs.js'\nimport platform from './platform.js'\nconst { stat } = promises\n\nconst isDirSync = (path: string) => {\n try {\n return statSync(path).isDirectory()\n } catch (er) {\n return false\n }\n}\n\nconst isDir = (path: string) =>\n stat(path).then(\n st => st.isDirectory(),\n () => false,\n )\n\nconst win32DefaultTmp = async (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (await isDir(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst win32DefaultTmpSync = (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (isDirSync(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst posixDefaultTmp = async () => tmpdir()\nconst posixDefaultTmpSync = () => tmpdir()\n\nexport const defaultTmp =\n platform === 'win32' ? win32DefaultTmp : posixDefaultTmp\nexport const defaultTmpSync =\n platform === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts new file mode 100644 index 00000000000000..20e76a82c4942e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts @@ -0,0 +1,3 @@ +export declare const fixEPERM: (fn: (path: string) => Promise) => (path: string) => Promise; +export declare const fixEPERMSync: (fn: (path: string) => any) => (path: string) => any; +//# sourceMappingURL=fix-eperm.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map new file mode 100644 index 00000000000000..ac17d6f4e060bb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fix-eperm.d.ts","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,OACd,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAAkB,MAAM,iBAsB1D,CAAA;AAEH,eAAO,MAAM,YAAY,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAAY,MAAM,QAsBvE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js new file mode 100644 index 00000000000000..7baecb7c9589bd --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js @@ -0,0 +1,58 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.fixEPERMSync = exports.fixEPERM = void 0; +const fs_js_1 = require("./fs.js"); +const { chmod } = fs_js_1.promises; +const fixEPERM = (fn) => async (path) => { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + await chmod(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return await fn(path); + } + throw er; + } +}; +exports.fixEPERM = fixEPERM; +const fixEPERMSync = (fn) => (path) => { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + (0, fs_js_1.chmodSync)(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return fn(path); + } + throw er; + } +}; +exports.fixEPERMSync = fixEPERMSync; +//# sourceMappingURL=fix-eperm.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map new file mode 100644 index 00000000000000..250ea5d5b4cbc6 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fix-eperm.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"fix-eperm.js","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":";;;AAAA,mCAA6C;AAC7C,MAAM,EAAE,KAAK,EAAE,GAAG,gBAAQ,CAAA;AAEnB,MAAM,QAAQ,GACnB,CAAC,EAAkC,EAAE,EAAE,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7D,IAAI,CAAC;QACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;IACvB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,MAAM,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC1B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAvBU,QAAA,QAAQ,YAuBlB;AAEI,MAAM,YAAY,GAAG,CAAC,EAAyB,EAAE,EAAE,CAAC,CAAC,IAAY,EAAE,EAAE;IAC1E,IAAI,CAAC;QACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IACjB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,IAAA,iBAAS,EAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YACxB,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;QACjB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAtBY,QAAA,YAAY,gBAsBxB","sourcesContent":["import { chmodSync, promises } from './fs.js'\nconst { chmod } = promises\n\nexport const fixEPERM =\n (fn: (path: string) => Promise) => async (path: string) => {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n await chmod(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return await fn(path)\n }\n throw er\n }\n }\n\nexport const fixEPERMSync = (fn: (path: string) => any) => (path: string) => {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n chmodSync(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return fn(path)\n }\n throw er\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts new file mode 100644 index 00000000000000..9e4e95b4e7a411 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts @@ -0,0 +1,17 @@ +import fs, { Dirent } from 'fs'; +export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs'; +export declare const readdirSync: (path: fs.PathLike) => Dirent[]; +export declare const promises: { + chmod: (path: fs.PathLike, mode: fs.Mode) => Promise; + mkdir: (path: fs.PathLike, options?: fs.Mode | (fs.MakeDirectoryOptions & { + recursive?: boolean | null; + }) | undefined | null) => Promise; + readdir: (path: fs.PathLike) => Promise; + rename: (oldPath: fs.PathLike, newPath: fs.PathLike) => Promise; + rm: (path: fs.PathLike, options: fs.RmOptions) => Promise; + rmdir: (path: fs.PathLike) => Promise; + stat: (path: fs.PathLike) => Promise; + lstat: (path: fs.PathLike) => Promise; + unlink: (path: fs.PathLike) => Promise; +}; +//# 
sourceMappingURL=fs.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map new file mode 100644 index 00000000000000..8c8b1034cbcd27 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAG/B,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAGX,eAAO,MAAM,WAAW,SAAU,EAAE,CAAC,QAAQ,KAAG,MAAM,EACf,CAAA;AA+DvC,eAAO,MAAM,QAAQ;kBAxDA,EAAE,CAAC,QAAQ,QAAQ,EAAE,CAAC,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC;kBAMvD,EAAE,CAAC,QAAQ,YAEb,EAAE,CAAC,IAAI,GACP,CAAC,EAAE,CAAC,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,GAAG,IAAI,CAAA;KAAE,CAAC,GAC1D,SAAS,GACT,IAAI,KACP,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;oBAKP,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,MAAM,EAAE,CAAC;sBAO7B,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;eAOxD,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,SAAS,KAAG,OAAO,CAAC,IAAI,CAAC;kBAK/C,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;iBAK5B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;kBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;mBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;CAehD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js new file mode 100644 index 00000000000000..dba64c9830ed82 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js @@ -0,0 +1,46 @@ +"use strict"; +// promisify ourselves, because older nodes don't have fs.promises +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.promises = exports.readdirSync = exports.unlinkSync = exports.lstatSync = exports.statSync = exports.rmSync = exports.rmdirSync = exports.renameSync = exports.mkdirSync = exports.chmodSync = void 0; +const fs_1 = __importDefault(require("fs")); +// sync ones just take the sync version from node +var fs_2 = require("fs"); +Object.defineProperty(exports, "chmodSync", { enumerable: true, get: function () { return fs_2.chmodSync; } }); +Object.defineProperty(exports, "mkdirSync", { enumerable: true, get: function () { return fs_2.mkdirSync; } }); +Object.defineProperty(exports, "renameSync", { enumerable: true, get: function () { return fs_2.renameSync; } }); +Object.defineProperty(exports, "rmdirSync", { enumerable: true, get: function () { return fs_2.rmdirSync; } }); +Object.defineProperty(exports, "rmSync", { enumerable: true, get: function () { return fs_2.rmSync; } }); +Object.defineProperty(exports, "statSync", { enumerable: true, get: function () { return fs_2.statSync; } }); +Object.defineProperty(exports, "lstatSync", { enumerable: true, get: function () { return fs_2.lstatSync; } }); +Object.defineProperty(exports, "unlinkSync", { enumerable: true, get: function () { return fs_2.unlinkSync; } }); +const fs_3 = require("fs"); +const readdirSync = (path) => (0, fs_3.readdirSync)(path, { withFileTypes: true }); +exports.readdirSync = readdirSync; +// unrolled for better inlining, this seems to get better performance +// than something like: +// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d) +// which would be a bit cleaner. 
+const chmod = (path, mode) => new Promise((res, rej) => fs_1.default.chmod(path, mode, (er, ...d) => (er ? rej(er) : res(...d)))); +const mkdir = (path, options) => new Promise((res, rej) => fs_1.default.mkdir(path, options, (er, made) => (er ? rej(er) : res(made)))); +const readdir = (path) => new Promise((res, rej) => fs_1.default.readdir(path, { withFileTypes: true }, (er, data) => er ? rej(er) : res(data))); +const rename = (oldPath, newPath) => new Promise((res, rej) => fs_1.default.rename(oldPath, newPath, (er, ...d) => er ? rej(er) : res(...d))); +const rm = (path, options) => new Promise((res, rej) => fs_1.default.rm(path, options, (er, ...d) => (er ? rej(er) : res(...d)))); +const rmdir = (path) => new Promise((res, rej) => fs_1.default.rmdir(path, (er, ...d) => (er ? rej(er) : res(...d)))); +const stat = (path) => new Promise((res, rej) => fs_1.default.stat(path, (er, data) => (er ? rej(er) : res(data)))); +const lstat = (path) => new Promise((res, rej) => fs_1.default.lstat(path, (er, data) => (er ? rej(er) : res(data)))); +const unlink = (path) => new Promise((res, rej) => fs_1.default.unlink(path, (er, ...d) => (er ? rej(er) : res(...d)))); +exports.promises = { + chmod, + mkdir, + readdir, + rename, + rm, + rmdir, + stat, + lstat, + unlink, +}; +//# sourceMappingURL=fs.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map new file mode 100644 index 00000000000000..9d9e1fba4b2dc8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/fs.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.js","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":";AAAA,kEAAkE;;;;;;AAElE,4CAA+B;AAE/B,iDAAiD;AACjD,yBASW;AART,+FAAA,SAAS,OAAA;AACT,+FAAA,SAAS,OAAA;AACT,gGAAA,UAAU,OAAA;AACV,+FAAA,SAAS,OAAA;AACT,4FAAA,MAAM,OAAA;AACN,8FAAA,QAAQ,OAAA;AACR,+FAAA,SAAS,OAAA;AACT,gGAAA,UAAU,OAAA;AAGZ,2BAA0C;AACnC,MAAM,WAAW,GAAG,CAAC,IAAiB,EAAY,EAAE,CACzD,IAAA,gBAAM,EAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAA;AAD1B,QAAA,WAAW,eACe;AAEvC,qEAAqE;AACrE,uBAAuB;AACvB,sEAAsE;AACtE,gCAAgC;AAEhC,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAE,IAAa,EAAiB,EAAE,CAChE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CACZ,IAAiB,EACjB,OAIQ,EACqB,EAAE,CAC/B,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAClE,CAAA;AAEH,MAAM,OAAO,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACvD,IAAI,OAAO,CAAW,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACjC,YAAE,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACrD,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,OAAoB,EAAE,OAAoB,EAAiB,EAAE,CAC3E,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAC9C,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,EAAE,GAAG,CAAC,IAAiB,EAAE,OAAqB,EAAiB,EAAE,CACrE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAA
C,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAiB,EAAE,CACjD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAChE,CAAA;AAEH,MAAM,IAAI,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACpD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACxD,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACrD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACzD,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,IAAiB,EAAiB,EAAE,CAClD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,YAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACjE,CAAA;AAEU,QAAA,QAAQ,GAAG;IACtB,KAAK;IACL,KAAK;IACL,OAAO;IACP,MAAM;IACN,EAAE;IACF,KAAK;IACL,IAAI;IACJ,KAAK;IACL,MAAM;CACP,CAAA","sourcesContent":["// promisify ourselves, because older nodes don't have fs.promises\n\nimport fs, { Dirent } from 'fs'\n\n// sync ones just take the sync version from node\nexport {\n chmodSync,\n mkdirSync,\n renameSync,\n rmdirSync,\n rmSync,\n statSync,\n lstatSync,\n unlinkSync,\n} from 'fs'\n\nimport { readdirSync as rdSync } from 'fs'\nexport const readdirSync = (path: fs.PathLike): Dirent[] =>\n rdSync(path, { withFileTypes: true })\n\n// unrolled for better inlining, this seems to get better performance\n// than something like:\n// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)\n// which would be a bit cleaner.\n\nconst chmod = (path: fs.PathLike, mode: fs.Mode): Promise =>\n new Promise((res, rej) =>\n fs.chmod(path, mode, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst mkdir = (\n path: fs.PathLike,\n options?:\n | fs.Mode\n | (fs.MakeDirectoryOptions & { recursive?: boolean | null })\n | undefined\n | null,\n): Promise =>\n new Promise((res, rej) =>\n fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))),\n )\n\nconst readdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.readdir(path, { withFileTypes: true }, (er, data) =>\n er ? rej(er) : res(data),\n ),\n )\n\nconst rename = (oldPath: fs.PathLike, newPath: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rename(oldPath, newPath, (er, ...d: any[]) =>\n er ? rej(er) : res(...d),\n ),\n )\n\nconst rm = (path: fs.PathLike, options: fs.RmOptions): Promise =>\n new Promise((res, rej) =>\n fs.rm(path, options, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst rmdir = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.rmdir(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst stat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.stat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst lstat = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.lstat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst unlink = (path: fs.PathLike): Promise =>\n new Promise((res, rej) =>\n fs.unlink(path, (er, ...d: any[]) => (er ? 
rej(er) : res(...d))),\n )\n\nexport const promises = {\n chmod,\n mkdir,\n readdir,\n rename,\n rm,\n rmdir,\n stat,\n lstat,\n unlink,\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts new file mode 100644 index 00000000000000..f158cc27025b16 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts @@ -0,0 +1,3 @@ +export declare const ignoreENOENT: (p: Promise) => Promise; +export declare const ignoreENOENTSync: (fn: () => any) => any; +//# sourceMappingURL=ignore-enoent.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map new file mode 100644 index 00000000000000..2cfb3bbac5fab7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.d.ts","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,YAAY,MAAa,OAAO,CAAC,GAAG,CAAC,iBAK9C,CAAA;AAEJ,eAAO,MAAM,gBAAgB,OAAQ,MAAM,GAAG,QAQ7C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js new file mode 100644 index 00000000000000..02595342121f79 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js @@ -0,0 +1,21 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.ignoreENOENTSync = exports.ignoreENOENT = void 0; +const ignoreENOENT = async (p) => p.catch(er => { + if (er.code !== 'ENOENT') { + throw er; + } +}); +exports.ignoreENOENT = ignoreENOENT; +const ignoreENOENTSync = (fn) => { + try { + return fn(); + } + catch (er) { + if (er?.code !== 'ENOENT') { + throw er; + } + } +}; +exports.ignoreENOENTSync = ignoreENOENTSync; +//# sourceMappingURL=ignore-enoent.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map new file mode 100644 index 00000000000000..7acf4c29d1e56f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/ignore-enoent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.js","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":";;;AAAO,MAAM,YAAY,GAAG,KAAK,EAAE,CAAe,EAAE,EAAE,CACpD,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;IACX,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QACzB,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAC,CAAA;AALS,QAAA,YAAY,gBAKrB;AAEG,MAAM,gBAAgB,GAAG,CAAC,EAAa,EAAE,EAAE;IAChD,IAAI,CAAC;QACH,OAAO,EAAE,EAAE,CAAA;IACb,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YACrD,MAAM,EAAE,CAAA;QACV,CAAC;IACH,CAAC;AACH,CAAC,CAAA;AARY,QAAA,gBAAgB,oBAQ5B","sourcesContent":["export const ignoreENOENT = async (p: Promise) =>\n p.catch(er => {\n if (er.code !== 'ENOENT') {\n throw er\n }\n })\n\nexport const ignoreENOENTSync = (fn: () => any) => {\n try {\n return fn()\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code !== 'ENOENT') {\n throw er\n }\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts new file mode 100644 index 00000000000000..9ec4a124ab613d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts @@ -0,0 +1,50 @@ +import { 
RimrafAsyncOptions, RimrafSyncOptions } from './opt-arg.js';
+export { assertRimrafOptions, isRimrafOptions, type RimrafAsyncOptions, type RimrafOptions, type RimrafSyncOptions, } from './opt-arg.js';
+export declare const nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+export declare const native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+    sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+};
+export declare const manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+export declare const manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+    sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+};
+export declare const windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+export declare const windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+    sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+};
+export declare const posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+export declare const posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+    sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+};
+export declare const moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+export declare const moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+    sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+};
+export declare const rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+export declare const sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+export declare const rimraf: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+    rimraf: (path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>;
+    sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+        sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    };
+    manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+        sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    };
+    nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+        sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    };
+    posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+        sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    };
+    windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & {
+        sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+    };
+    moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean;
+};
+//# sourceMappingURL=index.d.ts.map
\ No newline at end of file
diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map
new file mode 100644
index 00000000000000..0dc659ca730252
--- /dev/null
+++ 
b/deps/npm/node_modules/rimraf/dist/commonjs/index.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,cAAc,CAAA;AASrB,OAAO,EACL,mBAAmB,EACnB,eAAe,EACf,KAAK,kBAAkB,EACvB,KAAK,aAAa,EAClB,KAAK,iBAAiB,GACvB,MAAM,cAAc,CAAA;AAqCrB,eAAO,MAAM,UAAU,SAdd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAcF,CAAA;AACpD,eAAO,MAAM,MAAM,UAjCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAegB,CAAA;AAE7E,eAAO,MAAM,UAAU,SAjBd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAiBF,CAAA;AACpD,eAAO,MAAM,MAAM,UApCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAkBgB,CAAA;AAE7E,eAAO,MAAM,WAAW,SApBf,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoBA,CAAA;AACtD,eAAO,MAAM,OAAO,UAvCV,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAqBmB,CAAA;AAEhF,eAAO,MAAM,SAAS,SAvBb,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAuBJ,CAAA;AAClD,eAAO,MAAM,KAAK,UA1CR,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAwBa,CAAA;AAE1E,eAAO,MAAM,cAAc,SA1BlB,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OA0BM,CAAA;AAC5D,eAAO,MAAM,UAAU,UA7Cb,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CA6B3D,CAAA;AAEF,eAAO,MAAM,UAAU,SA/Bd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAmCrD,CAAA;AACD,eAAO,MAAM,IAAI,SApCR,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoCxB,CAAA;AAK9B,eAAO,MAAM,MAAM,UA3DT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;mBAFX,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;mBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;sBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;qBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;wBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;wBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;2BAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAuD3D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.js b/deps/npm/node_modules/rimraf/dist/commonjs/index.js new file mode 100644 index 00000000000000..09b5d9993c1e78 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/index.js @@ -0,0 +1,78 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? 
mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimraf = exports.sync = exports.rimrafSync = exports.moveRemove = exports.moveRemoveSync = exports.posix = exports.posixSync = exports.windows = exports.windowsSync = exports.manual = exports.manualSync = exports.native = exports.nativeSync = exports.isRimrafOptions = exports.assertRimrafOptions = void 0; +const glob_1 = require("glob"); +const opt_arg_js_1 = require("./opt-arg.js"); +const path_arg_js_1 = __importDefault(require("./path-arg.js")); +const rimraf_manual_js_1 = require("./rimraf-manual.js"); +const rimraf_move_remove_js_1 = require("./rimraf-move-remove.js"); +const rimraf_native_js_1 = require("./rimraf-native.js"); +const rimraf_posix_js_1 = require("./rimraf-posix.js"); +const rimraf_windows_js_1 = require("./rimraf-windows.js"); +const use_native_js_1 = require("./use-native.js"); +var opt_arg_js_2 = require("./opt-arg.js"); +Object.defineProperty(exports, "assertRimrafOptions", { enumerable: true, get: function () { return opt_arg_js_2.assertRimrafOptions; } }); +Object.defineProperty(exports, "isRimrafOptions", { enumerable: true, get: function () { return opt_arg_js_2.isRimrafOptions; } }); +const wrap = (fn) => async (path, opt) => { + const options = (0, opt_arg_js_1.optArg)(opt); + if (options.glob) { + path = await (0, glob_1.glob)(path, options.glob); + } + if (Array.isArray(path)) { + return !!(await Promise.all(path.map(p => fn((0, path_arg_js_1.default)(p, options), options)))).reduce((a, b) => a && b, true); + } + else { + return !!(await fn((0, path_arg_js_1.default)(path, options), options)); + } +}; +const wrapSync = (fn) => (path, opt) => { + const options = (0, opt_arg_js_1.optArgSync)(opt); + if (options.glob) { + path = (0, glob_1.globSync)(path, options.glob); + } + if (Array.isArray(path)) { + return !!path + .map(p => fn((0, path_arg_js_1.default)(p, options), options)) + .reduce((a, b) => a && b, true); + } + else { + return !!fn((0, path_arg_js_1.default)(path, options), options); + } +}; +exports.nativeSync = wrapSync(rimraf_native_js_1.rimrafNativeSync); +exports.native = Object.assign(wrap(rimraf_native_js_1.rimrafNative), { sync: exports.nativeSync }); +exports.manualSync = wrapSync(rimraf_manual_js_1.rimrafManualSync); +exports.manual = Object.assign(wrap(rimraf_manual_js_1.rimrafManual), { sync: exports.manualSync }); +exports.windowsSync = wrapSync(rimraf_windows_js_1.rimrafWindowsSync); +exports.windows = Object.assign(wrap(rimraf_windows_js_1.rimrafWindows), { sync: exports.windowsSync }); +exports.posixSync = wrapSync(rimraf_posix_js_1.rimrafPosixSync); +exports.posix = Object.assign(wrap(rimraf_posix_js_1.rimrafPosix), { sync: exports.posixSync }); +exports.moveRemoveSync = wrapSync(rimraf_move_remove_js_1.rimrafMoveRemoveSync); +exports.moveRemove = Object.assign(wrap(rimraf_move_remove_js_1.rimrafMoveRemove), { + sync: exports.moveRemoveSync, +}); +exports.rimrafSync = wrapSync((path, opt) => (0, use_native_js_1.useNativeSync)(opt) ? + (0, rimraf_native_js_1.rimrafNativeSync)(path, opt) + : (0, rimraf_manual_js_1.rimrafManualSync)(path, opt)); +exports.sync = exports.rimrafSync; +const rimraf_ = wrap((path, opt) => (0, use_native_js_1.useNative)(opt) ? 
(0, rimraf_native_js_1.rimrafNative)(path, opt) : (0, rimraf_manual_js_1.rimrafManual)(path, opt)); +exports.rimraf = Object.assign(rimraf_, { + rimraf: rimraf_, + sync: exports.rimrafSync, + rimrafSync: exports.rimrafSync, + manual: exports.manual, + manualSync: exports.manualSync, + native: exports.native, + nativeSync: exports.nativeSync, + posix: exports.posix, + posixSync: exports.posixSync, + windows: exports.windows, + windowsSync: exports.windowsSync, + moveRemove: exports.moveRemove, + moveRemoveSync: exports.moveRemoveSync, +}); +exports.rimraf.rimraf = exports.rimraf; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map new file mode 100644 index 00000000000000..5ed1978ae92f1f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":";;;;;;AAAA,+BAAqC;AACrC,6CAKqB;AACrB,gEAAmC;AACnC,yDAAmE;AACnE,mEAAgF;AAChF,yDAAmE;AACnE,uDAAgE;AAChE,2DAAsE;AACtE,mDAA0D;AAE1D,2CAMqB;AALnB,iHAAA,mBAAmB,OAAA;AACnB,6GAAA,eAAe,OAAA;AAMjB,MAAM,IAAI,GACR,CAAC,EAA0D,EAAE,EAAE,CAC/D,KAAK,EACH,IAAuB,EACvB,GAAwB,EACN,EAAE;IACpB,MAAM,OAAO,GAAG,IAAA,mBAAM,EAAC,GAAG,CAAC,CAAA;IAC3B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,MAAM,IAAA,WAAI,EAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACvC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,CACP,MAAM,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CACnE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAClC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;IACtD,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,QAAQ,GACZ,CAAC,EAAgD,EAAE,EAAE,CACrD,CAAC,IAAuB,EAAE,GAAuB,EAAW,EAAE;IAC5D,MAAM,OAAO,GAAG,IAAA,uBAAU,EAAC,GAAG,CAAC,CAAA;IAC/B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,IAAA,eAAQ,EAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACrC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,IAAI;aACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;aAC1C,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IACnC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,EAAE,CAAC,IAAA,qBAAO,EAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9C,CAAC;AACH,CAAC,CAAA;AAEU,QAAA,UAAU,GAAG,QAAQ,CAAC,mCAAgB,CAAC,CAAA;AACvC,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,+BAAY,CAAC,EAAE,EAAE,IAAI,EAAE,kBAAU,EAAE,CAAC,CAAA;AAEhE,QAAA,UAAU,GAAG,QAAQ,CAAC,mCAAgB,CAAC,CAAA;AACvC,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,+BAAY,CAAC,EAAE,EAAE,IAAI,EAAE,kBAAU,EAAE,CAAC,CAAA;AAEhE,QAAA,WAAW,GAAG,QAAQ,CAAC,qCAAiB,CAAC,CAAA;AACzC,QAAA,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,iCAAa,CAAC,EAAE,EAAE,IAAI,EAAE,mBAAW,EAAE,CAAC,CAAA;AAEnE,QAAA,SAAS,GAAG,QAAQ,CAAC,iCAAe,CAAC,CAAA;AACrC,QAAA,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,6BAAW,CAAC,EAAE,EAAE,IAAI,EAAE,iBAAS,EAAE,CAAC,CAAA;AAE7D,QAAA,cAAc,GAAG,QAAQ,CAAC,4CAAoB,CAAC,CAAA;AAC/C,QAAA,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,wCAAgB,CAAC,EAAE;IAC9D,IAAI,EAAE,sBAAc;CACrB,CAAC,CAAA;AAEW,QAAA,UAAU,GAAG,QAAQ,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CAC/C,IAAA,6BAAa,EAAC,GAAG,CAAC,CAAC,CAAC;IAClB,IAAA,mCAAgB,EAAC,IAAI,EAAE,GAAG,CAAC;IAC7B,CAAC,CAAC,IAAA,mCAAgB,EAAC,IAAI,EAAE,GAAG,CAAC,CAC9B,CAAA;AACY,QAAA,IAAI,GAAG,kBAAU,CAAA;AAE
9B,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CACjC,IAAA,yBAAS,EAAC,GAAG,CAAC,CAAC,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,IAAA,+BAAY,EAAC,IAAI,EAAE,GAAG,CAAC,CACnE,CAAA;AACY,QAAA,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;IAC3C,MAAM,EAAE,OAAO;IACf,IAAI,EAAE,kBAAU;IAChB,UAAU,EAAE,kBAAU;IACtB,MAAM,EAAN,cAAM;IACN,UAAU,EAAV,kBAAU;IACV,MAAM,EAAN,cAAM;IACN,UAAU,EAAV,kBAAU;IACV,KAAK,EAAL,aAAK;IACL,SAAS,EAAT,iBAAS;IACT,OAAO,EAAP,eAAO;IACP,WAAW,EAAX,mBAAW;IACX,UAAU,EAAV,kBAAU;IACV,cAAc,EAAd,sBAAc;CACf,CAAC,CAAA;AACF,cAAM,CAAC,MAAM,GAAG,cAAM,CAAA","sourcesContent":["import { glob, globSync } from 'glob'\nimport {\n optArg,\n optArgSync,\n RimrafAsyncOptions,\n RimrafSyncOptions,\n} from './opt-arg.js'\nimport pathArg from './path-arg.js'\nimport { rimrafManual, rimrafManualSync } from './rimraf-manual.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nimport { rimrafNative, rimrafNativeSync } from './rimraf-native.js'\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\nimport { useNative, useNativeSync } from './use-native.js'\n\nexport {\n assertRimrafOptions,\n isRimrafOptions,\n type RimrafAsyncOptions,\n type RimrafOptions,\n type RimrafSyncOptions,\n} from './opt-arg.js'\n\nconst wrap =\n (fn: (p: string, o: RimrafAsyncOptions) => Promise) =>\n async (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ): Promise => {\n const options = optArg(opt)\n if (options.glob) {\n path = await glob(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!(\n await Promise.all(path.map(p => fn(pathArg(p, options), options)))\n ).reduce((a, b) => a && b, true)\n } else {\n return !!(await fn(pathArg(path, options), options))\n }\n }\n\nconst wrapSync =\n (fn: (p: string, o: RimrafSyncOptions) => boolean) =>\n (path: string | string[], opt?: RimrafSyncOptions): boolean => {\n const options = optArgSync(opt)\n if (options.glob) {\n path = globSync(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!path\n .map(p => fn(pathArg(p, options), options))\n .reduce((a, b) => a && b, true)\n } else {\n return !!fn(pathArg(path, options), options)\n }\n }\n\nexport const nativeSync = wrapSync(rimrafNativeSync)\nexport const native = Object.assign(wrap(rimrafNative), { sync: nativeSync })\n\nexport const manualSync = wrapSync(rimrafManualSync)\nexport const manual = Object.assign(wrap(rimrafManual), { sync: manualSync })\n\nexport const windowsSync = wrapSync(rimrafWindowsSync)\nexport const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync })\n\nexport const posixSync = wrapSync(rimrafPosixSync)\nexport const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync })\n\nexport const moveRemoveSync = wrapSync(rimrafMoveRemoveSync)\nexport const moveRemove = Object.assign(wrap(rimrafMoveRemove), {\n sync: moveRemoveSync,\n})\n\nexport const rimrafSync = wrapSync((path, opt) =>\n useNativeSync(opt) ?\n rimrafNativeSync(path, opt)\n : rimrafManualSync(path, opt),\n)\nexport const sync = rimrafSync\n\nconst rimraf_ = wrap((path, opt) =>\n useNative(opt) ? 
rimrafNative(path, opt) : rimrafManual(path, opt),\n)\nexport const rimraf = Object.assign(rimraf_, {\n rimraf: rimraf_,\n sync: rimrafSync,\n rimrafSync: rimrafSync,\n manual,\n manualSync,\n native,\n nativeSync,\n posix,\n posixSync,\n windows,\n windowsSync,\n moveRemove,\n moveRemoveSync,\n})\nrimraf.rimraf = rimraf\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts new file mode 100644 index 00000000000000..c869d4ae85251b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts @@ -0,0 +1,34 @@ +import { Dirent, Stats } from 'fs'; +import { GlobOptions } from 'glob'; +export declare const isRimrafOptions: (o: any) => o is RimrafOptions; +export declare const assertRimrafOptions: (o: any) => void; +export interface RimrafAsyncOptions { + preserveRoot?: boolean; + tmp?: string; + maxRetries?: number; + retryDelay?: number; + backoff?: number; + maxBackoff?: number; + signal?: AbortSignal; + glob?: boolean | GlobOptions; + filter?: ((path: string, ent: Dirent | Stats) => boolean) | ((path: string, ent: Dirent | Stats) => Promise); +} +export interface RimrafSyncOptions extends RimrafAsyncOptions { + filter?: (path: string, ent: Dirent | Stats) => boolean; +} +export type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions; +export declare const optArg: (opt?: RimrafAsyncOptions) => (RimrafAsyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafAsyncOptions & { + glob: undefined; +}); +export declare const optArgSync: (opt?: RimrafSyncOptions) => (RimrafSyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafSyncOptions & { + glob: undefined; +}); +//# sourceMappingURL=opt-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map new file mode 100644 index 00000000000000..89e83b205ac628 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"opt-arg.d.ts","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI,CAAA;AAClC,OAAO,EAAE,WAAW,EAAE,MAAM,MAAM,CAAA;AAKlC,eAAO,MAAM,eAAe,MAAO,GAAG,KAAG,CAAC,IAAI,aAUX,CAAA;AAEnC,eAAO,MAAM,mBAAmB,EAAE,CAAC,CAAC,EAAE,GAAG,KAAK,IAM7C,CAAA;AAED,MAAM,WAAW,kBAAkB;IACjC,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,IAAI,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5B,MAAM,CAAC,EACH,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,GAChD,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,CAAA;CAC9D;AAED,MAAM,WAAW,iBAAkB,SAAQ,kBAAkB;IAC3D,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAA;CACxD;AAED,MAAM,MAAM,aAAa,GAAG,iBAAiB,GAAG,kBAAkB,CAAA;AAqClE,eAAO,MAAM,MAAM,SAAS,kBAAkB;UA/BlC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA6B0C,CAAA;AACpE,eAAO,MAAM,UAAU,SAAS,iBAAiB;UAhCrC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA8B6C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js new file mode 100644 index 00000000000000..1d030a16d3c0f0 --- /dev/null +++ 
b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js @@ -0,0 +1,53 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.optArgSync = exports.optArg = exports.assertRimrafOptions = exports.isRimrafOptions = void 0; +const typeOrUndef = (val, t) => typeof val === 'undefined' || typeof val === t; +const isRimrafOptions = (o) => !!o && + typeof o === 'object' && + typeOrUndef(o.preserveRoot, 'boolean') && + typeOrUndef(o.tmp, 'string') && + typeOrUndef(o.maxRetries, 'number') && + typeOrUndef(o.retryDelay, 'number') && + typeOrUndef(o.backoff, 'number') && + typeOrUndef(o.maxBackoff, 'number') && + (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) && + typeOrUndef(o.filter, 'function'); +exports.isRimrafOptions = isRimrafOptions; +const assertRimrafOptions = (o) => { + if (!(0, exports.isRimrafOptions)(o)) { + throw new Error('invalid rimraf options'); + } +}; +exports.assertRimrafOptions = assertRimrafOptions; +const optArgT = (opt) => { + (0, exports.assertRimrafOptions)(opt); + const { glob, ...options } = opt; + if (!glob) { + return options; + } + const globOpt = glob === true ? + opt.signal ? + { signal: opt.signal } + : {} + : opt.signal ? + { + signal: opt.signal, + ...glob, + } + : glob; + return { + ...options, + glob: { + ...globOpt, + // always get absolute paths from glob, to ensure + // that we are referencing the correct thing. + absolute: true, + withFileTypes: false, + }, + }; +}; +const optArg = (opt = {}) => optArgT(opt); +exports.optArg = optArg; +const optArgSync = (opt = {}) => optArgT(opt); +exports.optArgSync = optArgSync; +//# sourceMappingURL=opt-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map new file mode 100644 index 00000000000000..d815735d639a46 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/opt-arg.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"opt-arg.js","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":";;;AAGA,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAE,CAAS,EAAE,EAAE,CAC1C,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,KAAK,CAAC,CAAA;AAEzC,MAAM,eAAe,GAAG,CAAC,CAAM,EAAsB,EAAE,CAC5D,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,KAAK,QAAQ;IACrB,WAAW,CAAC,CAAC,CAAC,YAAY,EAAE,SAAS,CAAC;IACtC,WAAW,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC;IAC5B,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC;IAChC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IAC1E,WAAW,CAAC,CAAC,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;AAVtB,QAAA,eAAe,mBAUO;AAE5B,MAAM,mBAAmB,GAAqB,CACnD,CAAM,EACsB,EAAE;IAC9B,IAAI,CAAC,IAAA,uBAAe,EAAC,CAAC,CAAC,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;IAC3C,CAAC;AACH,CAAC,CAAA;AANY,QAAA,mBAAmB,uBAM/B;AAsBD,MAAM,OAAO,GAAG,CACd,GAAM,EAKsB,EAAE;IAC9B,IAAA,2BAAmB,EAAC,GAAG,CAAC,CAAA;IACxB,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAChC,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,OAAO,OAAkC,CAAA;IAC3C,CAAC;IACD,MAAM,OAAO,GACX,IAAI,KAAK,IAAI,CAAC,CAAC;QACb,GAAG,CAAC,MAAM,CAAC,CAAC;YACV,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE;YACxB,CAAC,CAAC,EAAE;QACN,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACZ;gBACE,MAAM,EAAE,GAAG,CAAC,MAAM;gBAClB,GAAG,IAAI;aACR;YACH,CAAC,CAAC,IAAI,CAAA;IACR,OAAO;QACL,GAAG,OAAO;QACV,IAAI,EAAE;YACJ,GAAG,OAAO;YACV,iDAAiD;YACjD,6CAA6C;YAC7C,QAAQ,EAAE,IAAI;YACd,aAAa,EAAE,KAAK;SACrB;KACsD,CAAA;AAC3D,CAAC,CAAA;AAEM,MAAM,MAAM,GAAG,CAAC,MAA0B,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AAAvD,QAAA,MAAM,UAAiD;AAC7D,MAAM,UAAU,GAAG,CAAC,MAAyB,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AAA1D,QAAA,UAAU,cAAgD","sourcesContent":["import { Dirent, Stats } from 'fs'\nimport { GlobOptions } from 'glob'\n\nconst typeOrUndef = (val: any, t: string) =>\n typeof val === 'undefined' || typeof val === t\n\nexport const isRimrafOptions = (o: any): o is RimrafOptions =>\n !!o &&\n typeof o === 'object' &&\n typeOrUndef(o.preserveRoot, 'boolean') &&\n typeOrUndef(o.tmp, 'string') &&\n typeOrUndef(o.maxRetries, 'number') &&\n typeOrUndef(o.retryDelay, 'number') &&\n typeOrUndef(o.backoff, 'number') &&\n typeOrUndef(o.maxBackoff, 'number') &&\n (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&\n typeOrUndef(o.filter, 'function')\n\nexport const assertRimrafOptions: (o: any) => void = (\n o: any,\n): asserts o is RimrafOptions => {\n if (!isRimrafOptions(o)) {\n throw new Error('invalid rimraf options')\n }\n}\n\nexport interface RimrafAsyncOptions {\n preserveRoot?: boolean\n tmp?: string\n maxRetries?: number\n retryDelay?: number\n backoff?: number\n maxBackoff?: number\n signal?: AbortSignal\n glob?: boolean | GlobOptions\n filter?:\n | ((path: string, ent: Dirent | Stats) => boolean)\n | ((path: string, ent: Dirent | Stats) => Promise)\n}\n\nexport interface RimrafSyncOptions extends RimrafAsyncOptions {\n filter?: (path: string, ent: Dirent | Stats) => boolean\n}\n\nexport type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions\n\nconst optArgT = (\n opt: T,\n):\n | (T & {\n glob: GlobOptions & { withFileTypes: false }\n })\n | (T & { glob: undefined }) => {\n assertRimrafOptions(opt)\n const { glob, ...options } = opt\n if (!glob) {\n return options as T & { glob: undefined }\n }\n const globOpt =\n glob === true ?\n opt.signal ?\n { signal: opt.signal }\n : {}\n : opt.signal ?\n {\n signal: opt.signal,\n ...glob,\n }\n : glob\n 
return {\n ...options,\n glob: {\n ...globOpt,\n // always get absolute paths from glob, to ensure\n // that we are referencing the correct thing.\n absolute: true,\n withFileTypes: false,\n },\n } as T & { glob: GlobOptions & { withFileTypes: false } }\n}\n\nexport const optArg = (opt: RimrafAsyncOptions = {}) => optArgT(opt)\nexport const optArgSync = (opt: RimrafSyncOptions = {}) => optArgT(opt)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/package.json b/deps/npm/node_modules/rimraf/dist/commonjs/package.json new file mode 100644 index 00000000000000..5bbefffbabee39 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/package.json @@ -0,0 +1,3 @@ +{ + "type": "commonjs" +} diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts new file mode 100644 index 00000000000000..c0b7e7cb4b15e3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions } from './index.js'; +declare const pathArg: (path: string, opt?: RimrafAsyncOptions) => string; +export default pathArg; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map new file mode 100644 index 00000000000000..4fe93c3a8aec47 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAG/C,QAAA,MAAM,OAAO,SAAU,MAAM,QAAO,kBAAkB,WAgDrD,CAAA;AAED,eAAe,OAAO,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js new file mode 100644 index 00000000000000..8a4908aa08ef50 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js @@ -0,0 +1,52 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +const path_1 = require("path"); +const util_1 = require("util"); +const platform_js_1 = __importDefault(require("./platform.js")); +const pathArg = (path, opt = {}) => { + const type = typeof path; + if (type !== 'string') { + const ctor = path && type === 'object' && path.constructor; + const received = ctor && ctor.name ? `an instance of ${ctor.name}` + : type === 'object' ? (0, util_1.inspect)(path) + : `type ${type} ${path}`; + const msg = 'The "path" argument must be of type string. 
' + `Received ${received}`; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_TYPE', + }); + } + if (/\0/.test(path)) { + // simulate same failure that node raises + const msg = 'path must be a string without null bytes'; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_VALUE', + }); + } + path = (0, path_1.resolve)(path); + const { root } = (0, path_1.parse)(path); + if (path === root && opt.preserveRoot !== false) { + const msg = 'refusing to remove root directory without preserveRoot:false'; + throw Object.assign(new Error(msg), { + path, + code: 'ERR_PRESERVE_ROOT', + }); + } + if (platform_js_1.default === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = (0, path_1.parse)(path); + if (badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +exports.default = pathArg; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map new file mode 100644 index 00000000000000..40e4a19e7003a0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":";;;;;AAAA,+BAAqC;AACrC,+BAA8B;AAE9B,gEAAoC;AAEpC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,MAA0B,EAAE,EAAE,EAAE;IAC7D,MAAM,IAAI,GAAG,OAAO,IAAI,CAAA;IACxB,IAAI,IAAI,KAAK,QAAQ,EAAE,CAAC;QACtB,MAAM,IAAI,GAAG,IAAI,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,WAAW,CAAA;QAC1D,MAAM,QAAQ,GACZ,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,kBAAkB,IAAI,CAAC,IAAI,EAAE;YACjD,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,IAAA,cAAO,EAAC,IAAI,CAAC;gBACnC,CAAC,CAAC,QAAQ,IAAI,IAAI,IAAI,EAAE,CAAA;QAC1B,MAAM,GAAG,GACP,8CAA8C,GAAG,YAAY,QAAQ,EAAE,CAAA;QACzE,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,sBAAsB;SAC7B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACpB,yCAAyC;QACzC,MAAM,GAAG,GAAG,0CAA0C,CAAA;QACtD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,GAAG,IAAA,cAAO,EAAC,IAAI,CAAC,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;IAE5B,IAAI,IAAI,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAChD,MAAM,GAAG,GAAG,8DAA8D,CAAA;QAC1E,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI;YACJ,IAAI,EAAE,mBAAmB;SAC1B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,qBAAQ,KAAK,OAAO,EAAE,CAAC;QACzB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,IAAA,YAAK,EAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;YAClD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,kBAAe,OAAO,CAAA","sourcesContent":["import { parse, resolve } from 'path'\nimport { inspect } from 'util'\nimport { RimrafAsyncOptions } from './index.js'\nimport platform from './platform.js'\n\nconst pathArg = (path: string, opt: RimrafAsyncOptions = {}) => {\n const type = typeof path\n if (type !== 'string') {\n const ctor = path && type === 'object' && path.constructor\n const received =\n ctor && ctor.name ? `an instance of ${ctor.name}`\n : type === 'object' ? 
inspect(path)\n : `type ${type} ${path}`\n const msg =\n 'The \"path\" argument must be of type string. ' + `Received ${received}`\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_TYPE',\n })\n }\n\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n const msg = 'path must be a string without null bytes'\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n })\n }\n\n path = resolve(path)\n const { root } = parse(path)\n\n if (path === root && opt.preserveRoot !== false) {\n const msg = 'refusing to remove root directory without preserveRoot:false'\n throw Object.assign(new Error(msg), {\n path,\n code: 'ERR_PRESERVE_ROOT',\n })\n }\n\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n\nexport default pathArg\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts new file mode 100644 index 00000000000000..e127a8e529ffd2 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts @@ -0,0 +1,3 @@ +declare const _default: string; +export default _default; +//# sourceMappingURL=platform.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map new file mode 100644 index 00000000000000..ef2e6734f8cfbb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.d.ts","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";AAAA,wBAA0E"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js new file mode 100644 index 00000000000000..58f197ffbf8249 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js @@ -0,0 +1,4 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.default = process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform; +//# sourceMappingURL=platform.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map new file mode 100644 index 00000000000000..814cdb8c244c57 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/platform.js.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.js","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";;AAAA,kBAAe,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA","sourcesContent":["export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts new file mode 100644 index 00000000000000..cce73097f1681f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts @@ -0,0 +1,3 @@ +export declare const readdirOrError: (path: string) => Promise; +export declare const readdirOrErrorSync: (path: string) => import("fs").Dirent[] | NodeJS.ErrnoException; +//# sourceMappingURL=readdir-or-error.d.ts.map \ No newline at end of file diff 
--git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map new file mode 100644 index 00000000000000..8a19f6bdfd0706 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.d.ts","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,cAAc,SAAU,MAAM,2DACa,CAAA;AACxD,eAAO,MAAM,kBAAkB,SAAU,MAAM,kDAM9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js new file mode 100644 index 00000000000000..75330cb3816c8b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js @@ -0,0 +1,19 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.readdirOrErrorSync = exports.readdirOrError = void 0; +// returns an array of entries if readdir() works, +// or the error that readdir() raised if not. +const fs_js_1 = require("./fs.js"); +const { readdir } = fs_js_1.promises; +const readdirOrError = (path) => readdir(path).catch(er => er); +exports.readdirOrError = readdirOrError; +const readdirOrErrorSync = (path) => { + try { + return (0, fs_js_1.readdirSync)(path); + } + catch (er) { + return er; + } +}; +exports.readdirOrErrorSync = readdirOrErrorSync; +//# sourceMappingURL=readdir-or-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map new file mode 100644 index 00000000000000..61dbfe11956140 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/readdir-or-error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.js","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":";;;AAAA,kDAAkD;AAClD,6CAA6C;AAC7C,mCAA+C;AAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,gBAAQ,CAAA;AACrB,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7C,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,EAA2B,CAAC,CAAA;AAD3C,QAAA,cAAc,kBAC6B;AACjD,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IACjD,IAAI,CAAC;QACH,OAAO,IAAA,mBAAW,EAAC,IAAI,CAAC,CAAA;IAC1B,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,EAA2B,CAAA;IACpC,CAAC;AACH,CAAC,CAAA;AANY,QAAA,kBAAkB,sBAM9B","sourcesContent":["// returns an array of entries if readdir() works,\n// or the error that readdir() raised if not.\nimport { promises, readdirSync } from './fs.js'\nconst { readdir } = promises\nexport const readdirOrError = (path: string) =>\n readdir(path).catch(er => er as NodeJS.ErrnoException)\nexport const readdirOrErrorSync = (path: string) => {\n try {\n return readdirSync(path)\n } catch (er) {\n return er as NodeJS.ErrnoException\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts new file mode 100644 index 00000000000000..c0af0dd62f0df9 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts @@ -0,0 +1,8 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const MAXBACKOFF = 200; +export declare const RATE = 1.2; +export declare const MAXRETRIES = 10; +export declare const codes: Set; +export declare const retryBusy: (fn: (path: string) => Promise) => (path: string, opt: RimrafAsyncOptions, backoff?: number, total?: number) => Promise; +export declare const 
retryBusySync: (fn: (path: string) => any) => (path: string, opt: RimrafOptions) => any; +//# sourceMappingURL=retry-busy.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map new file mode 100644 index 00000000000000..21960c58914b4b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"retry-busy.d.ts","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAE9D,eAAO,MAAM,UAAU,MAAM,CAAA;AAC7B,eAAO,MAAM,IAAI,MAAM,CAAA;AACvB,eAAO,MAAM,UAAU,KAAK,CAAA;AAC5B,eAAO,MAAM,KAAK,aAAyC,CAAA;AAE3D,eAAO,MAAM,SAAS,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAElD,MAAM,OACP,kBAAkB,mDAkC1B,CAAA;AAGD,eAAO,MAAM,aAAa,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAC/B,MAAM,OAAO,aAAa,QAsBjD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js new file mode 100644 index 00000000000000..5f9d15252bb10f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js @@ -0,0 +1,68 @@ +"use strict"; +// note: max backoff is the maximum that any *single* backoff will do +Object.defineProperty(exports, "__esModule", { value: true }); +exports.retryBusySync = exports.retryBusy = exports.codes = exports.MAXRETRIES = exports.RATE = exports.MAXBACKOFF = void 0; +exports.MAXBACKOFF = 200; +exports.RATE = 1.2; +exports.MAXRETRIES = 10; +exports.codes = new Set(['EMFILE', 'ENFILE', 'EBUSY']); +const retryBusy = (fn) => { + const method = async (path, opt, backoff = 1, total = 0) => { + const mbo = opt.maxBackoff || exports.MAXBACKOFF; + const rate = opt.backoff || exports.RATE; + const max = opt.maxRetries || exports.MAXRETRIES; + let retries = 0; + while (true) { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && fer?.code && exports.codes.has(fer.code)) { + backoff = Math.ceil(backoff * rate); + total = backoff + total; + if (total < mbo) { + return new Promise((res, rej) => { + setTimeout(() => { + method(path, opt, backoff, total).then(res, rej); + }, backoff); + }); + } + if (retries < max) { + retries++; + continue; + } + } + throw er; + } + } + }; + return method; +}; +exports.retryBusy = retryBusy; +// just retries, no async so no backoff +const retryBusySync = (fn) => { + const method = (path, opt) => { + const max = opt.maxRetries || exports.MAXRETRIES; + let retries = 0; + while (true) { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && + fer?.code && + exports.codes.has(fer.code) && + retries < max) { + retries++; + continue; + } + throw er; + } + } + }; + return method; +}; +exports.retryBusySync = retryBusySync; +//# sourceMappingURL=retry-busy.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map new file mode 100644 index 00000000000000..1f1051d2f115fc --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/retry-busy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"retry-busy.js","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":";AAAA,qEAAqE;;;AAIxD,QAAA,UAAU,GAAG,GAAG,CAAA;AAChB,QAAA,IAAI,GAAG,GAAG,CAAA;AACV,QAAA,UAAU,GAAG,EAAE,CAAA;AACf,QAAA,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAA;AAEpD,MAAM,SAAS,GAAG,CAAC,EAAkC,EAAE,EAAE;IAC9D,MAAM,MAAM,GAAG,KAAK,EAClB,IAAY,EACZ,GAAuB,EACvB,OAAO,GAAG,CAAC,EACX,KAAK,GAAG,CAAC,EACT,EAAE;QACF,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,MAAM,IAAI,GAAG,GAAG,CAAC,OAAO,IAAI,YAAI,CAAA;QAChC,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;YACvB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IAAI,GAAG,EAAE,IAAI,KAAK,IAAI,IAAI,GAAG,EAAE,IAAI,IAAI,aAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;oBAC3D,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,CAAA;oBACnC,KAAK,GAAG,OAAO,GAAG,KAAK,CAAA;oBACvB,IAAI,KAAK,GAAG,GAAG,EAAE,CAAC;wBAChB,OAAO,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;4BAC9B,UAAU,CAAC,GAAG,EAAE;gCACd,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;4BAClD,CAAC,EAAE,OAAO,CAAC,CAAA;wBACb,CAAC,CAAC,CAAA;oBACJ,CAAC;oBACD,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC;wBAClB,OAAO,EAAE,CAAA;wBACT,SAAQ;oBACV,CAAC;gBACH,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AArCY,QAAA,SAAS,aAqCrB;AAED,uCAAuC;AAChC,MAAM,aAAa,GAAG,CAAC,EAAyB,EAAE,EAAE;IACzD,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,GAAkB,EAAE,EAAE;QAClD,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,kBAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;YACjB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IACE,GAAG,EAAE,IAAI,KAAK,IAAI;oBAClB,GAAG,EAAE,IAAI;oBACT,aAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;oBACnB,OAAO,GAAG,GAAG,EACb,CAAC;oBACD,OAAO,EAAE,CAAA;oBACT,SAAQ;gBACV,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IACD,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAvBY,QAAA,aAAa,iBAuBzB","sourcesContent":["// note: max backoff is the maximum that any *single* backoff will do\n\nimport { RimrafAsyncOptions, RimrafOptions } from './index.js'\n\nexport const MAXBACKOFF = 200\nexport const RATE = 1.2\nexport const MAXRETRIES = 10\nexport const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY'])\n\nexport const retryBusy = (fn: (path: string) => Promise) => {\n const method = async (\n path: string,\n opt: RimrafAsyncOptions,\n backoff = 1,\n total = 0,\n ) => {\n const mbo = opt.maxBackoff || MAXBACKOFF\n const rate = opt.backoff || RATE\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.path === path && fer?.code && codes.has(fer.code)) {\n backoff = Math.ceil(backoff * rate)\n total = backoff + total\n if (total < mbo) {\n return new Promise((res, rej) => {\n setTimeout(() => {\n method(path, opt, backoff, total).then(res, rej)\n }, backoff)\n })\n }\n if (retries < max) {\n retries++\n continue\n }\n }\n throw er\n }\n }\n }\n\n return method\n}\n\n// just retries, no async so no backoff\nexport const retryBusySync = (fn: (path: string) => any) => {\n const method = (path: string, opt: RimrafOptions) => {\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (\n fer?.path === 
path &&\n fer?.code &&\n codes.has(fer.code) &&\n retries < max\n ) {\n retries++\n continue\n }\n throw er\n }\n }\n }\n return method\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts new file mode 100644 index 00000000000000..35c5c86844c7fa --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts @@ -0,0 +1,3 @@ +export declare const rimrafManual: (path: string, opt: import("./opt-arg.js").RimrafAsyncOptions) => Promise; +export declare const rimrafManualSync: (path: string, opt: import("./opt-arg.js").RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map new file mode 100644 index 00000000000000..19bd25149ceb07 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.d.ts","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,YAAY,oFAAqD,CAAA;AAC9E,eAAO,MAAM,gBAAgB,0EAC+B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js new file mode 100644 index 00000000000000..1c95ae23bb98b1 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js @@ -0,0 +1,12 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafManualSync = exports.rimrafManual = void 0; +const platform_js_1 = __importDefault(require("./platform.js")); +const rimraf_posix_js_1 = require("./rimraf-posix.js"); +const rimraf_windows_js_1 = require("./rimraf-windows.js"); +exports.rimrafManual = platform_js_1.default === 'win32' ? rimraf_windows_js_1.rimrafWindows : rimraf_posix_js_1.rimrafPosix; +exports.rimrafManualSync = platform_js_1.default === 'win32' ? rimraf_windows_js_1.rimrafWindowsSync : rimraf_posix_js_1.rimrafPosixSync; +//# sourceMappingURL=rimraf-manual.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map new file mode 100644 index 00000000000000..e26e44577d9f0d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-manual.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.js","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":";;;;;;AAAA,gEAAoC;AAEpC,uDAAgE;AAChE,2DAAsE;AAEzD,QAAA,YAAY,GAAG,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iCAAa,CAAC,CAAC,CAAC,6BAAW,CAAA;AACjE,QAAA,gBAAgB,GAC3B,qBAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,qCAAiB,CAAC,CAAC,CAAC,iCAAe,CAAA","sourcesContent":["import platform from './platform.js'\n\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\n\nexport const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix\nexport const rimrafManualSync =\n platform === 'win32' ? 
rimrafWindowsSync : rimrafPosixSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts new file mode 100644 index 00000000000000..5d41d40825e4c7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafMoveRemove: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafMoveRemoveSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-move-remove.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map new file mode 100644 index 00000000000000..4062eaebbb1302 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-move-remove.d.ts","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AA6BA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA4ClE,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,kBAAkB,qBAWxB,CAAA;AA4ED,eAAO,MAAM,oBAAoB,SAAU,MAAM,OAAO,iBAAiB,YAUxE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js new file mode 100644 index 00000000000000..ac668d1c9dbbae --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js @@ -0,0 +1,192 @@ +"use strict"; +// https://youtu.be/uhRWMGBjlO8?t=537 +// +// 1. readdir +// 2. for each entry +// a. if a non-empty directory, recurse +// b. if an empty directory, move to random hidden file name in $TEMP +// c. unlink/rmdir $TEMP +// +// This works around the fact that unlink/rmdir is non-atomic and takes +// a non-deterministic amount of time to complete. +// +// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafMoveRemoveSync = exports.rimrafMoveRemove = void 0; +const path_1 = require("path"); +const default_tmp_js_1 = require("./default-tmp.js"); +const ignore_enoent_js_1 = require("./ignore-enoent.js"); +const fs_js_1 = require("./fs.js"); +const { lstat, rename, unlink, rmdir, chmod } = fs_js_1.promises; +const readdir_or_error_js_1 = require("./readdir-or-error.js"); +// crypto.randomBytes is much slower, and Math.random() is enough here +const uniqueFilename = (path) => `.${(0, path_1.basename)(path)}.${Math.random()}`; +const unlinkFixEPERM = async (path) => unlink(path).catch((er) => { + if (er.code === 'EPERM') { + return chmod(path, 0o666).then(() => unlink(path), er2 => { + if (er2.code === 'ENOENT') { + return; + } + throw er; + }); + } + else if (er.code === 'ENOENT') { + return; + } + throw er; +}); +const unlinkFixEPERMSync = (path) => { + try { + (0, fs_js_1.unlinkSync)(path); + } + catch (er) { + if (er?.code === 'EPERM') { + try { + return (0, fs_js_1.chmodSync)(path, 0o666); + } + catch (er2) { + if (er2?.code === 'ENOENT') { + return; + } + throw er; + } + } + else if (er?.code === 'ENOENT') { + return; + } + throw er; + } +}; +const rimrafMoveRemove = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafMoveRemoveDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafMoveRemove = rimrafMoveRemove; +const rimrafMoveRemoveDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDir(path, { ...opt, tmp: await (0, default_tmp_js_1.defaultTmp)(path) }, ent); + } + if (path === opt.tmp && (0, path_1.parse)(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(tmpUnlink(path, opt.tmp, unlinkFixEPERM)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafMoveRemoveDir((0, path_1.resolve)(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. 
+ if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(tmpUnlink(path, opt.tmp, rmdir)); + return true; +}; +const tmpUnlink = async (path, tmp, rm) => { + const tmpFile = (0, path_1.resolve)(tmp, uniqueFilename(path)); + await rename(path, tmpFile); + return await rm(tmpFile); +}; +const rimrafMoveRemoveSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafMoveRemoveDirSync(path, opt, (0, fs_js_1.lstatSync)(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafMoveRemoveSync = rimrafMoveRemoveSync; +const rimrafMoveRemoveDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDirSync(path, { ...opt, tmp: (0, default_tmp_js_1.defaultTmpSync)(path) }, ent); + } + const tmp = opt.tmp; + if (path === opt.tmp && (0, path_1.parse)(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = (0, path_1.resolve)(path, ent.name); + removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll; + } + if (!removedAll) { + return false; + } + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => tmpUnlinkSync(path, tmp, fs_js_1.rmdirSync)); + return true; +}; +const tmpUnlinkSync = (path, tmp, rmSync) => { + const tmpFile = (0, path_1.resolve)(tmp, uniqueFilename(path)); + (0, fs_js_1.renameSync)(path, tmpFile); + return rmSync(tmpFile); +}; +//# sourceMappingURL=rimraf-move-remove.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map new file mode 100644 index 00000000000000..44602502b90b2d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-move-remove.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rimraf-move-remove.js","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":";AAAA,qCAAqC;AACrC,EAAE;AACF,aAAa;AACb,oBAAoB;AACpB,yCAAyC;AACzC,uEAAuE;AACvE,0BAA0B;AAC1B,EAAE;AACF,uEAAuE;AACvE,kDAAkD;AAClD,EAAE;AACF,0EAA0E;;;AAE1E,+BAA+C;AAC/C,qDAA6D;AAE7D,yDAAmE;AAEnE,mCAOgB;AAChB,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,gBAAU,CAAA;AAI1D,+DAA0E;AAE1E,sEAAsE;AACtE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,IAAA,eAAQ,EAAC,IAAI,CAAC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAA;AAE9E,MAAM,cAAc,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE,CAC5C,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAA6B,EAAE,EAAE;IACnD,IAAI,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;QACxB,OAAO,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,IAAI,CAC5B,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,EAClB,GAAG,CAAC,EAAE;YACJ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC1B,OAAM;YACR,CAAC;YACD,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC;SAAM,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QAChC,OAAM;IACR,CAAC;IACD,MAAM,EAAE,CAAA;AACV,CAAC,CAAC,CAAA;AAEJ,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,IAAI,CAAC;QACH,IAAA,kBAAU,EAAC,IAAI,CAAC,CAAA;IAClB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YACpD,IAAI,CAAC;gBACH,OAAO,IAAA,iBAAS,EAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC/B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAK,GAA6B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACtD,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;aAAM,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC5D,OAAM;QACR,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAEM,MAAM,gBAAgB,GAAG,KAAK,EACnC,IAAY,EACZ,GAAuB,EACvB,EAAE;IACF,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,mBAAmB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAbY,QAAA,gBAAgB,oBAa5B;AAED,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,mBAAmB,CACxB,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,MAAM,IAAA,2BAAU,EAAC,IAAI,CAAC,EAAE,EACvC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,CAAA;QAC5D,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,mBAAmB,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAChC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAA
C,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,MAAM,IAAA,+BAAY,EAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;IACnD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,KAAK,EACrB,IAAY,EACZ,GAAW,EACX,EAA+B,EAC/B,EAAE;IACF,MAAM,OAAO,GAAG,IAAA,cAAO,EAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,MAAM,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IAC3B,OAAO,MAAM,EAAE,CAAC,OAAO,CAAC,CAAA;AAC1B,CAAC,CAAA;AAEM,MAAM,oBAAoB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,uBAAuB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IAC5D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,oBAAoB,wBAUhC;AAED,MAAM,uBAAuB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,uBAAuB,CAC5B,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,IAAA,+BAAc,EAAC,IAAI,CAAC,EAAE,EACrC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,MAAM,GAAG,GAAW,GAAG,CAAC,GAAG,CAAA;IAE3B,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,kBAAkB,CAAC,CAAC,CAAA;QACpE,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,uBAAuB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,iBAAS,CAAC,CAAC,CAAA;IAC3D,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CACpB,IAAY,EACZ,GAAW,EACX,MAA2B,EAC3B,EAAE;IACF,MAAM,OAAO,GAAG,IAAA,cAAO,EAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,IAAA,kBAAU,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACzB,OAAO,MAAM,CAAC,OAAO,CAAC,CAAA;AACxB,CAAC,CAAA","sourcesContent":["// https://youtu.be/uhRWMGBjlO8?t=537\n//\n// 1. readdir\n// 2. for each entry\n// a. if a non-empty directory, recurse\n// b. if an empty directory, move to random hidden file name in $TEMP\n// c. 
unlink/rmdir $TEMP\n//\n// This works around the fact that unlink/rmdir is non-atomic and takes\n// a non-deterministic amount of time to complete.\n//\n// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.\n\nimport { basename, parse, resolve } from 'path'\nimport { defaultTmp, defaultTmpSync } from './default-tmp.js'\n\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nimport {\n chmodSync,\n lstatSync,\n promises as fsPromises,\n renameSync,\n rmdirSync,\n unlinkSync,\n} from './fs.js'\nconst { lstat, rename, unlink, rmdir, chmod } = fsPromises\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\n// crypto.randomBytes is much slower, and Math.random() is enough here\nconst uniqueFilename = (path: string) => `.${basename(path)}.${Math.random()}`\n\nconst unlinkFixEPERM = async (path: string) =>\n unlink(path).catch((er: Error & { code?: string }) => {\n if (er.code === 'EPERM') {\n return chmod(path, 0o666).then(\n () => unlink(path),\n er2 => {\n if (er2.code === 'ENOENT') {\n return\n }\n throw er\n },\n )\n } else if (er.code === 'ENOENT') {\n return\n }\n throw er\n })\n\nconst unlinkFixEPERMSync = (path: string) => {\n try {\n unlinkSync(path)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'EPERM') {\n try {\n return chmodSync(path, 0o666)\n } catch (er2) {\n if ((er2 as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n } else if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n}\n\nexport const rimrafMoveRemove = async (\n path: string,\n opt: RimrafAsyncOptions,\n) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafMoveRemoveDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDir(\n path,\n { ...opt, tmp: await defaultTmp(path) },\n ent,\n )\n }\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent),\n ),\n )\n ).reduce((a, b) => a && b, true)\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir))\n return true\n}\n\nconst tmpUnlink = async (\n path: string,\n tmp: string,\n rm: (p: string) => Promise,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n await rename(path, tmpFile)\n return await rm(tmpFile)\n}\n\nexport const rimrafMoveRemoveSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafMoveRemoveDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDirSync(\n path,\n { ...opt, tmp: defaultTmpSync(path) },\n ent,\n )\n }\n const tmp: string = opt.tmp\n\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll\n }\n if (!removedAll) {\n return false\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync))\n return true\n}\n\nconst tmpUnlinkSync = (\n path: string,\n tmp: string,\n rmSync: (p: string) => void,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n renameSync(path, tmpFile)\n return rmSync(tmpFile)\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts new file mode 100644 index 00000000000000..cc84bf7ffd34d0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafNative: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafNativeSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map new file mode 100644 index 00000000000000..bea6b79965192f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-native.d.ts","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAIlE,eAAO,MAAM,YAAY,SACjB,MAAM,OACP,kBAAkB,KACtB,OAAO,CAAC,OAAO,CAOjB,CAAA;AAED,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,iBAAiB,KACrB,OAOF,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js new file mode 100644 index 00000000000000..ab9f633d7ca157 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js @@ -0,0 +1,24 @@ +"use strict"; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafNativeSync = exports.rimrafNative = void 0; +const fs_js_1 = require("./fs.js"); +const { rm } = fs_js_1.promises; +const rimrafNative = async (path, opt) => { + await rm(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +exports.rimrafNative = rimrafNative; +const rimrafNativeSync = (path, opt) => { + (0, fs_js_1.rmSync)(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +exports.rimrafNativeSync = rimrafNativeSync; +//# sourceMappingURL=rimraf-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map new file mode 100644 index 
00000000000000..6eddd444e49a13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-native.js","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":";;;AACA,mCAA0C;AAC1C,MAAM,EAAE,EAAE,EAAE,GAAG,gBAAQ,CAAA;AAEhB,MAAM,YAAY,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,MAAM,EAAE,CAAC,IAAI,EAAE;QACb,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAVY,QAAA,YAAY,gBAUxB;AAEM,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAA,cAAM,EAAC,IAAI,EAAE;QACX,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAVY,QAAA,gBAAgB,oBAU5B","sourcesContent":["import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { promises, rmSync } from './fs.js'\nconst { rm } = promises\n\nexport const rimrafNative = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n await rm(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n\nexport const rimrafNativeSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n rmSync(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts new file mode 100644 index 00000000000000..8e532efe9aba21 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafPosix: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafPosixSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-posix.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map new file mode 100644 index 00000000000000..3f9b8084ed470b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.d.ts","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAcA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAGlE,eAAO,MAAM,WAAW,SAAgB,MAAM,OAAO,kBAAkB,qBAUtE,CAAA;AAED,eAAO,MAAM,eAAe,SAAU,MAAM,OAAO,iBAAiB,YAUnE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js new file mode 100644 index 00000000000000..eb0e7f11680107 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js @@ -0,0 +1,123 @@ +"use strict"; +// the simple recursive removal, where unlink and rmdir are atomic +// Note that this approach does NOT work on Windows! +// We stat first and only unlink if the Dirent isn't a directory, +// because sunos will let root unlink a directory, and some +// SUPER weird breakage happens as a result. 
+Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafPosixSync = exports.rimrafPosix = void 0; +const fs_js_1 = require("./fs.js"); +const { lstat, rmdir, unlink } = fs_js_1.promises; +const path_1 = require("path"); +const readdir_or_error_js_1 = require("./readdir-or-error.js"); +const ignore_enoent_js_1 = require("./ignore-enoent.js"); +const rimrafPosix = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafPosixDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafPosix = rimrafPosix; +const rimrafPosixSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafPosixDirSync(path, opt, (0, fs_js_1.lstatSync)(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafPosixSync = rimrafPosixSync; +const rimrafPosixDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(unlink(path)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafPosixDir((0, path_1.resolve)(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(rmdir(path)); + return true; +}; +const rimrafPosixDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. 
+ /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => (0, fs_js_1.unlinkSync)(path)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = (0, path_1.resolve)(path, ent.name); + removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll; + } + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => (0, fs_js_1.rmdirSync)(path)); + return true; +}; +//# sourceMappingURL=rimraf-posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map new file mode 100644 index 00000000000000..32a366a54f7e3c --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-posix.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.js","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":";AAAA,kEAAkE;AAClE,oDAAoD;AACpD,iEAAiE;AACjE,2DAA2D;AAC3D,4CAA4C;;;AAE5C,mCAAoE;AACpE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,gBAAQ,CAAA;AAEzC,+BAAqC;AAErC,+DAA0E;AAI1E,yDAAmE;AAE5D,MAAM,WAAW,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IACzE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,WAAW,eAUvB;AAEM,MAAM,eAAe,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACtE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IACvD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,eAAe,mBAU3B;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAA;QAChC,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,cAAc,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CACtE,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAA
C;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,MAAM,IAAA,+BAAY,EAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/B,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,kBAAkB,GAAG,CACzB,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,IAAA,kBAAU,EAAC,IAAI,CAAC,CAAC,CAAA;QACxC,OAAO,IAAI,CAAA;IACb,CAAC;IACD,IAAI,UAAU,GAAY,IAAI,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,kBAAkB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IAC5D,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,IAAA,iBAAS,EAAC,IAAI,CAAC,CAAC,CAAA;IACvC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// the simple recursive removal, where unlink and rmdir are atomic\n// Note that this approach does NOT work on Windows!\n// We stat first and only unlink if the Dirent isn't a directory,\n// because sunos will let root unlink a directory, and some\n// SUPER weird breakage happens as a result.\n\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nconst { lstat, rmdir, unlink } = promises\n\nimport { parse, resolve } from 'path'\n\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nexport const rimrafPosix = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafPosixDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafPosixSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafPosixDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafPosixDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(unlink(path))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)),\n )\n ).reduce((a, b) => a && b, true)\n\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n\n await ignoreENOENT(rmdir(path))\n return true\n}\n\nconst rimrafPosixDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => unlinkSync(path))\n return true\n }\n let removedAll: boolean = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (!removedAll) {\n return false\n }\n\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n\n ignoreENOENTSync(() => rmdirSync(path))\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts new file mode 100644 index 00000000000000..555689073ffe75 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafWindows: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafWindowsSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-windows.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map new file mode 100644 index 00000000000000..56f00d9f2e3d13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-windows.d.ts","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA2DlE,eAAO,MAAM,aAAa,SAAgB,MAAM,OAAO,kBAAkB,qBAUxE,CAAA;AAED,eAAO,MAAM,iBAAiB,SAAU,MAAM,OAAO,iBAAiB,YAUrE,CAAA"} \ No newline at end of file diff --git 
a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js new file mode 100644 index 00000000000000..8d19f98f963606 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js @@ -0,0 +1,182 @@ +"use strict"; +// This is the same as rimrafPosix, with the following changes: +// +// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff +// 2. All non-directories are removed first and then all directories are +// removed in a second sweep. +// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on +// the that folder. +// +// Note: "move then remove" is 2-10 times slower, and just as unreliable. +Object.defineProperty(exports, "__esModule", { value: true }); +exports.rimrafWindowsSync = exports.rimrafWindows = void 0; +const path_1 = require("path"); +const fix_eperm_js_1 = require("./fix-eperm.js"); +const fs_js_1 = require("./fs.js"); +const ignore_enoent_js_1 = require("./ignore-enoent.js"); +const readdir_or_error_js_1 = require("./readdir-or-error.js"); +const retry_busy_js_1 = require("./retry-busy.js"); +const rimraf_move_remove_js_1 = require("./rimraf-move-remove.js"); +const { unlink, rmdir, lstat } = fs_js_1.promises; +const rimrafWindowsFile = (0, retry_busy_js_1.retryBusy)((0, fix_eperm_js_1.fixEPERM)(unlink)); +const rimrafWindowsFileSync = (0, retry_busy_js_1.retryBusySync)((0, fix_eperm_js_1.fixEPERMSync)(fs_js_1.unlinkSync)); +const rimrafWindowsDirRetry = (0, retry_busy_js_1.retryBusy)((0, fix_eperm_js_1.fixEPERM)(rmdir)); +const rimrafWindowsDirRetrySync = (0, retry_busy_js_1.retryBusySync)((0, fix_eperm_js_1.fixEPERMSync)(fs_js_1.rmdirSync)); +const rimrafWindowsDirMoveRemoveFallback = async (path, opt) => { + /* c8 ignore start */ + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + /* c8 ignore stop */ + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return await rimrafWindowsDirRetry(path, options); + } + catch (er) { + if (er?.code === 'ENOTEMPTY') { + return await (0, rimraf_move_remove_js_1.rimrafMoveRemove)(path, options); + } + throw er; + } +}; +const rimrafWindowsDirMoveRemoveFallbackSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return rimrafWindowsDirRetrySync(path, options); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOTEMPTY') { + return (0, rimraf_move_remove_js_1.rimrafMoveRemoveSync)(path, options); + } + throw er; + } +}; +const START = Symbol('start'); +const CHILD = Symbol('child'); +const FINISH = Symbol('finish'); +const rimrafWindows = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafWindowsDir(path, opt, await lstat(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafWindows = rimrafWindows; +const rimrafWindowsSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafWindowsDirSync(path, opt, (0, fs_js_1.lstatSync)(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +exports.rimrafWindowsSync = rimrafWindowsSync; +const rimrafWindowsDir = async (path, opt, ent, state = START) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = 
ent.isDirectory() ? await (0, readdir_or_error_js_1.readdirOrError)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + // is a file + await (0, ignore_enoent_js_1.ignoreENOENT)(rimrafWindowsFile(path, opt)); + return true; + } + const s = state === START ? CHILD : state; + const removedAll = (await Promise.all(entries.map(ent => rimrafWindowsDir((0, path_1.resolve)(path, ent.name), opt, ent, s)))).reduce((a, b) => a && b, true); + if (state === START) { + return rimrafWindowsDir(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await (0, ignore_enoent_js_1.ignoreENOENT)(rimrafWindowsDirMoveRemoveFallback(path, opt)); + } + return true; +}; +const rimrafWindowsDirSync = (path, opt, ent, state = START) => { + const entries = ent.isDirectory() ? (0, readdir_or_error_js_1.readdirOrErrorSync)(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + // is a file + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => rimrafWindowsFileSync(path, opt)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const s = state === START ? 
CHILD : state; + const p = (0, path_1.resolve)(path, ent.name); + removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll; + } + if (state === START) { + return rimrafWindowsDirSync(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === (0, path_1.parse)(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + (0, ignore_enoent_js_1.ignoreENOENTSync)(() => { + rimrafWindowsDirMoveRemoveFallbackSync(path, opt); + }); + } + return true; +}; +//# sourceMappingURL=rimraf-windows.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map new file mode 100644 index 00000000000000..50a97f890d84aa --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/rimraf-windows.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-windows.js","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":";AAAA,+DAA+D;AAC/D,EAAE;AACF,sEAAsE;AACtE,wEAAwE;AACxE,gCAAgC;AAChC,0EAA0E;AAC1E,sBAAsB;AACtB,EAAE;AACF,yEAAyE;;;AAGzE,+BAAqC;AAErC,iDAAuD;AACvD,mCAAoE;AACpE,yDAAmE;AACnE,+DAA0E;AAC1E,mDAA0D;AAC1D,mEAAgF;AAChF,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,gBAAQ,CAAA;AAEzC,MAAM,iBAAiB,GAAG,IAAA,yBAAS,EAAC,IAAA,uBAAQ,EAAC,MAAM,CAAC,CAAC,CAAA;AACrD,MAAM,qBAAqB,GAAG,IAAA,6BAAa,EAAC,IAAA,2BAAY,EAAC,kBAAU,CAAC,CAAC,CAAA;AACrE,MAAM,qBAAqB,GAAG,IAAA,yBAAS,EAAC,IAAA,uBAAQ,EAAC,KAAK,CAAC,CAAC,CAAA;AACxD,MAAM,yBAAyB,GAAG,IAAA,6BAAa,EAAC,IAAA,2BAAY,EAAC,iBAAS,CAAC,CAAC,CAAA;AAExE,MAAM,kCAAkC,GAAG,KAAK,EAC9C,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,qBAAqB;IACrB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,oBAAoB;IACpB,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,MAAM,qBAAqB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YACxD,OAAO,MAAM,IAAA,wCAAgB,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC9C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,sCAAsC,GAAG,CAC7C,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,yBAAyB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACjD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,OAAO,IAAA,4CAAoB,EAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC5C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAA;AAExB,MAAM,aAAa,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IACpE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,aAAa,iBAUzB;AAEM,MAAM,iBAAiB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACxE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,IAAA,iBAAS,EAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ
;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAVY,QAAA,iBAAiB,qBAU7B;AAED,MAAM,gBAAgB,GAAG,KAAK,EAC5B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACK,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,IAAA,oCAAc,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,MAAM,IAAA,+BAAY,EAAC,iBAAiB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAChD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;IACzC,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,gBAAgB,CAAC,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,IAAA,+BAAY,EAAC,kCAAkC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IACnE,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,oBAAoB,GAAG,CAC3B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACJ,EAAE;IACX,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,IAAA,wCAAkB,EAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,IAAA,mCAAgB,EAAC,GAAG,EAAE,CAAC,qBAAqB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;QACzC,MAAM,CAAC,GAAG,IAAA,cAAO,EAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,oBAAoB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IAED,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACrD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,IAAA,YAAK,EAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAA,mCAAgB,EAAC,GAAG,EAAE;YACpB,sCAAsC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACnD,CAAC,CAAC,CAAA;IACJ,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// This is the 
same as rimrafPosix, with the following changes:\n//\n// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff\n// 2. All non-directories are removed first and then all directories are\n// removed in a second sweep.\n// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on\n// the that folder.\n//\n// Note: \"move then remove\" is 2-10 times slower, and just as unreliable.\n\nimport { Dirent, Stats } from 'fs'\nimport { parse, resolve } from 'path'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { fixEPERM, fixEPERMSync } from './fix-eperm.js'\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\nimport { retryBusy, retryBusySync } from './retry-busy.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nconst { unlink, rmdir, lstat } = promises\n\nconst rimrafWindowsFile = retryBusy(fixEPERM(unlink))\nconst rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync))\nconst rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir))\nconst rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync))\n\nconst rimrafWindowsDirMoveRemoveFallback = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n /* c8 ignore start */\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n /* c8 ignore stop */\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return await rimrafWindowsDirRetry(path, options)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOTEMPTY') {\n return await rimrafMoveRemove(path, options)\n }\n throw er\n }\n}\n\nconst rimrafWindowsDirMoveRemoveFallbackSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return rimrafWindowsDirRetrySync(path, options)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOTEMPTY') {\n return rimrafMoveRemoveSync(path, options)\n }\n throw er\n }\n}\n\nconst START = Symbol('start')\nconst CHILD = Symbol('child')\nconst FINISH = Symbol('finish')\n\nexport const rimrafWindows = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafWindowsDir(path, opt, await lstat(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafWindowsSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafWindowsDirSync(path, opt, lstatSync(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafWindowsDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n state = START,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n // is a file\n await ignoreENOENT(rimrafWindowsFile(path, opt))\n return true\n }\n\n const s = state === START ? CHILD : state\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafWindowsDir(resolve(path, ent.name), opt, ent, s),\n ),\n )\n ).reduce((a, b) => a && b, true)\n\n if (state === START) {\n return rimrafWindowsDir(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt))\n }\n return true\n}\n\nconst rimrafWindowsDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n state = START,\n): boolean => {\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n // is a file\n ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const s = state === START ? 
CHILD : state\n const p = resolve(path, ent.name)\n removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll\n }\n\n if (state === START) {\n return rimrafWindowsDirSync(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => {\n rimrafWindowsDirMoveRemoveFallbackSync(path, opt)\n })\n }\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts new file mode 100644 index 00000000000000..e191fd90da93d3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const useNative: (opt?: RimrafAsyncOptions) => boolean; +export declare const useNativeSync: (opt?: RimrafOptions) => boolean; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map new file mode 100644 index 00000000000000..b182beb1707a7d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAa9D,eAAO,MAAM,SAAS,EAAE,CAAC,GAAG,CAAC,EAAE,kBAAkB,KAAK,OAGf,CAAA;AACvC,eAAO,MAAM,aAAa,EAAE,CAAC,GAAG,CAAC,EAAE,aAAa,KAAK,OAGd,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js new file mode 100644 index 00000000000000..1f668768d96bcb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js @@ -0,0 +1,22 @@ +"use strict"; +var __importDefault = (this && this.__importDefault) || function (mod) { + return (mod && mod.__esModule) ? mod : { "default": mod }; +}; +Object.defineProperty(exports, "__esModule", { value: true }); +exports.useNativeSync = exports.useNative = void 0; +const platform_js_1 = __importDefault(require("./platform.js")); +const version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +/* c8 ignore start */ +const [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10)); +/* c8 ignore stop */ +const hasNative = major > 14 || (major === 14 && minor >= 14); +// we do NOT use native by default on Windows, because Node's native +// rm implementation is less advanced. Change this code if that changes. +exports.useNative = !hasNative || platform_js_1.default === 'win32' ? + () => false + : opt => !opt?.signal && !opt?.filter; +exports.useNativeSync = !hasNative || platform_js_1.default === 'win32' ? 
+ () => false + : opt => !opt?.signal && !opt?.filter; +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map new file mode 100644 index 00000000000000..a89b8db7e68352 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/commonjs/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":";;;;;;AACA,gEAAoC;AAEpC,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AAEpD,qBAAqB;AACrB,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;AAChE,oBAAoB;AACpB,MAAM,SAAS,GAAG,KAAK,GAAG,EAAE,IAAI,CAAC,KAAK,KAAK,EAAE,IAAI,KAAK,IAAI,EAAE,CAAC,CAAA;AAE7D,oEAAoE;AACpE,yEAAyE;AAC5D,QAAA,SAAS,GACpB,CAAC,SAAS,IAAI,qBAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA;AAC1B,QAAA,aAAa,GACxB,CAAC,SAAS,IAAI,qBAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafOptions } from './index.js'\nimport platform from './platform.js'\n\nconst version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\n\n/* c8 ignore start */\nconst [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10))\n/* c8 ignore stop */\nconst hasNative = major > 14 || (major === 14 && minor >= 14)\n\n// we do NOT use native by default on Windows, because Node's native\n// rm implementation is less advanced. 
Change this code if that changes.\nexport const useNative: (opt?: RimrafAsyncOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\nexport const useNativeSync: (opt?: RimrafOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts new file mode 100644 index 00000000000000..5600d7c766e6d6 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts @@ -0,0 +1,8 @@ +#!/usr/bin/env node +export declare const help: string; +declare const main: { + (...args: string[]): Promise<1 | 0>; + help: string; +}; +export default main; +//# sourceMappingURL=bin.d.mts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map new file mode 100644 index 00000000000000..a5f1ec2cb6c8e8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.d.mts.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.d.mts","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AAcA,eAAO,MAAM,IAAI,QAkChB,CAAA;AA8ED,QAAA,MAAM,IAAI;cAAmB,MAAM,EAAE;;CAoIpC,CAAA;AAGD,eAAe,IAAI,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs new file mode 100755 index 00000000000000..4aea35e9c43256 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs @@ -0,0 +1,256 @@ +#!/usr/bin/env node +import { readFile } from 'fs/promises'; +import { rimraf } from './index.js'; +const pj = fileURLToPath(new URL('../package.json', import.meta.url)); +const pjDist = fileURLToPath(new URL('../../package.json', import.meta.url)); +const { version } = JSON.parse(await readFile(pjDist, 'utf8').catch(() => readFile(pj, 'utf8'))); +const runHelpForUsage = () => console.error('run `rimraf --help` for usage information'); +export const help = `rimraf version ${version} + +Usage: rimraf [ ...] +Deletes all files and folders at "path", recursively. + +Options: + -- Treat all subsequent arguments as paths + -h --help Display this usage info + --preserve-root Do not remove '/' recursively (default) + --no-preserve-root Do not treat '/' specially + -G --no-glob Treat arguments as literal paths, not globs (default) + -g --glob Treat arguments as glob patterns + -v --verbose Be verbose when deleting files, showing them as + they are removed. 
Not compatible with --impl=native + -V --no-verbose Be silent when deleting files, showing nothing as + they are removed (default) + -i --interactive Ask for confirmation before deleting anything + Not compatible with --impl=native + -I --no-interactive Do not ask for confirmation before deleting + + --impl= Specify the implementation to use: + rimraf: choose the best option (default) + native: the built-in implementation in Node.js + manual: the platform-specific JS implementation + posix: the Posix JS implementation + windows: the Windows JS implementation (falls back to + move-remove on ENOTEMPTY) + move-remove: a slow reliable Windows fallback + +Implementation-specific options: + --tmp= Temp file folder for 'move-remove' implementation + --max-retries= maxRetries for 'native' and 'windows' implementations + --retry-delay= retryDelay for 'native' implementation, default 100 + --backoff= Exponential backoff factor for retries (default: 1.2) +`; +import { parse, relative, resolve } from 'path'; +const cwd = process.cwd(); +import { createInterface } from 'readline'; +import { fileURLToPath } from 'url'; +const prompt = async (rl, q) => new Promise(res => rl.question(q, res)); +const interactiveRimraf = async (impl, paths, opt) => { + const existingFilter = opt.filter || (() => true); + let allRemaining = false; + let noneRemaining = false; + const queue = []; + let processing = false; + const processQueue = async () => { + if (processing) + return; + processing = true; + let next; + while ((next = queue.shift())) { + await next(); + } + processing = false; + }; + const oneAtATime = (fn) => async (s, e) => { + const p = new Promise(res => { + queue.push(async () => { + const result = await fn(s, e); + res(result); + return result; + }); + }); + processQueue(); + return p; + }; + const rl = createInterface({ + input: process.stdin, + output: process.stdout, + }); + opt.filter = oneAtATime(async (path, ent) => { + if (noneRemaining) { + return false; + } + while (!allRemaining) { + const a = (await prompt(rl, `rm? 
${relative(cwd, path)}\n[(Yes)/No/All/Quit] > `)).trim(); + if (/^n/i.test(a)) { + return false; + } + else if (/^a/i.test(a)) { + allRemaining = true; + break; + } + else if (/^q/i.test(a)) { + noneRemaining = true; + return false; + } + else if (a === '' || /^y/i.test(a)) { + break; + } + else { + continue; + } + } + return existingFilter(path, ent); + }); + await impl(paths, opt); + rl.close(); +}; +const main = async (...args) => { + const verboseFilter = (s) => { + console.log(relative(cwd, s)); + return true; + }; + if (process.env.__RIMRAF_TESTING_BIN_FAIL__ === '1') { + throw new Error('simulated rimraf failure'); + } + const opt = {}; + const paths = []; + let dashdash = false; + let impl = rimraf; + let interactive = false; + for (const arg of args) { + if (dashdash) { + paths.push(arg); + continue; + } + if (arg === '--') { + dashdash = true; + continue; + } + else if (arg === '-rf' || arg === '-fr') { + // this never did anything, but people put it there I guess + continue; + } + else if (arg === '-h' || arg === '--help') { + console.log(help); + return 0; + } + else if (arg === '--interactive' || arg === '-i') { + interactive = true; + continue; + } + else if (arg === '--no-interactive' || arg === '-I') { + interactive = false; + continue; + } + else if (arg === '--verbose' || arg === '-v') { + opt.filter = verboseFilter; + continue; + } + else if (arg === '--no-verbose' || arg === '-V') { + opt.filter = undefined; + continue; + } + else if (arg === '-g' || arg === '--glob') { + opt.glob = true; + continue; + } + else if (arg === '-G' || arg === '--no-glob') { + opt.glob = false; + continue; + } + else if (arg === '--preserve-root') { + opt.preserveRoot = true; + continue; + } + else if (arg === '--no-preserve-root') { + opt.preserveRoot = false; + continue; + } + else if (/^--tmp=/.test(arg)) { + const val = arg.substring('--tmp='.length); + opt.tmp = val; + continue; + } + else if (/^--max-retries=/.test(arg)) { + const val = +arg.substring('--max-retries='.length); + opt.maxRetries = val; + continue; + } + else if (/^--retry-delay=/.test(arg)) { + const val = +arg.substring('--retry-delay='.length); + opt.retryDelay = val; + continue; + } + else if (/^--backoff=/.test(arg)) { + const val = +arg.substring('--backoff='.length); + opt.backoff = val; + continue; + } + else if (/^--impl=/.test(arg)) { + const val = arg.substring('--impl='.length); + switch (val) { + case 'rimraf': + impl = rimraf; + continue; + case 'native': + case 'manual': + case 'posix': + case 'windows': + impl = rimraf[val]; + continue; + case 'move-remove': + impl = rimraf.moveRemove; + continue; + default: + console.error(`unknown implementation: ${val}`); + runHelpForUsage(); + return 1; + } + } + else if (/^-/.test(arg)) { + console.error(`unknown option: ${arg}`); + runHelpForUsage(); + return 1; + } + else { + paths.push(arg); + } + } + if (opt.preserveRoot !== false) { + for (const path of paths.map(p => resolve(p))) { + if (path === parse(path).root) { + console.error(`rimraf: it is dangerous to operate recursively on '/'`); + console.error('use --no-preserve-root to override this failsafe'); + return 1; + } + } + } + if (!paths.length) { + console.error('rimraf: must provide a path to remove'); + runHelpForUsage(); + return 1; + } + if (impl === rimraf.native && (interactive || opt.filter)) { + console.error('native implementation does not support -v or -i'); + runHelpForUsage(); + return 1; + } + if (interactive) { + await interactiveRimraf(impl, paths, opt); + } + else { + await impl(paths, 
opt); + } + return 0; +}; +main.help = help; +export default main; +if (process.env.__TESTING_RIMRAF_BIN__ !== '1') { + const args = process.argv.slice(2); + main(...args).then(code => process.exit(code), er => { + console.error(er); + process.exit(1); + }); +} +//# sourceMappingURL=bin.mjs.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map new file mode 100644 index 00000000000000..163fc96df5b6a2 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/bin.mjs.map @@ -0,0 +1 @@ +{"version":3,"file":"bin.mjs","sourceRoot":"","sources":["../../src/bin.mts"],"names":[],"mappings":";AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,aAAa,CAAA;AAEtC,OAAO,EAAE,MAAM,EAAE,MAAM,YAAY,CAAA;AAEnC,MAAM,EAAE,GAAG,aAAa,CAAC,IAAI,GAAG,CAAC,iBAAiB,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;AACrE,MAAM,MAAM,GAAG,aAAa,CAAC,IAAI,GAAG,CAAC,oBAAoB,EAAE,MAAM,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAA;AAC5E,MAAM,EAAE,OAAO,EAAE,GAAG,IAAI,CAAC,KAAK,CAC5B,MAAM,QAAQ,CAAC,MAAM,EAAE,MAAM,CAAC,CAAC,KAAK,CAAC,GAAG,EAAE,CAAC,QAAQ,CAAC,EAAE,EAAE,MAAM,CAAC,CAAC,CAC1C,CAAA;AAExB,MAAM,eAAe,GAAG,GAAG,EAAE,CAC3B,OAAO,CAAC,KAAK,CAAC,2CAA2C,CAAC,CAAA;AAE5D,MAAM,CAAC,MAAM,IAAI,GAAG,kBAAkB,OAAO;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;CAkC5C,CAAA;AAED,OAAO,EAAE,KAAK,EAAE,QAAQ,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC/C,MAAM,GAAG,GAAG,OAAO,CAAC,GAAG,EAAE,CAAA;AAGzB,OAAO,EAAE,eAAe,EAAa,MAAM,UAAU,CAAA;AACrD,OAAO,EAAE,aAAa,EAAE,MAAM,KAAK,CAAA;AAEnC,MAAM,MAAM,GAAG,KAAK,EAAE,EAAa,EAAE,CAAS,EAAE,EAAE,CAChD,IAAI,OAAO,CAAS,GAAG,CAAC,EAAE,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,GAAG,CAAC,CAAC,CAAA;AAEjD,MAAM,iBAAiB,GAAG,KAAK,EAC7B,IAA6E,EAC7E,KAAe,EACf,GAAuB,EACvB,EAAE;IACF,MAAM,cAAc,GAAG,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,EAAE,CAAC,IAAI,CAAC,CAAA;IACjD,IAAI,YAAY,GAAG,KAAK,CAAA;IACxB,IAAI,aAAa,GAAG,KAAK,CAAA;IACzB,MAAM,KAAK,GAA+B,EAAE,CAAA;IAC5C,IAAI,UAAU,GAAG,KAAK,CAAA;IACtB,MAAM,YAAY,GAAG,KAAK,IAAI,EAAE;QAC9B,IAAI,UAAU;YAAE,OAAM;QACtB,UAAU,GAAG,IAAI,CAAA;QACjB,IAAI,IAA0C,CAAA;QAC9C,OAAO,CAAC,IAAI,GAAG,KAAK,CAAC,KAAK,EAAE,CAAC,EAAE,CAAC;YAC9B,MAAM,IAAI,EAAE,CAAA;QACd,CAAC;QACD,UAAU,GAAG,KAAK,CAAA;IACpB,CAAC,CAAA;IACD,MAAM,UAAU,GACd,CAAC,EAAsD,EAAE,EAAE,CAC3D,KAAK,EAAE,CAAS,EAAE,CAAiB,EAAoB,EAAE;QACvD,MAAM,CAAC,GAAG,IAAI,OAAO,CAAU,GAAG,CAAC,EAAE;YACnC,KAAK,CAAC,IAAI,CAAC,KAAK,IAAI,EAAE;gBACpB,MAAM,MAAM,GAAG,MAAM,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAA;gBAC7B,GAAG,CAAC,MAAM,CAAC,CAAA;gBACX,OAAO,MAAM,CAAA;YACf,CAAC,CAAC,CAAA;QACJ,CAAC,CAAC,CAAA;QACF,YAAY,EAAE,CAAA;QACd,OAAO,CAAC,CAAA;IACV,CAAC,CAAA;IACH,MAAM,EAAE,GAAG,eAAe,CAAC;QACzB,KAAK,EAAE,OAAO,CAAC,KAAK;QACpB,MAAM,EAAE,OAAO,CAAC,MAAM;KACvB,CAAC,CAAA;IACF,GAAG,CAAC,MAAM,GAAG,UAAU,CACrB,KAAK,EAAE,IAAY,EAAE,GAAmB,EAAoB,EAAE;QAC5D,IAAI,aAAa,EAAE,CAAC;YAClB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,OAAO,CAAC,YAAY,EAAE,CAAC;YACrB,MAAM,CAAC,GAAG,CACR,MAAM,MAAM,CAAC,EAAE,EAAE,OAAO,QAAQ,CAAC,GAAG,EAAE,IAAI,CAAC,0BAA0B,CAAC,CACvE,CAAC,IAAI,EAAE,CAAA;YACR,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBAClB,OAAO,KAAK,CAAA;YACd,CAAC;iBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACzB,YAAY,GAAG,IAAI,CAAA;gBACnB,MAAK;YACP,CAAC;iBAAM,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACzB,aAAa,GAAG,IAAI,CAAA;gBACpB,OAAO,KAAK,CAAA;YACd,CAAC;iBAAM,IAAI,CAAC,KAAK,EAAE,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,EAAE,CAAC;gBACrC,MAAK;YACP,CAAC;iBAAM,CAAC;gBACN,SAAQ;YACV,CAAC;QACH,CAAC;QACD,OAAO,cAAc,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;IAClC,CAAC,CACF,CAAA;IACD,MAAM,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;IACtB,EAAE,CAAC,KAAK,EAAE,CAAA;AACZ,CAAC,CAAA;AAED,MAAM,IAAI,GAAG,KAAK,EAAE,GAAG,IAAc,EAAE,EAAE;I
ACvC,MAAM,aAAa,GAAG,CAAC,CAAS,EAAE,EAAE;QAClC,OAAO,CAAC,GAAG,CAAC,QAAQ,CAAC,GAAG,EAAE,CAAC,CAAC,CAAC,CAAA;QAC7B,OAAO,IAAI,CAAA;IACb,CAAC,CAAA;IAED,IAAI,OAAO,CAAC,GAAG,CAAC,2BAA2B,KAAK,GAAG,EAAE,CAAC;QACpD,MAAM,IAAI,KAAK,CAAC,0BAA0B,CAAC,CAAA;IAC7C,CAAC;IAED,MAAM,GAAG,GAAuB,EAAE,CAAA;IAClC,MAAM,KAAK,GAAa,EAAE,CAAA;IAC1B,IAAI,QAAQ,GAAG,KAAK,CAAA;IACpB,IAAI,IAAI,GAGgB,MAAM,CAAA;IAE9B,IAAI,WAAW,GAAG,KAAK,CAAA;IAEvB,KAAK,MAAM,GAAG,IAAI,IAAI,EAAE,CAAC;QACvB,IAAI,QAAQ,EAAE,CAAC;YACb,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;YACf,SAAQ;QACV,CAAC;QACD,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACjB,QAAQ,GAAG,IAAI,CAAA;YACf,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,KAAK,IAAI,GAAG,KAAK,KAAK,EAAE,CAAC;YAC1C,2DAA2D;YAC3D,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,QAAQ,EAAE,CAAC;YAC5C,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAA;YACjB,OAAO,CAAC,CAAA;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,eAAe,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACnD,WAAW,GAAG,IAAI,CAAA;YAClB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,kBAAkB,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YACtD,WAAW,GAAG,KAAK,CAAA;YACnB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,WAAW,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YAC/C,GAAG,CAAC,MAAM,GAAG,aAAa,CAAA;YAC1B,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,cAAc,IAAI,GAAG,KAAK,IAAI,EAAE,CAAC;YAClD,GAAG,CAAC,MAAM,GAAG,SAAS,CAAA;YACtB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,QAAQ,EAAE,CAAC;YAC5C,GAAG,CAAC,IAAI,GAAG,IAAI,CAAA;YACf,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,IAAI,IAAI,GAAG,KAAK,WAAW,EAAE,CAAC;YAC/C,GAAG,CAAC,IAAI,GAAG,KAAK,CAAA;YAChB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,iBAAiB,EAAE,CAAC;YACrC,GAAG,CAAC,YAAY,GAAG,IAAI,CAAA;YACvB,SAAQ;QACV,CAAC;aAAM,IAAI,GAAG,KAAK,oBAAoB,EAAE,CAAC;YACxC,GAAG,CAAC,YAAY,GAAG,KAAK,CAAA;YACxB,SAAQ;QACV,CAAC;aAAM,IAAI,SAAS,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAC/B,MAAM,GAAG,GAAG,GAAG,CAAC,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAA;YAC1C,GAAG,CAAC,GAAG,GAAG,GAAG,CAAA;YACb,SAAQ;QACV,CAAC;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACvC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAA;YACnD,GAAG,CAAC,UAAU,GAAG,GAAG,CAAA;YACpB,SAAQ;QACV,CAAC;aAAM,IAAI,iBAAiB,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACvC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,gBAAgB,CAAC,MAAM,CAAC,CAAA;YACnD,GAAG,CAAC,UAAU,GAAG,GAAG,CAAA;YACpB,SAAQ;QACV,CAAC;aAAM,IAAI,aAAa,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YACnC,MAAM,GAAG,GAAG,CAAC,GAAG,CAAC,SAAS,CAAC,YAAY,CAAC,MAAM,CAAC,CAAA;YAC/C,GAAG,CAAC,OAAO,GAAG,GAAG,CAAA;YACjB,SAAQ;QACV,CAAC;aAAM,IAAI,UAAU,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAChC,MAAM,GAAG,GAAG,GAAG,CAAC,SAAS,CAAC,SAAS,CAAC,MAAM,CAAC,CAAA;YAC3C,QAAQ,GAAG,EAAE,CAAC;gBACZ,KAAK,QAAQ;oBACX,IAAI,GAAG,MAAM,CAAA;oBACb,SAAQ;gBACV,KAAK,QAAQ,CAAC;gBACd,KAAK,QAAQ,CAAC;gBACd,KAAK,OAAO,CAAC;gBACb,KAAK,SAAS;oBACZ,IAAI,GAAG,MAAM,CAAC,GAAG,CAAC,CAAA;oBAClB,SAAQ;gBACV,KAAK,aAAa;oBAChB,IAAI,GAAG,MAAM,CAAC,UAAU,CAAA;oBACxB,SAAQ;gBACV;oBACE,OAAO,CAAC,KAAK,CAAC,2BAA2B,GAAG,EAAE,CAAC,CAAA;oBAC/C,eAAe,EAAE,CAAA;oBACjB,OAAO,CAAC,CAAA;YACZ,CAAC;QACH,CAAC;aAAM,IAAI,IAAI,CAAC,IAAI,CAAC,GAAG,CAAC,EAAE,CAAC;YAC1B,OAAO,CAAC,KAAK,CAAC,mBAAmB,GAAG,EAAE,CAAC,CAAA;YACvC,eAAe,EAAE,CAAA;YACjB,OAAO,CAAC,CAAA;QACV,CAAC;aAAM,CAAC;YACN,KAAK,CAAC,IAAI,CAAC,GAAG,CAAC,CAAA;QACjB,CAAC;IACH,CAAC;IAED,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAC/B,KAAK,MAAM,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,CAAC,CAAC,EAAE,CAAC;YAC9C,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;gBAC9B,OAAO,CAAC,KAAK,CAAC,uDAAuD,CAAC,CAAA;gBACtE,OAAO,CAAC,KAAK,CAAC,kDAAkD,CAAC,CAAA;gBACjE,OAAO,CAAC,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC;IAED,IAAI,CAAC,KAAK,CAAC,MAAM,EAAE,CAAC;QAClB,OAAO,CAAC,KAAK,CAAC,uCAAuC,CAAC,C
AAA;QACtD,eAAe,EAAE,CAAA;QACjB,OAAO,CAAC,CAAA;IACV,CAAC;IAED,IAAI,IAAI,KAAK,MAAM,CAAC,MAAM,IAAI,CAAC,WAAW,IAAI,GAAG,CAAC,MAAM,CAAC,EAAE,CAAC;QAC1D,OAAO,CAAC,KAAK,CAAC,iDAAiD,CAAC,CAAA;QAChE,eAAe,EAAE,CAAA;QACjB,OAAO,CAAC,CAAA;IACV,CAAC;IAED,IAAI,WAAW,EAAE,CAAC;QAChB,MAAM,iBAAiB,CAAC,IAAI,EAAE,KAAK,EAAE,GAAG,CAAC,CAAA;IAC3C,CAAC;SAAM,CAAC;QACN,MAAM,IAAI,CAAC,KAAK,EAAE,GAAG,CAAC,CAAA;IACxB,CAAC;IAED,OAAO,CAAC,CAAA;AACV,CAAC,CAAA;AACD,IAAI,CAAC,IAAI,GAAG,IAAI,CAAA;AAEhB,eAAe,IAAI,CAAA;AAEnB,IAAI,OAAO,CAAC,GAAG,CAAC,sBAAsB,KAAK,GAAG,EAAE,CAAC;IAC/C,MAAM,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,CAAA;IAClC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,IAAI,CAChB,IAAI,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,CAAC,IAAI,CAAC,EAC1B,EAAE,CAAC,EAAE;QACH,OAAO,CAAC,KAAK,CAAC,EAAE,CAAC,CAAA;QACjB,OAAO,CAAC,IAAI,CAAC,CAAC,CAAC,CAAA;IACjB,CAAC,CACF,CAAA;AACH,CAAC","sourcesContent":["#!/usr/bin/env node\nimport { readFile } from 'fs/promises'\nimport type { RimrafAsyncOptions } from './index.js'\nimport { rimraf } from './index.js'\n\nconst pj = fileURLToPath(new URL('../package.json', import.meta.url))\nconst pjDist = fileURLToPath(new URL('../../package.json', import.meta.url))\nconst { version } = JSON.parse(\n await readFile(pjDist, 'utf8').catch(() => readFile(pj, 'utf8')),\n) as { version: string }\n\nconst runHelpForUsage = () =>\n console.error('run `rimraf --help` for usage information')\n\nexport const help = `rimraf version ${version}\n\nUsage: rimraf [ ...]\nDeletes all files and folders at \"path\", recursively.\n\nOptions:\n -- Treat all subsequent arguments as paths\n -h --help Display this usage info\n --preserve-root Do not remove '/' recursively (default)\n --no-preserve-root Do not treat '/' specially\n -G --no-glob Treat arguments as literal paths, not globs (default)\n -g --glob Treat arguments as glob patterns\n -v --verbose Be verbose when deleting files, showing them as\n they are removed. 
Not compatible with --impl=native\n -V --no-verbose Be silent when deleting files, showing nothing as\n they are removed (default)\n -i --interactive Ask for confirmation before deleting anything\n Not compatible with --impl=native\n -I --no-interactive Do not ask for confirmation before deleting\n\n --impl= Specify the implementation to use:\n rimraf: choose the best option (default)\n native: the built-in implementation in Node.js\n manual: the platform-specific JS implementation\n posix: the Posix JS implementation\n windows: the Windows JS implementation (falls back to\n move-remove on ENOTEMPTY)\n move-remove: a slow reliable Windows fallback\n\nImplementation-specific options:\n --tmp= Temp file folder for 'move-remove' implementation\n --max-retries= maxRetries for 'native' and 'windows' implementations\n --retry-delay= retryDelay for 'native' implementation, default 100\n --backoff= Exponential backoff factor for retries (default: 1.2)\n`\n\nimport { parse, relative, resolve } from 'path'\nconst cwd = process.cwd()\n\nimport { Dirent, Stats } from 'fs'\nimport { createInterface, Interface } from 'readline'\nimport { fileURLToPath } from 'url'\n\nconst prompt = async (rl: Interface, q: string) =>\n new Promise(res => rl.question(q, res))\n\nconst interactiveRimraf = async (\n impl: (path: string | string[], opt?: RimrafAsyncOptions) => Promise,\n paths: string[],\n opt: RimrafAsyncOptions,\n) => {\n const existingFilter = opt.filter || (() => true)\n let allRemaining = false\n let noneRemaining = false\n const queue: (() => Promise)[] = []\n let processing = false\n const processQueue = async () => {\n if (processing) return\n processing = true\n let next: (() => Promise) | undefined\n while ((next = queue.shift())) {\n await next()\n }\n processing = false\n }\n const oneAtATime =\n (fn: (s: string, e: Dirent | Stats) => Promise) =>\n async (s: string, e: Dirent | Stats): Promise => {\n const p = new Promise(res => {\n queue.push(async () => {\n const result = await fn(s, e)\n res(result)\n return result\n })\n })\n processQueue()\n return p\n }\n const rl = createInterface({\n input: process.stdin,\n output: process.stdout,\n })\n opt.filter = oneAtATime(\n async (path: string, ent: Dirent | Stats): Promise => {\n if (noneRemaining) {\n return false\n }\n while (!allRemaining) {\n const a = (\n await prompt(rl, `rm? 
${relative(cwd, path)}\\n[(Yes)/No/All/Quit] > `)\n ).trim()\n if (/^n/i.test(a)) {\n return false\n } else if (/^a/i.test(a)) {\n allRemaining = true\n break\n } else if (/^q/i.test(a)) {\n noneRemaining = true\n return false\n } else if (a === '' || /^y/i.test(a)) {\n break\n } else {\n continue\n }\n }\n return existingFilter(path, ent)\n },\n )\n await impl(paths, opt)\n rl.close()\n}\n\nconst main = async (...args: string[]) => {\n const verboseFilter = (s: string) => {\n console.log(relative(cwd, s))\n return true\n }\n\n if (process.env.__RIMRAF_TESTING_BIN_FAIL__ === '1') {\n throw new Error('simulated rimraf failure')\n }\n\n const opt: RimrafAsyncOptions = {}\n const paths: string[] = []\n let dashdash = false\n let impl: (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ) => Promise<boolean> = rimraf\n\n let interactive = false\n\n for (const arg of args) {\n if (dashdash) {\n paths.push(arg)\n continue\n }\n if (arg === '--') {\n dashdash = true\n continue\n } else if (arg === '-rf' || arg === '-fr') {\n // this never did anything, but people put it there I guess\n continue\n } else if (arg === '-h' || arg === '--help') {\n console.log(help)\n return 0\n } else if (arg === '--interactive' || arg === '-i') {\n interactive = true\n continue\n } else if (arg === '--no-interactive' || arg === '-I') {\n interactive = false\n continue\n } else if (arg === '--verbose' || arg === '-v') {\n opt.filter = verboseFilter\n continue\n } else if (arg === '--no-verbose' || arg === '-V') {\n opt.filter = undefined\n continue\n } else if (arg === '-g' || arg === '--glob') {\n opt.glob = true\n continue\n } else if (arg === '-G' || arg === '--no-glob') {\n opt.glob = false\n continue\n } else if (arg === '--preserve-root') {\n opt.preserveRoot = true\n continue\n } else if (arg === '--no-preserve-root') {\n opt.preserveRoot = false\n continue\n } else if (/^--tmp=/.test(arg)) {\n const val = arg.substring('--tmp='.length)\n opt.tmp = val\n continue\n } else if (/^--max-retries=/.test(arg)) {\n const val = +arg.substring('--max-retries='.length)\n opt.maxRetries = val\n continue\n } else if (/^--retry-delay=/.test(arg)) {\n const val = +arg.substring('--retry-delay='.length)\n opt.retryDelay = val\n continue\n } else if (/^--backoff=/.test(arg)) {\n const val = +arg.substring('--backoff='.length)\n opt.backoff = val\n continue\n } else if (/^--impl=/.test(arg)) {\n const val = arg.substring('--impl='.length)\n switch (val) {\n case 'rimraf':\n impl = rimraf\n continue\n case 'native':\n case 'manual':\n case 'posix':\n case 'windows':\n impl = rimraf[val]\n continue\n case 'move-remove':\n impl = rimraf.moveRemove\n continue\n default:\n console.error(`unknown implementation: ${val}`)\n runHelpForUsage()\n return 1\n }\n } else if (/^-/.test(arg)) {\n console.error(`unknown option: ${arg}`)\n runHelpForUsage()\n return 1\n } else {\n paths.push(arg)\n }\n }\n\n if (opt.preserveRoot !== false) {\n for (const path of paths.map(p => resolve(p))) {\n if (path === parse(path).root) {\n console.error(`rimraf: it is dangerous to operate recursively on '/'`)\n console.error('use --no-preserve-root to override this failsafe')\n return 1\n }\n }\n }\n\n if (!paths.length) {\n console.error('rimraf: must provide a path to remove')\n runHelpForUsage()\n return 1\n }\n\n if (impl === rimraf.native && (interactive || opt.filter)) {\n console.error('native implementation does not support -v or -i')\n runHelpForUsage()\n return 1\n }\n\n if (interactive) {\n await interactiveRimraf(impl, paths, opt)\n } else {\n
await impl(paths, opt)\n }\n\n return 0\n}\nmain.help = help\n\nexport default main\n\nif (process.env.__TESTING_RIMRAF_BIN__ !== '1') {\n const args = process.argv.slice(2)\n main(...args).then(\n code => process.exit(code),\n er => {\n console.error(er)\n process.exit(1)\n },\n )\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts new file mode 100644 index 00000000000000..a68e925b249a8d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts @@ -0,0 +1,3 @@ +export declare const defaultTmp: (path: string) => Promise<string>; +export declare const defaultTmpSync: (path: string) => string; +//# sourceMappingURL=default-tmp.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map new file mode 100644 index 00000000000000..d0b35f2786233b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"default-tmp.d.ts","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAiEA,eAAO,MAAM,UAAU,SAnCc,MAAM,oBAoCe,CAAA;AAC1D,eAAO,MAAM,cAAc,SArBQ,MAAM,WAsByB,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js new file mode 100644 index 00000000000000..fb0846af5c3acf --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js @@ -0,0 +1,55 @@ +// The default temporary folder location for use in the windows algorithm. +// It's TEMPting to use dirname(path), since that's guaranteed to be on the +// same device. However, this means that: +// rimraf(path).then(() => rimraf(dirname(path))) +// will often fail with EBUSY, because the parent dir contains +// marked-for-deletion directory entries (which do not show up in readdir). +// The approach here is to use os.tmpdir() if it's on the same drive letter, +// or resolve(path, '\\temp') if it exists, or the root of the drive if not. +// On Posix (not that you'd be likely to use the windows algorithm there), +// it uses os.tmpdir() always. +import { tmpdir } from 'os'; +import { parse, resolve } from 'path'; +import { promises, statSync } from './fs.js'; +import platform from './platform.js'; +const { stat } = promises; +const isDirSync = (path) => { + try { + return statSync(path).isDirectory(); + } + catch (er) { + return false; + } +}; +const isDir = (path) => stat(path).then(st => st.isDirectory(), () => false); +const win32DefaultTmp = async (path) => { + const { root } = parse(path); + const tmp = tmpdir(); + const { root: tmpRoot } = parse(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = resolve(root, '/temp'); + if (await isDir(driveTmp)) { + return driveTmp; + } + return root; +}; +const win32DefaultTmpSync = (path) => { + const { root } = parse(path); + const tmp = tmpdir(); + const { root: tmpRoot } = parse(tmp); + if (root.toLowerCase() === tmpRoot.toLowerCase()) { + return tmp; + } + const driveTmp = resolve(root, '/temp'); + if (isDirSync(driveTmp)) { + return driveTmp; + } + return root; +}; +const posixDefaultTmp = async () => tmpdir(); +const posixDefaultTmpSync = () => tmpdir(); +export const defaultTmp = platform === 'win32' ? win32DefaultTmp : posixDefaultTmp; +export const defaultTmpSync = platform === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync; +//# sourceMappingURL=default-tmp.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map new file mode 100644 index 00000000000000..ea6af37802aa83 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/default-tmp.js.map @@ -0,0 +1 @@ +{"version":3,"file":"default-tmp.js","sourceRoot":"","sources":["../../src/default-tmp.ts"],"names":[],"mappings":"AAAA,0EAA0E;AAC1E,2EAA2E;AAC3E,0CAA0C;AAC1C,iDAAiD;AACjD,8DAA8D;AAC9D,2EAA2E;AAC3E,4EAA4E;AAC5E,4EAA4E;AAC5E,0EAA0E;AAC1E,8BAA8B;AAC9B,OAAO,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAC3B,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EAAE,QAAQ,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAA;AAC5C,OAAO,QAAQ,MAAM,eAAe,CAAA;AACpC,MAAM,EAAE,IAAI,EAAE,GAAG,QAAQ,CAAA;AAEzB,MAAM,SAAS,GAAG,CAAC,IAAY,EAAE,EAAE;IACjC,IAAI,CAAC;QACH,OAAO,QAAQ,CAAC,IAAI,CAAC,CAAC,WAAW,EAAE,CAAA;IACrC,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,KAAK,CAAA;IACd,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7B,IAAI,CAAC,IAAI,CAAC,CAAC,IAAI,CACb,EAAE,CAAC,EAAE,CAAC,EAAE,CAAC,WAAW,EAAE,EACtB,GAAG,EAAE,CAAC,KAAK,CACZ,CAAA;AAEH,MAAM,eAAe,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7C,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,MAAM,EAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,MAAM,KAAK,CAAC,QAAQ,CAAC,EAAE,CAAC;QAC1B,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC3C,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAC5B,MAAM,GAAG,GAAG,MAAM,EAAE,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,OAAO,EAAE,GAAG,KAAK,CAAC,GAAG,CAAC,CAAA;IACpC,IAAI,IAAI,CAAC,WAAW,EAAE,KAAK,OAAO,CAAC,WAAW,EAAE,EAAE,CAAC;QACjD,OAAO,GAAG,CAAA;IACZ,CAAC;IAED,MAAM,QAAQ,GAAG,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACvC,IAAI,SAAS,CAAC,QAAQ,CAAC,EAAE,CAAC;QACxB,OAAO,QAAQ,CAAA;IACjB,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,eAAe,GAAG,KAAK,IAAI,EAAE,CAAC,MAAM,EAAE,CAAA;AAC5C,MAAM,mBAAmB,GAAG,GAAG,EAAE,CAAC,MAAM,EAAE,CAAA;AAE1C,MAAM,CAAC,MAAM,UAAU,GACrB,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,eAAe,CAAC,CAAC,CAAC,eAAe,CAAA;AAC1D,MAAM,CAAC,MAAM,cAAc,GACzB,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,mBAAmB,CAAC,CAAC,CAAC,mBAAmB,CAAA","sourcesContent":["// The default temporary folder location for use in the windows algorithm.\n// It's TEMPting to use dirname(path), since that's guaranteed to be on the\n// same device. 
However, this means that:\n// rimraf(path).then(() => rimraf(dirname(path)))\n// will often fail with EBUSY, because the parent dir contains\n// marked-for-deletion directory entries (which do not show up in readdir).\n// The approach here is to use os.tmpdir() if it's on the same drive letter,\n// or resolve(path, '\\\\temp') if it exists, or the root of the drive if not.\n// On Posix (not that you'd be likely to use the windows algorithm there),\n// it uses os.tmpdir() always.\nimport { tmpdir } from 'os'\nimport { parse, resolve } from 'path'\nimport { promises, statSync } from './fs.js'\nimport platform from './platform.js'\nconst { stat } = promises\n\nconst isDirSync = (path: string) => {\n try {\n return statSync(path).isDirectory()\n } catch (er) {\n return false\n }\n}\n\nconst isDir = (path: string) =>\n stat(path).then(\n st => st.isDirectory(),\n () => false,\n )\n\nconst win32DefaultTmp = async (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (await isDir(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst win32DefaultTmpSync = (path: string) => {\n const { root } = parse(path)\n const tmp = tmpdir()\n const { root: tmpRoot } = parse(tmp)\n if (root.toLowerCase() === tmpRoot.toLowerCase()) {\n return tmp\n }\n\n const driveTmp = resolve(root, '/temp')\n if (isDirSync(driveTmp)) {\n return driveTmp\n }\n\n return root\n}\n\nconst posixDefaultTmp = async () => tmpdir()\nconst posixDefaultTmpSync = () => tmpdir()\n\nexport const defaultTmp =\n platform === 'win32' ? win32DefaultTmp : posixDefaultTmp\nexport const defaultTmpSync =\n platform === 'win32' ? 
win32DefaultTmpSync : posixDefaultTmpSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts new file mode 100644 index 00000000000000..20e76a82c4942e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts @@ -0,0 +1,3 @@ +export declare const fixEPERM: (fn: (path: string) => Promise<any>) => (path: string) => Promise<any>; +export declare const fixEPERMSync: (fn: (path: string) => any) => (path: string) => any; +//# sourceMappingURL=fix-eperm.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map new file mode 100644 index 00000000000000..ac17d6f4e060bb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fix-eperm.d.ts","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAGA,eAAO,MAAM,QAAQ,OACd,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAAkB,MAAM,iBAsB1D,CAAA;AAEH,eAAO,MAAM,YAAY,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAAY,MAAM,QAsBvE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js new file mode 100644 index 00000000000000..633c0e119df1f7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js @@ -0,0 +1,53 @@ +import { chmodSync, promises } from './fs.js'; +const { chmod } = promises; +export const fixEPERM = (fn) => async (path) => { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + await chmod(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return await fn(path); + } + throw er; + } +}; +export const fixEPERMSync = (fn) => (path) => { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOENT') { + return; + } + if (fer?.code === 'EPERM') { + try { + chmodSync(path, 0o666); + } + catch (er2) { + const fer2 = er2; + if (fer2?.code === 'ENOENT') { + return; + } + throw er; + } + return fn(path); + } + throw er; + } +}; +//# sourceMappingURL=fix-eperm.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map new file mode 100644 index 00000000000000..a6aa032b4891e8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fix-eperm.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"fix-eperm.js","sourceRoot":"","sources":["../../src/fix-eperm.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,MAAM,SAAS,CAAA;AAC7C,MAAM,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAA;AAE1B,MAAM,CAAC,MAAM,QAAQ,GACnB,CAAC,EAAkC,EAAE,EAAE,CAAC,KAAK,EAAE,IAAY,EAAE,EAAE;IAC7D,IAAI,CAAC;QACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;IACvB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,MAAM,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC1B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;QACvB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,CAAC,MAAM,YAAY,GAAG,CAAC,EAAyB,EAAE,EAAE,CAAC,CAAC,IAAY,EAAE,EAAE;IAC1E,IAAI,CAAC;QACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;IACjB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC3B,OAAM;QACR,CAAC;QACD,IAAI,GAAG,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YAC1B,IAAI,CAAC;gBACH,SAAS,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YACxB,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,MAAM,IAAI,GAAG,GAA4B,CAAA;gBACzC,IAAI,IAAI,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBAC5B,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;YACD,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;QACjB,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA","sourcesContent":["import { chmodSync, promises } from './fs.js'\nconst { chmod } = promises\n\nexport const fixEPERM =\n (fn: (path: string) => Promise<any>) => async (path: string) => {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n await chmod(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return await fn(path)\n }\n throw er\n }\n }\n\nexport const fixEPERMSync = (fn: (path: string) => any) => (path: string) => {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOENT') {\n return\n }\n if (fer?.code === 'EPERM') {\n try {\n chmodSync(path, 0o666)\n } catch (er2) {\n const fer2 = er2 as NodeJS.ErrnoException\n if (fer2?.code === 'ENOENT') {\n return\n }\n throw er\n }\n return fn(path)\n }\n throw er\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts new file mode 100644 index 00000000000000..9e4e95b4e7a411 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts @@ -0,0 +1,17 @@ +import fs, { Dirent } from 'fs'; +export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs'; +export declare const readdirSync: (path: fs.PathLike) => Dirent[]; +export declare const promises: { + chmod: (path: fs.PathLike, mode: fs.Mode) => Promise<void>; + mkdir: (path: fs.PathLike, options?: fs.Mode | (fs.MakeDirectoryOptions & { + recursive?: boolean | null; + }) | undefined | null) => Promise<string | undefined>; + readdir: (path: fs.PathLike) => Promise<Dirent[]>; + rename: (oldPath: fs.PathLike, newPath: fs.PathLike) => Promise<void>; + rm: (path: fs.PathLike, options: fs.RmOptions) => Promise<void>; + rmdir: (path: fs.PathLike) => Promise<void>; + stat: (path: fs.PathLike) => Promise<fs.Stats>; + lstat: (path: fs.PathLike) => Promise<fs.Stats>; + unlink: (path: fs.PathLike) => Promise<void>; +}; +//# 
sourceMappingURL=fs.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map new file mode 100644 index 00000000000000..8c8b1034cbcd27 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.d.ts","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,EAAE,EAAE,MAAM,EAAE,MAAM,IAAI,CAAA;AAG/B,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAGX,eAAO,MAAM,WAAW,SAAU,EAAE,CAAC,QAAQ,KAAG,MAAM,EACf,CAAA;AA+DvC,eAAO,MAAM,QAAQ;kBAxDA,EAAE,CAAC,QAAQ,QAAQ,EAAE,CAAC,IAAI,KAAG,OAAO,CAAC,IAAI,CAAC;kBAMvD,EAAE,CAAC,QAAQ,YAEb,EAAE,CAAC,IAAI,GACP,CAAC,EAAE,CAAC,oBAAoB,GAAG;QAAE,SAAS,CAAC,EAAE,OAAO,GAAG,IAAI,CAAA;KAAE,CAAC,GAC1D,SAAS,GACT,IAAI,KACP,OAAO,CAAC,MAAM,GAAG,SAAS,CAAC;oBAKP,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,MAAM,EAAE,CAAC;sBAO7B,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;eAOxD,EAAE,CAAC,QAAQ,WAAW,EAAE,CAAC,SAAS,KAAG,OAAO,CAAC,IAAI,CAAC;kBAK/C,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;iBAK5B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;kBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,EAAE,CAAC,KAAK,CAAC;mBAK9B,EAAE,CAAC,QAAQ,KAAG,OAAO,CAAC,IAAI,CAAC;CAehD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.js b/deps/npm/node_modules/rimraf/dist/esm/fs.js new file mode 100644 index 00000000000000..f9422ce992a546 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.js @@ -0,0 +1,31 @@ +// promisify ourselves, because older nodes don't have fs.promises +import fs from 'fs'; +// sync ones just take the sync version from node +export { chmodSync, mkdirSync, renameSync, rmdirSync, rmSync, statSync, lstatSync, unlinkSync, } from 'fs'; +import { readdirSync as rdSync } from 'fs'; +export const readdirSync = (path) => rdSync(path, { withFileTypes: true }); +// unrolled for better inlining, this seems to get better performance +// than something like: +// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d) +// which would be a bit cleaner. +const chmod = (path, mode) => new Promise((res, rej) => fs.chmod(path, mode, (er, ...d) => (er ? rej(er) : res(...d)))); +const mkdir = (path, options) => new Promise((res, rej) => fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made)))); +const readdir = (path) => new Promise((res, rej) => fs.readdir(path, { withFileTypes: true }, (er, data) => er ? rej(er) : res(data))); +const rename = (oldPath, newPath) => new Promise((res, rej) => fs.rename(oldPath, newPath, (er, ...d) => er ? rej(er) : res(...d))); +const rm = (path, options) => new Promise((res, rej) => fs.rm(path, options, (er, ...d) => (er ? rej(er) : res(...d)))); +const rmdir = (path) => new Promise((res, rej) => fs.rmdir(path, (er, ...d) => (er ? rej(er) : res(...d)))); +const stat = (path) => new Promise((res, rej) => fs.stat(path, (er, data) => (er ? rej(er) : res(data)))); +const lstat = (path) => new Promise((res, rej) => fs.lstat(path, (er, data) => (er ? rej(er) : res(data)))); +const unlink = (path) => new Promise((res, rej) => fs.unlink(path, (er, ...d) => (er ? 
rej(er) : res(...d)))); +export const promises = { + chmod, + mkdir, + readdir, + rename, + rm, + rmdir, + stat, + lstat, + unlink, +}; +//# sourceMappingURL=fs.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/fs.js.map b/deps/npm/node_modules/rimraf/dist/esm/fs.js.map new file mode 100644 index 00000000000000..c4c5d0f2cc42b4 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/fs.js.map @@ -0,0 +1 @@ +{"version":3,"file":"fs.js","sourceRoot":"","sources":["../../src/fs.ts"],"names":[],"mappings":"AAAA,kEAAkE;AAElE,OAAO,EAAc,MAAM,IAAI,CAAA;AAE/B,iDAAiD;AACjD,OAAO,EACL,SAAS,EACT,SAAS,EACT,UAAU,EACV,SAAS,EACT,MAAM,EACN,QAAQ,EACR,SAAS,EACT,UAAU,GACX,MAAM,IAAI,CAAA;AAEX,OAAO,EAAE,WAAW,IAAI,MAAM,EAAE,MAAM,IAAI,CAAA;AAC1C,MAAM,CAAC,MAAM,WAAW,GAAG,CAAC,IAAiB,EAAY,EAAE,CACzD,MAAM,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,CAAC,CAAA;AAEvC,qEAAqE;AACrE,uBAAuB;AACvB,sEAAsE;AACtE,gCAAgC;AAEhC,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAE,IAAa,EAAiB,EAAE,CAChE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CACZ,IAAiB,EACjB,OAIQ,EACqB,EAAE,CAC/B,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CAClE,CAAA;AAEH,MAAM,OAAO,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACvD,IAAI,OAAO,CAAW,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACjC,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,aAAa,EAAE,IAAI,EAAE,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CACrD,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,OAAoB,EAAE,OAAoB,EAAiB,EAAE,CAC3E,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,MAAM,CAAC,OAAO,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAC9C,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CACzB,CACF,CAAA;AAEH,MAAM,EAAE,GAAG,CAAC,IAAiB,EAAE,OAAqB,EAAiB,EAAE,CACrE,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,EAAE,CAAC,IAAI,EAAE,OAAO,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACtE,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAiB,EAAE,CACjD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CAChE,CAAA;AAEH,MAAM,IAAI,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACpD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,IAAI,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACxD,CAAA;AAEH,MAAM,KAAK,GAAG,CAAC,IAAiB,EAAqB,EAAE,CACrD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,KAAK,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,IAAI,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC,CAAC,CACzD,CAAA;AAEH,MAAM,MAAM,GAAG,CAAC,IAAiB,EAAiB,EAAE,CAClD,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE,CACvB,EAAE,CAAC,MAAM,CAAC,IAAI,EAAE,CAAC,EAAE,EAAE,GAAG,CAAQ,EAAE,EAAE,CAAC,CAAC,EAAE,CAAC,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,CAAC,CAAC,GAAG,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,CACjE,CAAA;AAEH,MAAM,CAAC,MAAM,QAAQ,GAAG;IACtB,KAAK;IACL,KAAK;IACL,OAAO;IACP,MAAM;IACN,EAAE;IACF,KAAK;IACL,I
AAI;IACJ,KAAK;IACL,MAAM;CACP,CAAA","sourcesContent":["// promisify ourselves, because older nodes don't have fs.promises\n\nimport fs, { Dirent } from 'fs'\n\n// sync ones just take the sync version from node\nexport {\n chmodSync,\n mkdirSync,\n renameSync,\n rmdirSync,\n rmSync,\n statSync,\n lstatSync,\n unlinkSync,\n} from 'fs'\n\nimport { readdirSync as rdSync } from 'fs'\nexport const readdirSync = (path: fs.PathLike): Dirent[] =>\n rdSync(path, { withFileTypes: true })\n\n// unrolled for better inlining, this seems to get better performance\n// than something like:\n// const makeCb = (res, rej) => (er, ...d) => er ? rej(er) : res(...d)\n// which would be a bit cleaner.\n\nconst chmod = (path: fs.PathLike, mode: fs.Mode): Promise<void> =>\n new Promise((res, rej) =>\n fs.chmod(path, mode, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst mkdir = (\n path: fs.PathLike,\n options?:\n | fs.Mode\n | (fs.MakeDirectoryOptions & { recursive?: boolean | null })\n | undefined\n | null,\n): Promise<string | undefined> =>\n new Promise((res, rej) =>\n fs.mkdir(path, options, (er, made) => (er ? rej(er) : res(made))),\n )\n\nconst readdir = (path: fs.PathLike): Promise<Dirent[]> =>\n new Promise((res, rej) =>\n fs.readdir(path, { withFileTypes: true }, (er, data) =>\n er ? rej(er) : res(data),\n ),\n )\n\nconst rename = (oldPath: fs.PathLike, newPath: fs.PathLike): Promise<void> =>\n new Promise((res, rej) =>\n fs.rename(oldPath, newPath, (er, ...d: any[]) =>\n er ? rej(er) : res(...d),\n ),\n )\n\nconst rm = (path: fs.PathLike, options: fs.RmOptions): Promise<void> =>\n new Promise((res, rej) =>\n fs.rm(path, options, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst rmdir = (path: fs.PathLike): Promise<void> =>\n new Promise((res, rej) =>\n fs.rmdir(path, (er, ...d: any[]) => (er ? rej(er) : res(...d))),\n )\n\nconst stat = (path: fs.PathLike): Promise<fs.Stats> =>\n new Promise((res, rej) =>\n fs.stat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst lstat = (path: fs.PathLike): Promise<fs.Stats> =>\n new Promise((res, rej) =>\n fs.lstat(path, (er, data) => (er ? rej(er) : res(data))),\n )\n\nconst unlink = (path: fs.PathLike): Promise<void> =>\n new Promise((res, rej) =>\n fs.unlink(path, (er, ...d: any[]) => (er ? 
rej(er) : res(...d))),\n )\n\nexport const promises = {\n chmod,\n mkdir,\n readdir,\n rename,\n rm,\n rmdir,\n stat,\n lstat,\n unlink,\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts new file mode 100644 index 00000000000000..f158cc27025b16 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts @@ -0,0 +1,3 @@ +export declare const ignoreENOENT: (p: Promise<any>) => Promise<any>; +export declare const ignoreENOENTSync: (fn: () => any) => any; +//# sourceMappingURL=ignore-enoent.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map new file mode 100644 index 00000000000000..2cfb3bbac5fab7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.d.ts","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,eAAO,MAAM,YAAY,MAAa,OAAO,CAAC,GAAG,CAAC,iBAK9C,CAAA;AAEJ,eAAO,MAAM,gBAAgB,OAAQ,MAAM,GAAG,QAQ7C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js new file mode 100644 index 00000000000000..753f4811cd384f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js @@ -0,0 +1,16 @@ +export const ignoreENOENT = async (p) => p.catch(er => { + if (er.code !== 'ENOENT') { + throw er; + } +}); +export const ignoreENOENTSync = (fn) => { + try { + return fn(); + } + catch (er) { + if (er?.code !== 'ENOENT') { + throw er; + } + } +}; +//# sourceMappingURL=ignore-enoent.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map new file mode 100644 index 00000000000000..acffb146233e13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/ignore-enoent.js.map @@ -0,0 +1 @@ +{"version":3,"file":"ignore-enoent.js","sourceRoot":"","sources":["../../src/ignore-enoent.ts"],"names":[],"mappings":"AAAA,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAAE,CAAe,EAAE,EAAE,CACpD,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE;IACX,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QACzB,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAC,CAAA;AAEJ,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAAC,EAAa,EAAE,EAAE;IAChD,IAAI,CAAC;QACH,OAAO,EAAE,EAAE,CAAA;IACb,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YACrD,MAAM,EAAE,CAAA;QACV,CAAC;IACH,CAAC;AACH,CAAC,CAAA","sourcesContent":["export const ignoreENOENT = async (p: Promise<any>) =>\n p.catch(er => {\n if (er.code !== 'ENOENT') {\n throw er\n }\n })\n\nexport const ignoreENOENTSync = (fn: () => any) => {\n try {\n return fn()\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code !== 'ENOENT') {\n throw er\n }\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts new file mode 100644 index 00000000000000..9ec4a124ab613d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts @@ -0,0 +1,50 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './opt-arg.js'; +export { assertRimrafOptions, isRimrafOptions, type RimrafAsyncOptions, type RimrafOptions, type RimrafSyncOptions, } from './opt-arg.js'; +export declare const nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const 
native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +export declare const rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +export declare const rimraf: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + rimraf: (path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>; + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + rimrafSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + manual: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + manualSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + native: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + nativeSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + posix: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + posixSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + windows: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + windowsSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + moveRemove: ((path: string | string[], opt?: RimrafAsyncOptions) => Promise<boolean>) & { + sync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; + }; + moveRemoveSync: (path: string | string[], opt?: RimrafSyncOptions) => boolean; +}; +//# sourceMappingURL=index.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map new file mode 100644 index 00000000000000..0dc659ca730252 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.d.ts.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.d.ts","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AACA,OAAO,EAGL,kBAAkB,EAClB,iBAAiB,EAClB,MAAM,cAAc,CAAA;AASrB,OAAO,EACL,mBAAmB,EACnB,eAAe,EACf,KAAK,kBAAkB,EACvB,KAAK,aAAa,EAClB,KAAK,iBAAiB,GACvB,MAAM,cAAc,CAAA;AAqCrB,eAAO,MAAM,UAAU,SAdd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAcF,CAAA;AACpD,eAAO,MAAM,MAAM,UAjCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAegB,CAAA;AAE7E,eAAO,MAAM,UAAU,SAjBd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAiBF,CAAA;AACpD,eAAO,MAAM,MAAM,UApCT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAkBgB,CAAA;AAE7E,eAAO,MAAM,WAAW,SApBf,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoBA,CAAA;AACtD,eAAO,MAAM,OAAO,UAvCV,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAqBmB,CAAA;AAEhF,eAAO,MAAM,SAAS,SAvBb,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAuBJ,CAAA;AAClD,eAAO,MAAM,KAAK,UA1CR,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAwBa,CAAA;AAE1E,eAAO,MAAM,cAAc,SA1BlB,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OA0BM,CAAA;AAC5D,eAAO,MAAM,UAAU,UA7Cb,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CA6B3D,CAAA;AAEF,eAAO,MAAM,UAAU,SA/Bd,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAmCrD,CAAA;AACD,eAAO,MAAM,IAAI,SApCR,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAoCxB,CAAA;AAK9B,eAAO,MAAM,MAAM,UA3DT,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;mBAFX,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;iBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;oBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;uBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;mBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;sBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;qBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;wBAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;wBAlBnD,MAAM,GAAG,MAAM,EAAE,QACjB,kBAAkB,KACvB,OAAO,CAAC,OAAO,CAAC;qBAgBZ,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;;2BAApD,MAAM,GAAG,MAAM,EAAE,QAAQ,iBAAiB,KAAG,OAAO;CAuD3D,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.js b/deps/npm/node_modules/rimraf/dist/esm/index.js new file mode 100644 index 00000000000000..d94d6f81a485c9 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.js @@ -0,0 +1,70 @@ +import { glob, globSync } from 'glob'; +import { optArg, optArgSync, } from './opt-arg.js'; +import pathArg from './path-arg.js'; +import { rimrafManual, rimrafManualSync } from './rimraf-manual.js'; +import { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'; +import { rimrafNative, rimrafNativeSync } from './rimraf-native.js'; +import { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'; +import { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'; +import { useNative, useNativeSync } from './use-native.js'; +export { assertRimrafOptions, isRimrafOptions, } from './opt-arg.js'; +const wrap = (fn) => async 
(path, opt) => { + const options = optArg(opt); + if (options.glob) { + path = await glob(path, options.glob); + } + if (Array.isArray(path)) { + return !!(await Promise.all(path.map(p => fn(pathArg(p, options), options)))).reduce((a, b) => a && b, true); + } + else { + return !!(await fn(pathArg(path, options), options)); + } +}; +const wrapSync = (fn) => (path, opt) => { + const options = optArgSync(opt); + if (options.glob) { + path = globSync(path, options.glob); + } + if (Array.isArray(path)) { + return !!path + .map(p => fn(pathArg(p, options), options)) + .reduce((a, b) => a && b, true); + } + else { + return !!fn(pathArg(path, options), options); + } +}; +export const nativeSync = wrapSync(rimrafNativeSync); +export const native = Object.assign(wrap(rimrafNative), { sync: nativeSync }); +export const manualSync = wrapSync(rimrafManualSync); +export const manual = Object.assign(wrap(rimrafManual), { sync: manualSync }); +export const windowsSync = wrapSync(rimrafWindowsSync); +export const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync }); +export const posixSync = wrapSync(rimrafPosixSync); +export const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync }); +export const moveRemoveSync = wrapSync(rimrafMoveRemoveSync); +export const moveRemove = Object.assign(wrap(rimrafMoveRemove), { + sync: moveRemoveSync, +}); +export const rimrafSync = wrapSync((path, opt) => useNativeSync(opt) ? + rimrafNativeSync(path, opt) + : rimrafManualSync(path, opt)); +export const sync = rimrafSync; +const rimraf_ = wrap((path, opt) => useNative(opt) ? rimrafNative(path, opt) : rimrafManual(path, opt)); +export const rimraf = Object.assign(rimraf_, { + rimraf: rimraf_, + sync: rimrafSync, + rimrafSync: rimrafSync, + manual, + manualSync, + native, + nativeSync, + posix, + posixSync, + windows, + windowsSync, + moveRemove, + moveRemoveSync, +}); +rimraf.rimraf = rimraf; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/index.js.map b/deps/npm/node_modules/rimraf/dist/esm/index.js.map new file mode 100644 index 00000000000000..0c8ca64c74f236 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/index.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"index.js","sourceRoot":"","sources":["../../src/index.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,IAAI,EAAE,QAAQ,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EACL,MAAM,EACN,UAAU,GAGX,MAAM,cAAc,CAAA;AACrB,OAAO,OAAO,MAAM,eAAe,CAAA;AACnC,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAChF,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAA;AAChE,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAA;AACtE,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAE1D,OAAO,EACL,mBAAmB,EACnB,eAAe,GAIhB,MAAM,cAAc,CAAA;AAErB,MAAM,IAAI,GACR,CAAC,EAA0D,EAAE,EAAE,CAC/D,KAAK,EACH,IAAuB,EACvB,GAAwB,EACN,EAAE;IACpB,MAAM,OAAO,GAAG,MAAM,CAAC,GAAG,CAAC,CAAA;IAC3B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,MAAM,IAAI,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACvC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,CACP,MAAM,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAC,CACnE,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAClC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,CAAC,MAAM,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC,CAAA;IACtD,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,QAAQ,GACZ,CAAC,EAAgD,EAAE,EAAE,CACrD,CAAC,IAAuB,EAAE,GAAuB,EAAW,EAAE;IAC5D,MAAM,OAAO,GAAG,UAAU,CAAC,GAAG,CAAC,CAAA;IAC/B,IAAI,OAAO,CAAC,IAAI,EAAE,CAAC;QACjB,IAAI,GAAG,QAAQ,CAAC,IAAI,EAAE,OAAO,CAAC,IAAI,CAAC,CAAA;IACrC,CAAC;IACD,IAAI,KAAK,CAAC,OAAO,CAAC,IAAI,CAAC,EAAE,CAAC;QACxB,OAAO,CAAC,CAAC,IAAI;aACV,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,CAAC,OAAO,CAAC,CAAC,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAC;aAC1C,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IACnC,CAAC;SAAM,CAAC;QACN,OAAO,CAAC,CAAC,EAAE,CAAC,OAAO,CAAC,IAAI,EAAE,OAAO,CAAC,EAAE,OAAO,CAAC,CAAA;IAC9C,CAAC;AACH,CAAC,CAAA;AAEH,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,gBAAgB,CAAC,CAAA;AACpD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,CAAA;AAE7E,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,gBAAgB,CAAC,CAAA;AACpD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,YAAY,CAAC,EAAE,EAAE,IAAI,EAAE,UAAU,EAAE,CAAC,CAAA;AAE7E,MAAM,CAAC,MAAM,WAAW,GAAG,QAAQ,CAAC,iBAAiB,CAAC,CAAA;AACtD,MAAM,CAAC,MAAM,OAAO,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,aAAa,CAAC,EAAE,EAAE,IAAI,EAAE,WAAW,EAAE,CAAC,CAAA;AAEhF,MAAM,CAAC,MAAM,SAAS,GAAG,QAAQ,CAAC,eAAe,CAAC,CAAA;AAClD,MAAM,CAAC,MAAM,KAAK,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,WAAW,CAAC,EAAE,EAAE,IAAI,EAAE,SAAS,EAAE,CAAC,CAAA;AAE1E,MAAM,CAAC,MAAM,cAAc,GAAG,QAAQ,CAAC,oBAAoB,CAAC,CAAA;AAC5D,MAAM,CAAC,MAAM,UAAU,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,gBAAgB,CAAC,EAAE;IAC9D,IAAI,EAAE,cAAc;CACrB,CAAC,CAAA;AAEF,MAAM,CAAC,MAAM,UAAU,GAAG,QAAQ,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CAC/C,aAAa,CAAC,GAAG,CAAC,CAAC,CAAC;IAClB,gBAAgB,CAAC,IAAI,EAAE,GAAG,CAAC;IAC7B,CAAC,CAAC,gBAAgB,CAAC,IAAI,EAAE,GAAG,CAAC,CAC9B,CAAA;AACD,MAAM,CAAC,MAAM,IAAI,GAAG,UAAU,CAAA;AAE9B,MAAM,OAAO,GAAG,IAAI,CAAC,CAAC,IAAI,EAAE,GAAG,EAAE,EAAE,CACjC,SAAS,CAAC,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAC,CAAC,YAAY,CAAC,IAAI,EAAE,GAAG,CAAC,CACnE,CAAA;AACD,MAAM,CAAC,MAAM,MAAM,GAAG,MAAM,CAAC,MAAM,CAAC,OAAO,EAAE;IAC3C,MAAM,EAAE,OAAO;IACf,IAAI,EAAE,UAAU;IAChB,UAAU,EAAE,UAAU;IACtB,MAAM;IACN,UAAU;IACV,MAAM;IACN,UAAU;IACV,KAAK;IACL,SAAS;IACT,OAAO;IACP,WAAW;IACX,UAAU;IACV,cAAc;CACf,CAAC,CAAA;AACF,MAAM,CAAC,MAAM,GAAG,MAAM,CAAA","sourcesContent":["import { glob, globSync } from 'glob'\nimport {\n optArg,\n 
optArgSync,\n RimrafAsyncOptions,\n RimrafSyncOptions,\n} from './opt-arg.js'\nimport pathArg from './path-arg.js'\nimport { rimrafManual, rimrafManualSync } from './rimraf-manual.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nimport { rimrafNative, rimrafNativeSync } from './rimraf-native.js'\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\nimport { useNative, useNativeSync } from './use-native.js'\n\nexport {\n assertRimrafOptions,\n isRimrafOptions,\n type RimrafAsyncOptions,\n type RimrafOptions,\n type RimrafSyncOptions,\n} from './opt-arg.js'\n\nconst wrap =\n (fn: (p: string, o: RimrafAsyncOptions) => Promise<boolean>) =>\n async (\n path: string | string[],\n opt?: RimrafAsyncOptions,\n ): Promise<boolean> => {\n const options = optArg(opt)\n if (options.glob) {\n path = await glob(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!(\n await Promise.all(path.map(p => fn(pathArg(p, options), options)))\n ).reduce((a, b) => a && b, true)\n } else {\n return !!(await fn(pathArg(path, options), options))\n }\n }\n\nconst wrapSync =\n (fn: (p: string, o: RimrafSyncOptions) => boolean) =>\n (path: string | string[], opt?: RimrafSyncOptions): boolean => {\n const options = optArgSync(opt)\n if (options.glob) {\n path = globSync(path, options.glob)\n }\n if (Array.isArray(path)) {\n return !!path\n .map(p => fn(pathArg(p, options), options))\n .reduce((a, b) => a && b, true)\n } else {\n return !!fn(pathArg(path, options), options)\n }\n }\n\nexport const nativeSync = wrapSync(rimrafNativeSync)\nexport const native = Object.assign(wrap(rimrafNative), { sync: nativeSync })\n\nexport const manualSync = wrapSync(rimrafManualSync)\nexport const manual = Object.assign(wrap(rimrafManual), { sync: manualSync })\n\nexport const windowsSync = wrapSync(rimrafWindowsSync)\nexport const windows = Object.assign(wrap(rimrafWindows), { sync: windowsSync })\n\nexport const posixSync = wrapSync(rimrafPosixSync)\nexport const posix = Object.assign(wrap(rimrafPosix), { sync: posixSync })\n\nexport const moveRemoveSync = wrapSync(rimrafMoveRemoveSync)\nexport const moveRemove = Object.assign(wrap(rimrafMoveRemove), {\n sync: moveRemoveSync,\n})\n\nexport const rimrafSync = wrapSync((path, opt) =>\n useNativeSync(opt) ?\n rimrafNativeSync(path, opt)\n : rimrafManualSync(path, opt),\n)\nexport const sync = rimrafSync\n\nconst rimraf_ = wrap((path, opt) =>\n useNative(opt) ? 
rimrafNative(path, opt) : rimrafManual(path, opt),\n)\nexport const rimraf = Object.assign(rimraf_, {\n rimraf: rimraf_,\n sync: rimrafSync,\n rimrafSync: rimrafSync,\n manual,\n manualSync,\n native,\n nativeSync,\n posix,\n posixSync,\n windows,\n windowsSync,\n moveRemove,\n moveRemoveSync,\n})\nrimraf.rimraf = rimraf\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts new file mode 100644 index 00000000000000..c869d4ae85251b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts @@ -0,0 +1,34 @@ +import { Dirent, Stats } from 'fs'; +import { GlobOptions } from 'glob'; +export declare const isRimrafOptions: (o: any) => o is RimrafOptions; +export declare const assertRimrafOptions: (o: any) => void; +export interface RimrafAsyncOptions { + preserveRoot?: boolean; + tmp?: string; + maxRetries?: number; + retryDelay?: number; + backoff?: number; + maxBackoff?: number; + signal?: AbortSignal; + glob?: boolean | GlobOptions; + filter?: ((path: string, ent: Dirent | Stats) => boolean) | ((path: string, ent: Dirent | Stats) => Promise<boolean>); +} +export interface RimrafSyncOptions extends RimrafAsyncOptions { + filter?: (path: string, ent: Dirent | Stats) => boolean; +} +export type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions; +export declare const optArg: (opt?: RimrafAsyncOptions) => (RimrafAsyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafAsyncOptions & { + glob: undefined; +}); +export declare const optArgSync: (opt?: RimrafSyncOptions) => (RimrafSyncOptions & { + glob: GlobOptions & { + withFileTypes: false; + }; +}) | (RimrafSyncOptions & { + glob: undefined; +}); +//# sourceMappingURL=opt-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map new file mode 100644 index 00000000000000..89e83b205ac628 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"opt-arg.d.ts","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,MAAM,EAAE,KAAK,EAAE,MAAM,IAAI,CAAA;AAClC,OAAO,EAAE,WAAW,EAAE,MAAM,MAAM,CAAA;AAKlC,eAAO,MAAM,eAAe,MAAO,GAAG,KAAG,CAAC,IAAI,aAUX,CAAA;AAEnC,eAAO,MAAM,mBAAmB,EAAE,CAAC,CAAC,EAAE,GAAG,KAAK,IAM7C,CAAA;AAED,MAAM,WAAW,kBAAkB;IACjC,YAAY,CAAC,EAAE,OAAO,CAAA;IACtB,GAAG,CAAC,EAAE,MAAM,CAAA;IACZ,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,OAAO,CAAC,EAAE,MAAM,CAAA;IAChB,UAAU,CAAC,EAAE,MAAM,CAAA;IACnB,MAAM,CAAC,EAAE,WAAW,CAAA;IACpB,IAAI,CAAC,EAAE,OAAO,GAAG,WAAW,CAAA;IAC5B,MAAM,CAAC,EACH,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,GAChD,CAAC,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAC,OAAO,CAAC,CAAC,CAAA;CAC9D;AAED,MAAM,WAAW,iBAAkB,SAAQ,kBAAkB;IAC3D,MAAM,CAAC,EAAE,CAAC,IAAI,EAAE,MAAM,EAAE,GAAG,EAAE,MAAM,GAAG,KAAK,KAAK,OAAO,CAAA;CACxD;AAED,MAAM,MAAM,aAAa,GAAG,iBAAiB,GAAG,kBAAkB,CAAA;AAqClE,eAAO,MAAM,MAAM,SAAS,kBAAkB;UA/BlC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA6B0C,CAAA;AACpE,eAAO,MAAM,UAAU,SAAS,iBAAiB;UAhCrC,WAAW,GAAG;QAAE,aAAa,EAAE,KAAK,CAAA;KAAE;;UAEjC,SAAS;EA8B6C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js new file mode 100644 index 00000000000000..eacfe6c4325e22 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js @@ -0,0 +1,46 @@ +const 
typeOrUndef = (val, t) => typeof val === 'undefined' || typeof val === t; +export const isRimrafOptions = (o) => !!o && + typeof o === 'object' && + typeOrUndef(o.preserveRoot, 'boolean') && + typeOrUndef(o.tmp, 'string') && + typeOrUndef(o.maxRetries, 'number') && + typeOrUndef(o.retryDelay, 'number') && + typeOrUndef(o.backoff, 'number') && + typeOrUndef(o.maxBackoff, 'number') && + (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) && + typeOrUndef(o.filter, 'function'); +export const assertRimrafOptions = (o) => { + if (!isRimrafOptions(o)) { + throw new Error('invalid rimraf options'); + } +}; +const optArgT = (opt) => { + assertRimrafOptions(opt); + const { glob, ...options } = opt; + if (!glob) { + return options; + } + const globOpt = glob === true ? + opt.signal ? + { signal: opt.signal } + : {} + : opt.signal ? + { + signal: opt.signal, + ...glob, + } + : glob; + return { + ...options, + glob: { + ...globOpt, + // always get absolute paths from glob, to ensure + // that we are referencing the correct thing. + absolute: true, + withFileTypes: false, + }, + }; +}; +export const optArg = (opt = {}) => optArgT(opt); +export const optArgSync = (opt = {}) => optArgT(opt); +//# sourceMappingURL=opt-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map new file mode 100644 index 00000000000000..82ff94f6e05b8e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/opt-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"opt-arg.js","sourceRoot":"","sources":["../../src/opt-arg.ts"],"names":[],"mappings":"AAGA,MAAM,WAAW,GAAG,CAAC,GAAQ,EAAE,CAAS,EAAE,EAAE,CAC1C,OAAO,GAAG,KAAK,WAAW,IAAI,OAAO,GAAG,KAAK,CAAC,CAAA;AAEhD,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,CAAM,EAAsB,EAAE,CAC5D,CAAC,CAAC,CAAC;IACH,OAAO,CAAC,KAAK,QAAQ;IACrB,WAAW,CAAC,CAAC,CAAC,YAAY,EAAE,SAAS,CAAC;IACtC,WAAW,CAAC,CAAC,CAAC,GAAG,EAAE,QAAQ,CAAC;IAC5B,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,WAAW,CAAC,CAAC,CAAC,OAAO,EAAE,QAAQ,CAAC;IAChC,WAAW,CAAC,CAAC,CAAC,UAAU,EAAE,QAAQ,CAAC;IACnC,CAAC,WAAW,CAAC,CAAC,CAAC,IAAI,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,IAAI,OAAO,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC;IAC1E,WAAW,CAAC,CAAC,CAAC,MAAM,EAAE,UAAU,CAAC,CAAA;AAEnC,MAAM,CAAC,MAAM,mBAAmB,GAAqB,CACnD,CAAM,EACsB,EAAE;IAC9B,IAAI,CAAC,eAAe,CAAC,CAAC,CAAC,EAAE,CAAC;QACxB,MAAM,IAAI,KAAK,CAAC,wBAAwB,CAAC,CAAA;IAC3C,CAAC;AACH,CAAC,CAAA;AAsBD,MAAM,OAAO,GAAG,CACd,GAAM,EAKsB,EAAE;IAC9B,mBAAmB,CAAC,GAAG,CAAC,CAAA;IACxB,MAAM,EAAE,IAAI,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAChC,IAAI,CAAC,IAAI,EAAE,CAAC;QACV,OAAO,OAAkC,CAAA;IAC3C,CAAC;IACD,MAAM,OAAO,GACX,IAAI,KAAK,IAAI,CAAC,CAAC;QACb,GAAG,CAAC,MAAM,CAAC,CAAC;YACV,EAAE,MAAM,EAAE,GAAG,CAAC,MAAM,EAAE;YACxB,CAAC,CAAC,EAAE;QACN,CAAC,CAAC,GAAG,CAAC,MAAM,CAAC,CAAC;YACZ;gBACE,MAAM,EAAE,GAAG,CAAC,MAAM;gBAClB,GAAG,IAAI;aACR;YACH,CAAC,CAAC,IAAI,CAAA;IACR,OAAO;QACL,GAAG,OAAO;QACV,IAAI,EAAE;YACJ,GAAG,OAAO;YACV,iDAAiD;YACjD,6CAA6C;YAC7C,QAAQ,EAAE,IAAI;YACd,aAAa,EAAE,KAAK;SACrB;KACsD,CAAA;AAC3D,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,MAAM,GAAG,CAAC,MAA0B,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA;AACpE,MAAM,CAAC,MAAM,UAAU,GAAG,CAAC,MAAyB,EAAE,EAAE,EAAE,CAAC,OAAO,CAAC,GAAG,CAAC,CAAA","sourcesContent":["import { Dirent, Stats } from 'fs'\nimport { GlobOptions } from 'glob'\n\nconst typeOrUndef = (val: any, t: string) =>\n typeof val === 'undefined' || typeof val === t\n\nexport const isRimrafOptions = (o: any): o is RimrafOptions =>\n !!o &&\n typeof o === 'object' &&\n 
typeOrUndef(o.preserveRoot, 'boolean') &&\n typeOrUndef(o.tmp, 'string') &&\n typeOrUndef(o.maxRetries, 'number') &&\n typeOrUndef(o.retryDelay, 'number') &&\n typeOrUndef(o.backoff, 'number') &&\n typeOrUndef(o.maxBackoff, 'number') &&\n (typeOrUndef(o.glob, 'boolean') || (o.glob && typeof o.glob === 'object')) &&\n typeOrUndef(o.filter, 'function')\n\nexport const assertRimrafOptions: (o: any) => void = (\n o: any,\n): asserts o is RimrafOptions => {\n if (!isRimrafOptions(o)) {\n throw new Error('invalid rimraf options')\n }\n}\n\nexport interface RimrafAsyncOptions {\n preserveRoot?: boolean\n tmp?: string\n maxRetries?: number\n retryDelay?: number\n backoff?: number\n maxBackoff?: number\n signal?: AbortSignal\n glob?: boolean | GlobOptions\n filter?:\n | ((path: string, ent: Dirent | Stats) => boolean)\n | ((path: string, ent: Dirent | Stats) => Promise<boolean>)\n}\n\nexport interface RimrafSyncOptions extends RimrafAsyncOptions {\n filter?: (path: string, ent: Dirent | Stats) => boolean\n}\n\nexport type RimrafOptions = RimrafSyncOptions | RimrafAsyncOptions\n\nconst optArgT = <T extends RimrafOptions>(\n opt: T,\n):\n | (T & {\n glob: GlobOptions & { withFileTypes: false }\n })\n | (T & { glob: undefined }) => {\n assertRimrafOptions(opt)\n const { glob, ...options } = opt\n if (!glob) {\n return options as T & { glob: undefined }\n }\n const globOpt =\n glob === true ?\n opt.signal ?\n { signal: opt.signal }\n : {}\n : opt.signal ?\n {\n signal: opt.signal,\n ...glob,\n }\n : glob\n return {\n ...options,\n glob: {\n ...globOpt,\n // always get absolute paths from glob, to ensure\n // that we are referencing the correct thing.\n absolute: true,\n withFileTypes: false,\n },\n } as T & { glob: GlobOptions & { withFileTypes: false } }\n}\n\nexport const optArg = (opt: RimrafAsyncOptions = {}) => optArgT(opt)\nexport const optArgSync = (opt: RimrafSyncOptions = {}) => optArgT(opt)\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/package.json b/deps/npm/node_modules/rimraf/dist/esm/package.json new file mode 100644 index 00000000000000..3dbc1ca591c055 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/package.json @@ -0,0 +1,3 @@ +{ + "type": "module" +} diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts new file mode 100644 index 00000000000000..c0b7e7cb4b15e3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions } from './index.js'; +declare const pathArg: (path: string, opt?: RimrafAsyncOptions) => string; +export default pathArg; +//# sourceMappingURL=path-arg.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map new file mode 100644 index 00000000000000..4fe93c3a8aec47 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.d.ts","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,MAAM,YAAY,CAAA;AAG/C,QAAA,MAAM,OAAO,SAAU,MAAM,QAAO,kBAAkB,WAgDrD,CAAA;AAED,eAAe,OAAO,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js new file mode 100644 index 00000000000000..f32cb106756dbc --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js @@ -0,0 +1,47 @@ +import { parse, resolve } from 'path'; +import { inspect } from 
'util'; +import platform from './platform.js'; +const pathArg = (path, opt = {}) => { + const type = typeof path; + if (type !== 'string') { + const ctor = path && type === 'object' && path.constructor; + const received = ctor && ctor.name ? `an instance of ${ctor.name}` + : type === 'object' ? inspect(path) + : `type ${type} ${path}`; + const msg = 'The "path" argument must be of type string. ' + `Received ${received}`; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_TYPE', + }); + } + if (/\0/.test(path)) { + // simulate same failure that node raises + const msg = 'path must be a string without null bytes'; + throw Object.assign(new TypeError(msg), { + path, + code: 'ERR_INVALID_ARG_VALUE', + }); + } + path = resolve(path); + const { root } = parse(path); + if (path === root && opt.preserveRoot !== false) { + const msg = 'refusing to remove root directory without preserveRoot:false'; + throw Object.assign(new Error(msg), { + path, + code: 'ERR_PRESERVE_ROOT', + }); + } + if (platform === 'win32') { + const badWinChars = /[*|"<>?:]/; + const { root } = parse(path); + if (badWinChars.test(path.substring(root.length))) { + throw Object.assign(new Error('Illegal characters in path.'), { + path, + code: 'EINVAL', + }); + } + } + return path; +}; +export default pathArg; +//# sourceMappingURL=path-arg.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map new file mode 100644 index 00000000000000..2f73f5f38929be --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/path-arg.js.map @@ -0,0 +1 @@ +{"version":3,"file":"path-arg.js","sourceRoot":"","sources":["../../src/path-arg.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AACrC,OAAO,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAE9B,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,MAAM,OAAO,GAAG,CAAC,IAAY,EAAE,MAA0B,EAAE,EAAE,EAAE;IAC7D,MAAM,IAAI,GAAG,OAAO,IAAI,CAAA;IACxB,IAAI,IAAI,KAAK,QAAQ,EAAE,CAAC;QACtB,MAAM,IAAI,GAAG,IAAI,IAAI,IAAI,KAAK,QAAQ,IAAI,IAAI,CAAC,WAAW,CAAA;QAC1D,MAAM,QAAQ,GACZ,IAAI,IAAI,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,kBAAkB,IAAI,CAAC,IAAI,EAAE;YACjD,CAAC,CAAC,IAAI,KAAK,QAAQ,CAAC,CAAC,CAAC,OAAO,CAAC,IAAI,CAAC;gBACnC,CAAC,CAAC,QAAQ,IAAI,IAAI,IAAI,EAAE,CAAA;QAC1B,MAAM,GAAG,GACP,8CAA8C,GAAG,YAAY,QAAQ,EAAE,CAAA;QACzE,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,sBAAsB;SAC7B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,IAAI,CAAC,IAAI,CAAC,IAAI,CAAC,EAAE,CAAC;QACpB,yCAAyC;QACzC,MAAM,GAAG,GAAG,0CAA0C,CAAA;QACtD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,SAAS,CAAC,GAAG,CAAC,EAAE;YACtC,IAAI;YACJ,IAAI,EAAE,uBAAuB;SAC9B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,GAAG,OAAO,CAAC,IAAI,CAAC,CAAA;IACpB,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;IAE5B,IAAI,IAAI,KAAK,IAAI,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,EAAE,CAAC;QAChD,MAAM,GAAG,GAAG,8DAA8D,CAAA;QAC1E,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,GAAG,CAAC,EAAE;YAClC,IAAI;YACJ,IAAI,EAAE,mBAAmB;SAC1B,CAAC,CAAA;IACJ,CAAC;IAED,IAAI,QAAQ,KAAK,OAAO,EAAE,CAAC;QACzB,MAAM,WAAW,GAAG,WAAW,CAAA;QAC/B,MAAM,EAAE,IAAI,EAAE,GAAG,KAAK,CAAC,IAAI,CAAC,CAAA;QAC5B,IAAI,WAAW,CAAC,IAAI,CAAC,IAAI,CAAC,SAAS,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,EAAE,CAAC;YAClD,MAAM,MAAM,CAAC,MAAM,CAAC,IAAI,KAAK,CAAC,6BAA6B,CAAC,EAAE;gBAC5D,IAAI;gBACJ,IAAI,EAAE,QAAQ;aACf,CAAC,CAAA;QACJ,CAAC;IACH,CAAC;IAED,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,eAAe,OAAO,CAAA","sourcesContent":["import { parse, resolve } from 'path'\nimport { inspect } from 'util'\nimport { RimrafAsyncOptions } from './index.js'\nimport platform from 
'./platform.js'\n\nconst pathArg = (path: string, opt: RimrafAsyncOptions = {}) => {\n const type = typeof path\n if (type !== 'string') {\n const ctor = path && type === 'object' && path.constructor\n const received =\n ctor && ctor.name ? `an instance of ${ctor.name}`\n : type === 'object' ? inspect(path)\n : `type ${type} ${path}`\n const msg =\n 'The \"path\" argument must be of type string. ' + `Received ${received}`\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_TYPE',\n })\n }\n\n if (/\\0/.test(path)) {\n // simulate same failure that node raises\n const msg = 'path must be a string without null bytes'\n throw Object.assign(new TypeError(msg), {\n path,\n code: 'ERR_INVALID_ARG_VALUE',\n })\n }\n\n path = resolve(path)\n const { root } = parse(path)\n\n if (path === root && opt.preserveRoot !== false) {\n const msg = 'refusing to remove root directory without preserveRoot:false'\n throw Object.assign(new Error(msg), {\n path,\n code: 'ERR_PRESERVE_ROOT',\n })\n }\n\n if (platform === 'win32') {\n const badWinChars = /[*|\"<>?:]/\n const { root } = parse(path)\n if (badWinChars.test(path.substring(root.length))) {\n throw Object.assign(new Error('Illegal characters in path.'), {\n path,\n code: 'EINVAL',\n })\n }\n }\n\n return path\n}\n\nexport default pathArg\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts new file mode 100644 index 00000000000000..e127a8e529ffd2 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts @@ -0,0 +1,3 @@ +declare const _default: string; +export default _default; +//# sourceMappingURL=platform.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map new file mode 100644 index 00000000000000..ef2e6734f8cfbb --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.d.ts","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":";AAAA,wBAA0E"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.js b/deps/npm/node_modules/rimraf/dist/esm/platform.js new file mode 100644 index 00000000000000..a2641721b78190 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.js @@ -0,0 +1,2 @@ +export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform; +//# sourceMappingURL=platform.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/platform.js.map b/deps/npm/node_modules/rimraf/dist/esm/platform.js.map new file mode 100644 index 00000000000000..c5fdaf9c0e428c --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/platform.js.map @@ -0,0 +1 @@ +{"version":3,"file":"platform.js","sourceRoot":"","sources":["../../src/platform.ts"],"names":[],"mappings":"AAAA,eAAe,OAAO,CAAC,GAAG,CAAC,2BAA2B,IAAI,OAAO,CAAC,QAAQ,CAAA","sourcesContent":["export default process.env.__TESTING_RIMRAF_PLATFORM__ || process.platform\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts new file mode 100644 index 00000000000000..cce73097f1681f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts @@ -0,0 +1,3 @@ +export declare const readdirOrError: (path: string) => Promise<import("fs").Dirent[] | NodeJS.ErrnoException>; +export declare const readdirOrErrorSync: (path: 
string) => import("fs").Dirent[] | NodeJS.ErrnoException; +//# sourceMappingURL=readdir-or-error.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map new file mode 100644 index 00000000000000..8a19f6bdfd0706 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.d.ts","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAIA,eAAO,MAAM,cAAc,SAAU,MAAM,2DACa,CAAA;AACxD,eAAO,MAAM,kBAAkB,SAAU,MAAM,kDAM9C,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js new file mode 100644 index 00000000000000..71235135c63009 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js @@ -0,0 +1,14 @@ +// returns an array of entries if readdir() works, +// or the error that readdir() raised if not. +import { promises, readdirSync } from './fs.js'; +const { readdir } = promises; +export const readdirOrError = (path) => readdir(path).catch(er => er); +export const readdirOrErrorSync = (path) => { + try { + return readdirSync(path); + } + catch (er) { + return er; + } +}; +//# sourceMappingURL=readdir-or-error.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map new file mode 100644 index 00000000000000..1d0c00efde2e76 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/readdir-or-error.js.map @@ -0,0 +1 @@ +{"version":3,"file":"readdir-or-error.js","sourceRoot":"","sources":["../../src/readdir-or-error.ts"],"names":[],"mappings":"AAAA,kDAAkD;AAClD,6CAA6C;AAC7C,OAAO,EAAE,QAAQ,EAAE,WAAW,EAAE,MAAM,SAAS,CAAA;AAC/C,MAAM,EAAE,OAAO,EAAE,GAAG,QAAQ,CAAA;AAC5B,MAAM,CAAC,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAC7C,OAAO,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,EAAE,CAAC,EAA2B,CAAC,CAAA;AACxD,MAAM,CAAC,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IACjD,IAAI,CAAC;QACH,OAAO,WAAW,CAAC,IAAI,CAAC,CAAA;IAC1B,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,OAAO,EAA2B,CAAA;IACpC,CAAC;AACH,CAAC,CAAA","sourcesContent":["// returns an array of entries if readdir() works,\n// or the error that readdir() raised if not.\nimport { promises, readdirSync } from './fs.js'\nconst { readdir } = promises\nexport const readdirOrError = (path: string) =>\n readdir(path).catch(er => er as NodeJS.ErrnoException)\nexport const readdirOrErrorSync = (path: string) => {\n try {\n return readdirSync(path)\n } catch (er) {\n return er as NodeJS.ErrnoException\n }\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts new file mode 100644 index 00000000000000..c0af0dd62f0df9 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts @@ -0,0 +1,8 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const MAXBACKOFF = 200; +export declare const RATE = 1.2; +export declare const MAXRETRIES = 10; +export declare const codes: Set<string>; +export declare const retryBusy: (fn: (path: string) => Promise<any>) => (path: string, opt: RimrafAsyncOptions, backoff?: number, total?: number) => Promise<any>; +export declare const retryBusySync: (fn: (path: string) => any) => (path: string, opt: RimrafOptions) => any; +//# sourceMappingURL=retry-busy.d.ts.map \ No newline at 
end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map new file mode 100644 index 00000000000000..21960c58914b4b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"retry-busy.d.ts","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAEA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAE9D,eAAO,MAAM,UAAU,MAAM,CAAA;AAC7B,eAAO,MAAM,IAAI,MAAM,CAAA;AACvB,eAAO,MAAM,UAAU,KAAK,CAAA;AAC5B,eAAO,MAAM,KAAK,aAAyC,CAAA;AAE3D,eAAO,MAAM,SAAS,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,OAAO,CAAC,GAAG,CAAC,YAElD,MAAM,OACP,kBAAkB,mDAkC1B,CAAA;AAGD,eAAO,MAAM,aAAa,OAAQ,CAAC,IAAI,EAAE,MAAM,KAAK,GAAG,YAC/B,MAAM,OAAO,aAAa,QAsBjD,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js new file mode 100644 index 00000000000000..17e336a4d583f5 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js @@ -0,0 +1,63 @@ +// note: max backoff is the maximum that any *single* backoff will do +export const MAXBACKOFF = 200; +export const RATE = 1.2; +export const MAXRETRIES = 10; +export const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY']); +export const retryBusy = (fn) => { + const method = async (path, opt, backoff = 1, total = 0) => { + const mbo = opt.maxBackoff || MAXBACKOFF; + const rate = opt.backoff || RATE; + const max = opt.maxRetries || MAXRETRIES; + let retries = 0; + while (true) { + try { + return await fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && fer?.code && codes.has(fer.code)) { + backoff = Math.ceil(backoff * rate); + total = backoff + total; + if (total < mbo) { + return new Promise((res, rej) => { + setTimeout(() => { + method(path, opt, backoff, total).then(res, rej); + }, backoff); + }); + } + if (retries < max) { + retries++; + continue; + } + } + throw er; + } + } + }; + return method; +}; +// just retries, no async so no backoff +export const retryBusySync = (fn) => { + const method = (path, opt) => { + const max = opt.maxRetries || MAXRETRIES; + let retries = 0; + while (true) { + try { + return fn(path); + } + catch (er) { + const fer = er; + if (fer?.path === path && + fer?.code && + codes.has(fer.code) && + retries < max) { + retries++; + continue; + } + throw er; + } + } + }; + return method; +}; +//# sourceMappingURL=retry-busy.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map new file mode 100644 index 00000000000000..f4239eb4142234 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/retry-busy.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"retry-busy.js","sourceRoot":"","sources":["../../src/retry-busy.ts"],"names":[],"mappings":"AAAA,qEAAqE;AAIrE,MAAM,CAAC,MAAM,UAAU,GAAG,GAAG,CAAA;AAC7B,MAAM,CAAC,MAAM,IAAI,GAAG,GAAG,CAAA;AACvB,MAAM,CAAC,MAAM,UAAU,GAAG,EAAE,CAAA;AAC5B,MAAM,CAAC,MAAM,KAAK,GAAG,IAAI,GAAG,CAAC,CAAC,QAAQ,EAAE,QAAQ,EAAE,OAAO,CAAC,CAAC,CAAA;AAE3D,MAAM,CAAC,MAAM,SAAS,GAAG,CAAC,EAAkC,EAAE,EAAE;IAC9D,MAAM,MAAM,GAAG,KAAK,EAClB,IAAY,EACZ,GAAuB,EACvB,OAAO,GAAG,CAAC,EACX,KAAK,GAAG,CAAC,EACT,EAAE;QACF,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,MAAM,IAAI,GAAG,GAAG,CAAC,OAAO,IAAI,IAAI,CAAA;QAChC,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,MAAM,EAAE,CAAC,IAAI,CAAC,CAAA;YACvB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IAAI,GAAG,EAAE,IAAI,KAAK,IAAI,IAAI,GAAG,EAAE,IAAI,IAAI,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC,EAAE,CAAC;oBAC3D,OAAO,GAAG,IAAI,CAAC,IAAI,CAAC,OAAO,GAAG,IAAI,CAAC,CAAA;oBACnC,KAAK,GAAG,OAAO,GAAG,KAAK,CAAA;oBACvB,IAAI,KAAK,GAAG,GAAG,EAAE,CAAC;wBAChB,OAAO,IAAI,OAAO,CAAC,CAAC,GAAG,EAAE,GAAG,EAAE,EAAE;4BAC9B,UAAU,CAAC,GAAG,EAAE;gCACd,MAAM,CAAC,IAAI,EAAE,GAAG,EAAE,OAAO,EAAE,KAAK,CAAC,CAAC,IAAI,CAAC,GAAG,EAAE,GAAG,CAAC,CAAA;4BAClD,CAAC,EAAE,OAAO,CAAC,CAAA;wBACb,CAAC,CAAC,CAAA;oBACJ,CAAC;oBACD,IAAI,OAAO,GAAG,GAAG,EAAE,CAAC;wBAClB,OAAO,EAAE,CAAA;wBACT,SAAQ;oBACV,CAAC;gBACH,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IAED,OAAO,MAAM,CAAA;AACf,CAAC,CAAA;AAED,uCAAuC;AACvC,MAAM,CAAC,MAAM,aAAa,GAAG,CAAC,EAAyB,EAAE,EAAE;IACzD,MAAM,MAAM,GAAG,CAAC,IAAY,EAAE,GAAkB,EAAE,EAAE;QAClD,MAAM,GAAG,GAAG,GAAG,CAAC,UAAU,IAAI,UAAU,CAAA;QACxC,IAAI,OAAO,GAAG,CAAC,CAAA;QACf,OAAO,IAAI,EAAE,CAAC;YACZ,IAAI,CAAC;gBACH,OAAO,EAAE,CAAC,IAAI,CAAC,CAAA;YACjB,CAAC;YAAC,OAAO,EAAE,EAAE,CAAC;gBACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;gBACvC,IACE,GAAG,EAAE,IAAI,KAAK,IAAI;oBAClB,GAAG,EAAE,IAAI;oBACT,KAAK,CAAC,GAAG,CAAC,GAAG,CAAC,IAAI,CAAC;oBACnB,OAAO,GAAG,GAAG,EACb,CAAC;oBACD,OAAO,EAAE,CAAA;oBACT,SAAQ;gBACV,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;IACH,CAAC,CAAA;IACD,OAAO,MAAM,CAAA;AACf,CAAC,CAAA","sourcesContent":["// note: max backoff is the maximum that any *single* backoff will do\n\nimport { RimrafAsyncOptions, RimrafOptions } from './index.js'\n\nexport const MAXBACKOFF = 200\nexport const RATE = 1.2\nexport const MAXRETRIES = 10\nexport const codes = new Set(['EMFILE', 'ENFILE', 'EBUSY'])\n\nexport const retryBusy = (fn: (path: string) => Promise) => {\n const method = async (\n path: string,\n opt: RimrafAsyncOptions,\n backoff = 1,\n total = 0,\n ) => {\n const mbo = opt.maxBackoff || MAXBACKOFF\n const rate = opt.backoff || RATE\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return await fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.path === path && fer?.code && codes.has(fer.code)) {\n backoff = Math.ceil(backoff * rate)\n total = backoff + total\n if (total < mbo) {\n return new Promise((res, rej) => {\n setTimeout(() => {\n method(path, opt, backoff, total).then(res, rej)\n }, backoff)\n })\n }\n if (retries < max) {\n retries++\n continue\n }\n }\n throw er\n }\n }\n }\n\n return method\n}\n\n// just retries, no async so no backoff\nexport const retryBusySync = (fn: (path: string) => any) => {\n const method = (path: string, opt: RimrafOptions) => {\n const max = opt.maxRetries || MAXRETRIES\n let retries = 0\n while (true) {\n try {\n return fn(path)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (\n 
fer?.path === path &&\n fer?.code &&\n codes.has(fer.code) &&\n retries < max\n ) {\n retries++\n continue\n }\n throw er\n }\n }\n }\n return method\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts new file mode 100644 index 00000000000000..35c5c86844c7fa --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts @@ -0,0 +1,3 @@ +export declare const rimrafManual: (path: string, opt: import("./opt-arg.js").RimrafAsyncOptions) => Promise; +export declare const rimrafManualSync: (path: string, opt: import("./opt-arg.js").RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-manual.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map new file mode 100644 index 00000000000000..19bd25149ceb07 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.d.ts","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAKA,eAAO,MAAM,YAAY,oFAAqD,CAAA;AAC9E,eAAO,MAAM,gBAAgB,0EAC+B,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js new file mode 100644 index 00000000000000..132708ffaa5870 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js @@ -0,0 +1,6 @@ +import platform from './platform.js'; +import { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'; +import { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'; +export const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix; +export const rimrafManualSync = platform === 'win32' ? rimrafWindowsSync : rimrafPosixSync; +//# sourceMappingURL=rimraf-manual.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map new file mode 100644 index 00000000000000..212c815dc356d0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-manual.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-manual.js","sourceRoot":"","sources":["../../src/rimraf-manual.ts"],"names":[],"mappings":"AAAA,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,OAAO,EAAE,WAAW,EAAE,eAAe,EAAE,MAAM,mBAAmB,CAAA;AAChE,OAAO,EAAE,aAAa,EAAE,iBAAiB,EAAE,MAAM,qBAAqB,CAAA;AAEtE,MAAM,CAAC,MAAM,YAAY,GAAG,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,aAAa,CAAC,CAAC,CAAC,WAAW,CAAA;AAC9E,MAAM,CAAC,MAAM,gBAAgB,GAC3B,QAAQ,KAAK,OAAO,CAAC,CAAC,CAAC,iBAAiB,CAAC,CAAC,CAAC,eAAe,CAAA","sourcesContent":["import platform from './platform.js'\n\nimport { rimrafPosix, rimrafPosixSync } from './rimraf-posix.js'\nimport { rimrafWindows, rimrafWindowsSync } from './rimraf-windows.js'\n\nexport const rimrafManual = platform === 'win32' ? rimrafWindows : rimrafPosix\nexport const rimrafManualSync =\n platform === 'win32' ? 
rimrafWindowsSync : rimrafPosixSync\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts new file mode 100644 index 00000000000000..5d41d40825e4c7 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafMoveRemove: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafMoveRemoveSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-move-remove.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map new file mode 100644 index 00000000000000..4062eaebbb1302 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-move-remove.d.ts","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AA6BA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA4ClE,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,kBAAkB,qBAWxB,CAAA;AA4ED,eAAO,MAAM,oBAAoB,SAAU,MAAM,OAAO,iBAAiB,YAUxE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js new file mode 100644 index 00000000000000..093e40f49f5a2e --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js @@ -0,0 +1,187 @@ +// https://youtu.be/uhRWMGBjlO8?t=537 +// +// 1. readdir +// 2. for each entry +// a. if a non-empty directory, recurse +// b. if an empty directory, move to random hidden file name in $TEMP +// c. unlink/rmdir $TEMP +// +// This works around the fact that unlink/rmdir is non-atomic and takes +// a non-deterministic amount of time to complete. +// +// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm. 
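// (Added sketch, not from the vendored file: a minimal standalone version of
// the move-then-remove trick described above; `moveThenRemove` and its
// parameters are hypothetical names. rename() is atomic within a single
// filesystem, so the entry vanishes from its original location at once, and
// the slow unlink/rmdir work then happens on the temp name.)
import { rename, rm } from 'node:fs/promises'
import { basename, join } from 'node:path'
const moveThenRemove = async (path, tmpDir) => {
  // hide the entry under a random hidden name first (atomic on the same fs)
  const tmpFile = join(tmpDir, `.${basename(path)}.${Math.random()}`)
  await rename(path, tmpFile)
  // ...then take as long as needed to actually delete it
  await rm(tmpFile, { recursive: true, force: true })
}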
+import { basename, parse, resolve } from 'path'; +import { defaultTmp, defaultTmpSync } from './default-tmp.js'; +import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'; +import { chmodSync, lstatSync, promises as fsPromises, renameSync, rmdirSync, unlinkSync, } from './fs.js'; +const { lstat, rename, unlink, rmdir, chmod } = fsPromises; +import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'; +// crypto.randomBytes is much slower, and Math.random() is enough here +const uniqueFilename = (path) => `.${basename(path)}.${Math.random()}`; +const unlinkFixEPERM = async (path) => unlink(path).catch((er) => { + if (er.code === 'EPERM') { + return chmod(path, 0o666).then(() => unlink(path), er2 => { + if (er2.code === 'ENOENT') { + return; + } + throw er; + }); + } + else if (er.code === 'ENOENT') { + return; + } + throw er; +}); +const unlinkFixEPERMSync = (path) => { + try { + unlinkSync(path); + } + catch (er) { + if (er?.code === 'EPERM') { + try { + return chmodSync(path, 0o666); + } + catch (er2) { + if (er2?.code === 'ENOENT') { + return; + } + throw er; + } + } + else if (er?.code === 'ENOENT') { + return; + } + throw er; + } +}; +export const rimrafMoveRemove = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafMoveRemoveDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafMoveRemoveDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDir(path, { ...opt, tmp: await defaultTmp(path) }, ent); + } + if (path === opt.tmp && parse(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? await readdirOrError(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. 
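// (Added note, not from the vendored file: pathArg() in path-arg.js already
// throws ERR_PRESERVE_ROOT for the filesystem root unless the caller passed
// `{ preserveRoot: false }`, as in `rimraf('/', { preserveRoot: false })`,
// so the guard just below is the only check still needed at this point.)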
+ if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir)); + return true; +}; +const tmpUnlink = async (path, tmp, rm) => { + const tmpFile = resolve(tmp, uniqueFilename(path)); + await rename(path, tmpFile); + return await rm(tmpFile); +}; +export const rimrafMoveRemoveSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafMoveRemoveDirSync(path, opt, lstatSync(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafMoveRemoveDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + if (!opt.tmp) { + return rimrafMoveRemoveDirSync(path, { ...opt, tmp: defaultTmpSync(path) }, ent); + } + const tmp = opt.tmp; + if (path === opt.tmp && parse(path).root !== path) { + throw new Error('cannot delete temp directory used for deletion'); + } + const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = resolve(path, ent.name); + removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll; + } + if (!removedAll) { + return false; + } + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync)); + return true; +}; +const tmpUnlinkSync = (path, tmp, rmSync) => { + const tmpFile = resolve(tmp, uniqueFilename(path)); + renameSync(path, tmpFile); + return rmSync(tmpFile); +}; +//# sourceMappingURL=rimraf-move-remove.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map new file mode 100644 index 00000000000000..b9a1fd57490501 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-move-remove.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rimraf-move-remove.js","sourceRoot":"","sources":["../../src/rimraf-move-remove.ts"],"names":[],"mappings":"AAAA,qCAAqC;AACrC,EAAE;AACF,aAAa;AACb,oBAAoB;AACpB,yCAAyC;AACzC,uEAAuE;AACvE,0BAA0B;AAC1B,EAAE;AACF,uEAAuE;AACvE,kDAAkD;AAClD,EAAE;AACF,0EAA0E;AAE1E,OAAO,EAAE,QAAQ,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAC/C,OAAO,EAAE,UAAU,EAAE,cAAc,EAAE,MAAM,kBAAkB,CAAA;AAE7D,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AAEnE,OAAO,EACL,SAAS,EACT,SAAS,EACT,QAAQ,IAAI,UAAU,EACtB,UAAU,EACV,SAAS,EACT,UAAU,GACX,MAAM,SAAS,CAAA;AAChB,MAAM,EAAE,KAAK,EAAE,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,UAAU,CAAA;AAI1D,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAE1E,sEAAsE;AACtE,MAAM,cAAc,GAAG,CAAC,IAAY,EAAE,EAAE,CAAC,IAAI,QAAQ,CAAC,IAAI,CAAC,IAAI,IAAI,CAAC,MAAM,EAAE,EAAE,CAAA;AAE9E,MAAM,cAAc,GAAG,KAAK,EAAE,IAAY,EAAE,EAAE,CAC5C,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,EAA6B,EAAE,EAAE;IACnD,IAAI,EAAE,CAAC,IAAI,KAAK,OAAO,EAAE,CAAC;QACxB,OAAO,KAAK,CAAC,IAAI,EAAE,KAAK,CAAC,CAAC,IAAI,CAC5B,GAAG,EAAE,CAAC,MAAM,CAAC,IAAI,CAAC,EAClB,GAAG,CAAC,EAAE;YACJ,IAAI,GAAG,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC1B,OAAM;YACR,CAAC;YACD,MAAM,EAAE,CAAA;QACV,CAAC,CACF,CAAA;IACH,CAAC;SAAM,IAAI,EAAE,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;QAChC,OAAM;IACR,CAAC;IACD,MAAM,EAAE,CAAA;AACV,CAAC,CAAC,CAAA;AAEJ,MAAM,kBAAkB,GAAG,CAAC,IAAY,EAAE,EAAE;IAC1C,IAAI,CAAC;QACH,UAAU,CAAC,IAAI,CAAC,CAAA;IAClB,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,OAAO,EAAE,CAAC;YACpD,IAAI,CAAC;gBACH,OAAO,SAAS,CAAC,IAAI,EAAE,KAAK,CAAC,CAAA;YAC/B,CAAC;YAAC,OAAO,GAAG,EAAE,CAAC;gBACb,IAAK,GAA6B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;oBACtD,OAAM;gBACR,CAAC;gBACD,MAAM,EAAE,CAAA;YACV,CAAC;QACH,CAAC;aAAM,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ,EAAE,CAAC;YAC5D,OAAM;QACR,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,gBAAgB,GAAG,KAAK,EACnC,IAAY,EACZ,GAAuB,EACvB,EAAE;IACF,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,mBAAmB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,mBAAmB,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,mBAAmB,CACxB,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,MAAM,UAAU,CAAC,IAAI,CAAC,EAAE,EACvC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,cAAc,CAAC,CAAC,CAAA;QAC5D,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,mBAAmB,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,E
AAE,IAAI,CAAC,CAAA;IAChC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IACD,MAAM,YAAY,CAAC,SAAS,CAAC,IAAI,EAAE,GAAG,CAAC,GAAG,EAAE,KAAK,CAAC,CAAC,CAAA;IACnD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,SAAS,GAAG,KAAK,EACrB,IAAY,EACZ,GAAW,EACX,EAA+B,EAC/B,EAAE;IACF,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,MAAM,MAAM,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IAC3B,OAAO,MAAM,EAAE,CAAC,OAAO,CAAC,CAAA;AAC1B,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,oBAAoB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,uBAAuB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IAC5D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,uBAAuB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC,GAAG,CAAC,GAAG,EAAE,CAAC;QACb,OAAO,uBAAuB,CAC5B,IAAI,EACJ,EAAE,GAAG,GAAG,EAAE,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,EAAE,EACrC,GAAG,CACJ,CAAA;IACH,CAAC;IACD,MAAM,GAAG,GAAW,GAAG,CAAC,GAAG,CAAA;IAE3B,IAAI,IAAI,KAAK,GAAG,CAAC,GAAG,IAAI,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,KAAK,IAAI,EAAE,CAAC;QAClD,MAAM,IAAI,KAAK,CAAC,gDAAgD,CAAC,CAAA;IACnE,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,kBAAkB,CAAC,CAAC,CAAA;QACpE,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,uBAAuB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IACD,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,aAAa,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,CAAC,CAAA;IAC3D,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,aAAa,GAAG,CACpB,IAAY,EACZ,GAAW,EACX,MAA2B,EAC3B,EAAE;IACF,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,EAAE,cAAc,CAAC,IAAI,CAAC,CAAC,CAAA;IAClD,UAAU,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACzB,OAAO,MAAM,CAAC,OAAO,CAAC,CAAA;AACxB,CAAC,CAAA","sourcesContent":["// https://youtu.be/uhRWMGBjlO8?t=537\n//\n// 1. readdir\n// 2. for each entry\n// a. if a non-empty directory, recurse\n// b. if an empty directory, move to random hidden file name in $TEMP\n// c. 
unlink/rmdir $TEMP\n//\n// This works around the fact that unlink/rmdir is non-atomic and takes\n// a non-deterministic amount of time to complete.\n//\n// However, it is HELLA SLOW, like 2-10x slower than a naive recursive rm.\n\nimport { basename, parse, resolve } from 'path'\nimport { defaultTmp, defaultTmpSync } from './default-tmp.js'\n\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nimport {\n chmodSync,\n lstatSync,\n promises as fsPromises,\n renameSync,\n rmdirSync,\n unlinkSync,\n} from './fs.js'\nconst { lstat, rename, unlink, rmdir, chmod } = fsPromises\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\n// crypto.randomBytes is much slower, and Math.random() is enough here\nconst uniqueFilename = (path: string) => `.${basename(path)}.${Math.random()}`\n\nconst unlinkFixEPERM = async (path: string) =>\n unlink(path).catch((er: Error & { code?: string }) => {\n if (er.code === 'EPERM') {\n return chmod(path, 0o666).then(\n () => unlink(path),\n er2 => {\n if (er2.code === 'ENOENT') {\n return\n }\n throw er\n },\n )\n } else if (er.code === 'ENOENT') {\n return\n }\n throw er\n })\n\nconst unlinkFixEPERMSync = (path: string) => {\n try {\n unlinkSync(path)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'EPERM') {\n try {\n return chmodSync(path, 0o666)\n } catch (er2) {\n if ((er2 as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n } else if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') {\n return\n }\n throw er\n }\n}\n\nexport const rimrafMoveRemove = async (\n path: string,\n opt: RimrafAsyncOptions,\n) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafMoveRemoveDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDir(\n path,\n { ...opt, tmp: await defaultTmp(path) },\n ent,\n )\n }\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, unlinkFixEPERM))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafMoveRemoveDir(resolve(path, ent.name), opt, ent),\n ),\n )\n ).reduce((a, b) => a && b, true)\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(tmpUnlink(path, opt.tmp, rmdir))\n return true\n}\n\nconst tmpUnlink = async (\n path: string,\n tmp: string,\n rm: (p: string) => Promise,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n await rename(path, tmpFile)\n return await rm(tmpFile)\n}\n\nexport const rimrafMoveRemoveSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafMoveRemoveDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafMoveRemoveDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n if (!opt.tmp) {\n return rimrafMoveRemoveDirSync(\n path,\n { ...opt, tmp: defaultTmpSync(path) },\n ent,\n )\n }\n const tmp: string = opt.tmp\n\n if (path === opt.tmp && parse(path).root !== path) {\n throw new Error('cannot delete temp directory used for deletion')\n }\n\n const entries = ent.isDirectory() ? 
readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, unlinkFixEPERMSync))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafMoveRemoveDirSync(p, opt, ent) && removedAll\n }\n if (!removedAll) {\n return false\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => tmpUnlinkSync(path, tmp, rmdirSync))\n return true\n}\n\nconst tmpUnlinkSync = (\n path: string,\n tmp: string,\n rmSync: (p: string) => void,\n) => {\n const tmpFile = resolve(tmp, uniqueFilename(path))\n renameSync(path, tmpFile)\n return rmSync(tmpFile)\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts new file mode 100644 index 00000000000000..cc84bf7ffd34d0 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafNative: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafNativeSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map new file mode 100644 index 00000000000000..bea6b79965192f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-native.d.ts","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAIlE,eAAO,MAAM,YAAY,SACjB,MAAM,OACP,kBAAkB,KACtB,OAAO,CAAC,OAAO,CAOjB,CAAA;AAED,eAAO,MAAM,gBAAgB,SACrB,MAAM,OACP,iBAAiB,KACrB,OAOF,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js new file mode 100644 index 00000000000000..719161fc9e85ca --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js @@ -0,0 +1,19 @@ +import { promises, rmSync } from './fs.js'; +const { rm } = promises; +export const rimrafNative = async (path, opt) => { + await rm(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +export const rimrafNativeSync = (path, opt) => { + rmSync(path, { + ...opt, + force: true, + recursive: true, + }); + return true; +}; +//# sourceMappingURL=rimraf-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map new file mode 100644 index 00000000000000..fde373ba4ea13f --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-native.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rimraf-native.js","sourceRoot":"","sources":["../../src/rimraf-native.ts"],"names":[],"mappings":"AACA,OAAO,EAAE,QAAQ,EAAE,MAAM,EAAE,MAAM,SAAS,CAAA;AAC1C,MAAM,EAAE,EAAE,EAAE,GAAG,QAAQ,CAAA;AAEvB,MAAM,CAAC,MAAM,YAAY,GAAG,KAAK,EAC/B,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,MAAM,EAAE,CAAC,IAAI,EAAE;QACb,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,gBAAgB,GAAG,CAC9B,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,MAAM,CAAC,IAAI,EAAE;QACX,GAAG,GAAG;QACN,KAAK,EAAE,IAAI;QACX,SAAS,EAAE,IAAI;KAChB,CAAC,CAAA;IACF,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { promises, rmSync } from './fs.js'\nconst { rm } = promises\n\nexport const rimrafNative = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n await rm(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n\nexport const rimrafNativeSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n rmSync(path, {\n ...opt,\n force: true,\n recursive: true,\n })\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts new file mode 100644 index 00000000000000..8e532efe9aba21 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafPosix: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafPosixSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-posix.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map new file mode 100644 index 00000000000000..3f9b8084ed470b --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.d.ts","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAcA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AAGlE,eAAO,MAAM,WAAW,SAAgB,MAAM,OAAO,kBAAkB,qBAUtE,CAAA;AAED,eAAO,MAAM,eAAe,SAAU,MAAM,OAAO,iBAAiB,YAUnE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js new file mode 100644 index 00000000000000..356a477765a665 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js @@ -0,0 +1,118 @@ +// the simple recursive removal, where unlink and rmdir are atomic +// Note that this approach does NOT work on Windows! +// We stat first and only unlink if the Dirent isn't a directory, +// because sunos will let root unlink a directory, and some +// SUPER weird breakage happens as a result. 
+import { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'; +const { lstat, rmdir, unlink } = promises; +import { parse, resolve } from 'path'; +import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'; +import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'; +export const rimrafPosix = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafPosixDir(path, opt, await lstat(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +export const rimrafPosixSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafPosixDirSync(path, opt, lstatSync(path)); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafPosixDir = async (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? await readdirOrError(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(unlink(path)); + return true; + } + const removedAll = (await Promise.all(entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)))).reduce((a, b) => a && b, true); + if (!removedAll) { + return false; + } + // we don't ever ACTUALLY try to unlink /, because that can never work + // but when preserveRoot is false, we could be operating on it. + // No need to check if preserveRoot is not false. + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(rmdir(path)); + return true; +}; +const rimrafPosixDirSync = (path, opt, ent) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. 
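// (Added note, not from the vendored file: per readdir-or-error.js above,
// `entries` here is null for a non-directory, a Dirent[] on success, or the
// ErrnoException that readdir raised. ENOENT means the directory already
// vanished, so it counts as removed; ENOTDIR means it was swapped for a
// plain file, so control falls through to the unlink below; anything else
// is rethrown.)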
+ /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => unlinkSync(path)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const p = resolve(path, ent.name); + removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll; + } + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => rmdirSync(path)); + return true; +}; +//# sourceMappingURL=rimraf-posix.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map new file mode 100644 index 00000000000000..e301324d3f2649 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-posix.js.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-posix.js","sourceRoot":"","sources":["../../src/rimraf-posix.ts"],"names":[],"mappings":"AAAA,kEAAkE;AAClE,oDAAoD;AACpD,iEAAiE;AACjE,2DAA2D;AAC3D,4CAA4C;AAE5C,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AACpE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,MAAM,EAAE,GAAG,QAAQ,CAAA;AAEzC,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAErC,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAI1E,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AAEnE,MAAM,CAAC,MAAM,WAAW,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IACzE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,cAAc,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC3D,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,eAAe,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACtE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,kBAAkB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IACvD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,cAAc,GAAG,KAAK,EAC1B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACD,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAA;QAChC,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAAC,cAAc,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,CAAC,CACtE,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,sEAAsE;IACtE,+DAA+D;IAC/D,iDAAiD;IACjD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,I
AAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;QACjD,OAAO,KAAK,CAAA;IACd,CAAC;IAED,MAAM,YAAY,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAA;IAC/B,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,kBAAkB,GAAG,CACzB,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACV,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE,CAAC,UAAU,CAAC,IAAI,CAAC,CAAC,CAAA;QACxC,OAAO,IAAI,CAAA;IACb,CAAC;IACD,IAAI,UAAU,GAAY,IAAI,CAAA;IAC9B,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,kBAAkB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,CAAC,IAAI,UAAU,CAAA;IAC5D,CAAC;IACD,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;QAC5D,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,CAAC,UAAU,EAAE,CAAC;QAChB,OAAO,KAAK,CAAA;IACd,CAAC;IAED,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;QACzC,OAAO,KAAK,CAAA;IACd,CAAC;IAED,gBAAgB,CAAC,GAAG,EAAE,CAAC,SAAS,CAAC,IAAI,CAAC,CAAC,CAAA;IACvC,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// the simple recursive removal, where unlink and rmdir are atomic\n// Note that this approach does NOT work on Windows!\n// We stat first and only unlink if the Dirent isn't a directory,\n// because sunos will let root unlink a directory, and some\n// SUPER weird breakage happens as a result.\n\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nconst { lstat, rmdir, unlink } = promises\n\nimport { parse, resolve } from 'path'\n\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\n\nimport { Dirent, Stats } from 'fs'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\n\nexport const rimrafPosix = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafPosixDir(path, opt, await lstat(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafPosixSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafPosixDirSync(path, opt, lstatSync(path))\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafPosixDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(unlink(path))\n return true\n }\n\n const removedAll = (\n await Promise.all(\n entries.map(ent => rimrafPosixDir(resolve(path, ent.name), opt, ent)),\n )\n ).reduce((a, b) => a && b, true)\n\n if (!removedAll) {\n return false\n }\n\n // we don't ever ACTUALLY try to unlink /, because that can never work\n // but when preserveRoot is false, we could be operating on it.\n // No need to check if preserveRoot is not false.\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n\n await ignoreENOENT(rmdir(path))\n return true\n}\n\nconst rimrafPosixDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => unlinkSync(path))\n return true\n }\n let removedAll: boolean = true\n for (const ent of entries) {\n const p = resolve(path, ent.name)\n removedAll = rimrafPosixDirSync(p, opt, ent) && removedAll\n }\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n\n if (!removedAll) {\n return false\n }\n\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n\n ignoreENOENTSync(() => rmdirSync(path))\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts new file mode 100644 index 00000000000000..555689073ffe75 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'; +export declare const rimrafWindows: (path: string, opt: RimrafAsyncOptions) => Promise; +export declare const rimrafWindowsSync: (path: string, opt: RimrafSyncOptions) => boolean; +//# sourceMappingURL=rimraf-windows.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map new file mode 100644 index 00000000000000..56f00d9f2e3d13 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"rimraf-windows.d.ts","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAYA,OAAO,EAAE,kBAAkB,EAAE,iBAAiB,EAAE,MAAM,YAAY,CAAA;AA2DlE,eAAO,MAAM,aAAa,SAAgB,MAAM,OAAO,kBAAkB,qBAUxE,CAAA;AAED,eAAO,MAAM,iBAAiB,SAAU,MAAM,OAAO,iBAAiB,YAUrE,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js 
b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js new file mode 100644 index 00000000000000..bd2fa80657848d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js @@ -0,0 +1,177 @@ +// This is the same as rimrafPosix, with the following changes: +// +// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff +// 2. All non-directories are removed first and then all directories are +// removed in a second sweep. +// 3. If we hit ENOTEMPTY in the second sweep, fall back to move-remove on +// that folder. +// +// Note: "move then remove" is 2-10 times slower, and just as unreliable. +import { parse, resolve } from 'path'; +import { fixEPERM, fixEPERMSync } from './fix-eperm.js'; +import { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'; +import { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'; +import { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'; +import { retryBusy, retryBusySync } from './retry-busy.js'; +import { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'; +const { unlink, rmdir, lstat } = promises; +const rimrafWindowsFile = retryBusy(fixEPERM(unlink)); +const rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync)); +const rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir)); +const rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync)); +const rimrafWindowsDirMoveRemoveFallback = async (path, opt) => { + /* c8 ignore start */ + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + /* c8 ignore stop */ + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return await rimrafWindowsDirRetry(path, options); + } + catch (er) { + if (er?.code === 'ENOTEMPTY') { + return await rimrafMoveRemove(path, options); + } + throw er; + } +}; +const rimrafWindowsDirMoveRemoveFallbackSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + // already filtered, remove from options so we don't call unnecessarily + const { filter, ...options } = opt; + try { + return rimrafWindowsDirRetrySync(path, options); + } + catch (er) { + const fer = er; + if (fer?.code === 'ENOTEMPTY') { + return rimrafMoveRemoveSync(path, options); + } + throw er; + } +}; +const START = Symbol('start'); +const CHILD = Symbol('child'); +const FINISH = Symbol('finish'); +export const rimrafWindows = async (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return await rimrafWindowsDir(path, opt, await lstat(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +export const rimrafWindowsSync = (path, opt) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + try { + return rimrafWindowsDirSync(path, opt, lstatSync(path), START); + } + catch (er) { + if (er?.code === 'ENOENT') + return true; + throw er; + } +}; +const rimrafWindowsDir = async (path, opt, ent, state = START) => { + if (opt?.signal?.aborted) { + throw opt.signal.reason; + } + const entries = ent.isDirectory() ? await readdirOrError(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. 
+ /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + // is a file + await ignoreENOENT(rimrafWindowsFile(path, opt)); + return true; + } + const s = state === START ? CHILD : state; + const removedAll = (await Promise.all(entries.map(ent => rimrafWindowsDir(resolve(path, ent.name), opt, ent, s)))).reduce((a, b) => a && b, true); + if (state === START) { + return rimrafWindowsDir(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !(await opt.filter(path, ent))) { + return false; + } + await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt)); + } + return true; +}; +const rimrafWindowsDirSync = (path, opt, ent, state = START) => { + const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null; + if (!Array.isArray(entries)) { + // this can only happen if lstat/readdir lied, or if the dir was + // swapped out with a file at just the right moment. + /* c8 ignore start */ + if (entries) { + if (entries.code === 'ENOENT') { + return true; + } + if (entries.code !== 'ENOTDIR') { + throw entries; + } + } + /* c8 ignore stop */ + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + // is a file + ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt)); + return true; + } + let removedAll = true; + for (const ent of entries) { + const s = state === START ? CHILD : state; + const p = resolve(path, ent.name); + removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll; + } + if (state === START) { + return rimrafWindowsDirSync(path, opt, ent, FINISH); + } + else if (state === FINISH) { + if (opt.preserveRoot === false && path === parse(path).root) { + return false; + } + if (!removedAll) { + return false; + } + if (opt.filter && !opt.filter(path, ent)) { + return false; + } + ignoreENOENTSync(() => { + rimrafWindowsDirMoveRemoveFallbackSync(path, opt); + }); + } + return true; +}; +//# sourceMappingURL=rimraf-windows.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map new file mode 100644 index 00000000000000..4b136c1e7f1f78 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/rimraf-windows.js.map @@ -0,0 +1 @@ 
+{"version":3,"file":"rimraf-windows.js","sourceRoot":"","sources":["../../src/rimraf-windows.ts"],"names":[],"mappings":"AAAA,+DAA+D;AAC/D,EAAE;AACF,sEAAsE;AACtE,wEAAwE;AACxE,gCAAgC;AAChC,0EAA0E;AAC1E,sBAAsB;AACtB,EAAE;AACF,yEAAyE;AAGzE,OAAO,EAAE,KAAK,EAAE,OAAO,EAAE,MAAM,MAAM,CAAA;AAErC,OAAO,EAAE,QAAQ,EAAE,YAAY,EAAE,MAAM,gBAAgB,CAAA;AACvD,OAAO,EAAE,SAAS,EAAE,QAAQ,EAAE,SAAS,EAAE,UAAU,EAAE,MAAM,SAAS,CAAA;AACpE,OAAO,EAAE,YAAY,EAAE,gBAAgB,EAAE,MAAM,oBAAoB,CAAA;AACnE,OAAO,EAAE,cAAc,EAAE,kBAAkB,EAAE,MAAM,uBAAuB,CAAA;AAC1E,OAAO,EAAE,SAAS,EAAE,aAAa,EAAE,MAAM,iBAAiB,CAAA;AAC1D,OAAO,EAAE,gBAAgB,EAAE,oBAAoB,EAAE,MAAM,yBAAyB,CAAA;AAChF,MAAM,EAAE,MAAM,EAAE,KAAK,EAAE,KAAK,EAAE,GAAG,QAAQ,CAAA;AAEzC,MAAM,iBAAiB,GAAG,SAAS,CAAC,QAAQ,CAAC,MAAM,CAAC,CAAC,CAAA;AACrD,MAAM,qBAAqB,GAAG,aAAa,CAAC,YAAY,CAAC,UAAU,CAAC,CAAC,CAAA;AACrE,MAAM,qBAAqB,GAAG,SAAS,CAAC,QAAQ,CAAC,KAAK,CAAC,CAAC,CAAA;AACxD,MAAM,yBAAyB,GAAG,aAAa,CAAC,YAAY,CAAC,SAAS,CAAC,CAAC,CAAA;AAExE,MAAM,kCAAkC,GAAG,KAAK,EAC9C,IAAY,EACZ,GAAuB,EACL,EAAE;IACpB,qBAAqB;IACrB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,oBAAoB;IACpB,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,MAAM,qBAAqB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACnD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YACxD,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC9C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,sCAAsC,GAAG,CAC7C,IAAY,EACZ,GAAsB,EACb,EAAE;IACX,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,uEAAuE;IACvE,MAAM,EAAE,MAAM,EAAE,GAAG,OAAO,EAAE,GAAG,GAAG,CAAA;IAClC,IAAI,CAAC;QACH,OAAO,yBAAyB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;IACjD,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,MAAM,GAAG,GAAG,EAA2B,CAAA;QACvC,IAAI,GAAG,EAAE,IAAI,KAAK,WAAW,EAAE,CAAC;YAC9B,OAAO,oBAAoB,CAAC,IAAI,EAAE,OAAO,CAAC,CAAA;QAC5C,CAAC;QACD,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,KAAK,GAAG,MAAM,CAAC,OAAO,CAAC,CAAA;AAC7B,MAAM,MAAM,GAAG,MAAM,CAAC,QAAQ,CAAC,CAAA;AAE/B,MAAM,CAAC,MAAM,aAAa,GAAG,KAAK,EAAE,IAAY,EAAE,GAAuB,EAAE,EAAE;IAC3E,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,MAAM,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,MAAM,KAAK,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IACpE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,CAAC,MAAM,iBAAiB,GAAG,CAAC,IAAY,EAAE,GAAsB,EAAE,EAAE;IACxE,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IACD,IAAI,CAAC;QACH,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,SAAS,CAAC,IAAI,CAAC,EAAE,KAAK,CAAC,CAAA;IAChE,CAAC;IAAC,OAAO,EAAE,EAAE,CAAC;QACZ,IAAK,EAA4B,EAAE,IAAI,KAAK,QAAQ;YAAE,OAAO,IAAI,CAAA;QACjE,MAAM,EAAE,CAAA;IACV,CAAC;AACH,CAAC,CAAA;AAED,MAAM,gBAAgB,GAAG,KAAK,EAC5B,IAAY,EACZ,GAAuB,EACvB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACK,EAAE;IACpB,IAAI,GAAG,EAAE,MAAM,EAAE,OAAO,EAAE,CAAC;QACzB,MAAM,GAAG,CAAC,MAAM,CAAC,MAAM,CAAA;IACzB,CAAC;IAED,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,MAAM,cAAc,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACrE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI
,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,MAAM,YAAY,CAAC,iBAAiB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QAChD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;IACzC,MAAM,UAAU,GAAG,CACjB,MAAM,OAAO,CAAC,GAAG,CACf,OAAO,CAAC,GAAG,CAAC,GAAG,CAAC,EAAE,CAChB,gBAAgB,CAAC,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,CACvD,CACF,CACF,CAAC,MAAM,CAAC,CAAC,CAAC,EAAE,CAAC,EAAE,EAAE,CAAC,CAAC,IAAI,CAAC,EAAE,IAAI,CAAC,CAAA;IAEhC,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,gBAAgB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACjD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,CAAC,MAAM,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,EAAE,CAAC;YACjD,OAAO,KAAK,CAAA;QACd,CAAC;QACD,MAAM,YAAY,CAAC,kCAAkC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;IACnE,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA;AAED,MAAM,oBAAoB,GAAG,CAC3B,IAAY,EACZ,GAAsB,EACtB,GAAmB,EACnB,KAAK,GAAG,KAAK,EACJ,EAAE;IACX,MAAM,OAAO,GAAG,GAAG,CAAC,WAAW,EAAE,CAAC,CAAC,CAAC,kBAAkB,CAAC,IAAI,CAAC,CAAC,CAAC,CAAC,IAAI,CAAA;IACnE,IAAI,CAAC,KAAK,CAAC,OAAO,CAAC,OAAO,CAAC,EAAE,CAAC;QAC5B,gEAAgE;QAChE,oDAAoD;QACpD,qBAAqB;QACrB,IAAI,OAAO,EAAE,CAAC;YACZ,IAAI,OAAO,CAAC,IAAI,KAAK,QAAQ,EAAE,CAAC;gBAC9B,OAAO,IAAI,CAAA;YACb,CAAC;YACD,IAAI,OAAO,CAAC,IAAI,KAAK,SAAS,EAAE,CAAC;gBAC/B,MAAM,OAAO,CAAA;YACf,CAAC;QACH,CAAC;QACD,oBAAoB;QACpB,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,YAAY;QACZ,gBAAgB,CAAC,GAAG,EAAE,CAAC,qBAAqB,CAAC,IAAI,EAAE,GAAG,CAAC,CAAC,CAAA;QACxD,OAAO,IAAI,CAAA;IACb,CAAC;IAED,IAAI,UAAU,GAAG,IAAI,CAAA;IACrB,KAAK,MAAM,GAAG,IAAI,OAAO,EAAE,CAAC;QAC1B,MAAM,CAAC,GAAG,KAAK,KAAK,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,CAAC,CAAC,KAAK,CAAA;QACzC,MAAM,CAAC,GAAG,OAAO,CAAC,IAAI,EAAE,GAAG,CAAC,IAAI,CAAC,CAAA;QACjC,UAAU,GAAG,oBAAoB,CAAC,CAAC,EAAE,GAAG,EAAE,GAAG,EAAE,CAAC,CAAC,IAAI,UAAU,CAAA;IACjE,CAAC;IAED,IAAI,KAAK,KAAK,KAAK,EAAE,CAAC;QACpB,OAAO,oBAAoB,CAAC,IAAI,EAAE,GAAG,EAAE,GAAG,EAAE,MAAM,CAAC,CAAA;IACrD,CAAC;SAAM,IAAI,KAAK,KAAK,MAAM,EAAE,CAAC;QAC5B,IAAI,GAAG,CAAC,YAAY,KAAK,KAAK,IAAI,IAAI,KAAK,KAAK,CAAC,IAAI,CAAC,CAAC,IAAI,EAAE,CAAC;YAC5D,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,CAAC,UAAU,EAAE,CAAC;YAChB,OAAO,KAAK,CAAA;QACd,CAAC;QACD,IAAI,GAAG,CAAC,MAAM,IAAI,CAAC,GAAG,CAAC,MAAM,CAAC,IAAI,EAAE,GAAG,CAAC,EAAE,CAAC;YACzC,OAAO,KAAK,CAAA;QACd,CAAC;QACD,gBAAgB,CAAC,GAAG,EAAE;YACpB,sCAAsC,CAAC,IAAI,EAAE,GAAG,CAAC,CAAA;QACnD,CAAC,CAAC,CAAA;IACJ,CAAC;IACD,OAAO,IAAI,CAAA;AACb,CAAC,CAAA","sourcesContent":["// This is the same as rimrafPosix, with the following changes:\n//\n// 1. EBUSY, ENFILE, EMFILE trigger retries and/or exponential backoff\n// 2. All non-directories are removed first and then all directories are\n// removed in a second sweep.\n// 3. 
If we hit ENOTEMPTY in the second sweep, fall back to move-remove on\n// the that folder.\n//\n// Note: \"move then remove\" is 2-10 times slower, and just as unreliable.\n\nimport { Dirent, Stats } from 'fs'\nimport { parse, resolve } from 'path'\nimport { RimrafAsyncOptions, RimrafSyncOptions } from './index.js'\nimport { fixEPERM, fixEPERMSync } from './fix-eperm.js'\nimport { lstatSync, promises, rmdirSync, unlinkSync } from './fs.js'\nimport { ignoreENOENT, ignoreENOENTSync } from './ignore-enoent.js'\nimport { readdirOrError, readdirOrErrorSync } from './readdir-or-error.js'\nimport { retryBusy, retryBusySync } from './retry-busy.js'\nimport { rimrafMoveRemove, rimrafMoveRemoveSync } from './rimraf-move-remove.js'\nconst { unlink, rmdir, lstat } = promises\n\nconst rimrafWindowsFile = retryBusy(fixEPERM(unlink))\nconst rimrafWindowsFileSync = retryBusySync(fixEPERMSync(unlinkSync))\nconst rimrafWindowsDirRetry = retryBusy(fixEPERM(rmdir))\nconst rimrafWindowsDirRetrySync = retryBusySync(fixEPERMSync(rmdirSync))\n\nconst rimrafWindowsDirMoveRemoveFallback = async (\n path: string,\n opt: RimrafAsyncOptions,\n): Promise => {\n /* c8 ignore start */\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n /* c8 ignore stop */\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return await rimrafWindowsDirRetry(path, options)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOTEMPTY') {\n return await rimrafMoveRemove(path, options)\n }\n throw er\n }\n}\n\nconst rimrafWindowsDirMoveRemoveFallbackSync = (\n path: string,\n opt: RimrafSyncOptions,\n): boolean => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n // already filtered, remove from options so we don't call unnecessarily\n const { filter, ...options } = opt\n try {\n return rimrafWindowsDirRetrySync(path, options)\n } catch (er) {\n const fer = er as NodeJS.ErrnoException\n if (fer?.code === 'ENOTEMPTY') {\n return rimrafMoveRemoveSync(path, options)\n }\n throw er\n }\n}\n\nconst START = Symbol('start')\nconst CHILD = Symbol('child')\nconst FINISH = Symbol('finish')\n\nexport const rimrafWindows = async (path: string, opt: RimrafAsyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return await rimrafWindowsDir(path, opt, await lstat(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nexport const rimrafWindowsSync = (path: string, opt: RimrafSyncOptions) => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n try {\n return rimrafWindowsDirSync(path, opt, lstatSync(path), START)\n } catch (er) {\n if ((er as NodeJS.ErrnoException)?.code === 'ENOENT') return true\n throw er\n }\n}\n\nconst rimrafWindowsDir = async (\n path: string,\n opt: RimrafAsyncOptions,\n ent: Dirent | Stats,\n state = START,\n): Promise => {\n if (opt?.signal?.aborted) {\n throw opt.signal.reason\n }\n\n const entries = ent.isDirectory() ? 
await readdirOrError(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n // is a file\n await ignoreENOENT(rimrafWindowsFile(path, opt))\n return true\n }\n\n const s = state === START ? CHILD : state\n const removedAll = (\n await Promise.all(\n entries.map(ent =>\n rimrafWindowsDir(resolve(path, ent.name), opt, ent, s),\n ),\n )\n ).reduce((a, b) => a && b, true)\n\n if (state === START) {\n return rimrafWindowsDir(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !(await opt.filter(path, ent))) {\n return false\n }\n await ignoreENOENT(rimrafWindowsDirMoveRemoveFallback(path, opt))\n }\n return true\n}\n\nconst rimrafWindowsDirSync = (\n path: string,\n opt: RimrafSyncOptions,\n ent: Dirent | Stats,\n state = START,\n): boolean => {\n const entries = ent.isDirectory() ? readdirOrErrorSync(path) : null\n if (!Array.isArray(entries)) {\n // this can only happen if lstat/readdir lied, or if the dir was\n // swapped out with a file at just the right moment.\n /* c8 ignore start */\n if (entries) {\n if (entries.code === 'ENOENT') {\n return true\n }\n if (entries.code !== 'ENOTDIR') {\n throw entries\n }\n }\n /* c8 ignore stop */\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n // is a file\n ignoreENOENTSync(() => rimrafWindowsFileSync(path, opt))\n return true\n }\n\n let removedAll = true\n for (const ent of entries) {\n const s = state === START ? 
CHILD : state\n const p = resolve(path, ent.name)\n removedAll = rimrafWindowsDirSync(p, opt, ent, s) && removedAll\n }\n\n if (state === START) {\n return rimrafWindowsDirSync(path, opt, ent, FINISH)\n } else if (state === FINISH) {\n if (opt.preserveRoot === false && path === parse(path).root) {\n return false\n }\n if (!removedAll) {\n return false\n }\n if (opt.filter && !opt.filter(path, ent)) {\n return false\n }\n ignoreENOENTSync(() => {\n rimrafWindowsDirMoveRemoveFallbackSync(path, opt)\n })\n }\n return true\n}\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts new file mode 100644 index 00000000000000..e191fd90da93d3 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts @@ -0,0 +1,4 @@ +import { RimrafAsyncOptions, RimrafOptions } from './index.js'; +export declare const useNative: (opt?: RimrafAsyncOptions) => boolean; +export declare const useNativeSync: (opt?: RimrafOptions) => boolean; +//# sourceMappingURL=use-native.d.ts.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map new file mode 100644 index 00000000000000..b182beb1707a7d --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.d.ts.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.d.ts","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AAAA,OAAO,EAAE,kBAAkB,EAAE,aAAa,EAAE,MAAM,YAAY,CAAA;AAa9D,eAAO,MAAM,SAAS,EAAE,CAAC,GAAG,CAAC,EAAE,kBAAkB,KAAK,OAGf,CAAA;AACvC,eAAO,MAAM,aAAa,EAAE,CAAC,GAAG,CAAC,EAAE,aAAa,KAAK,OAGd,CAAA"} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.js b/deps/npm/node_modules/rimraf/dist/esm/use-native.js new file mode 100644 index 00000000000000..bf1ea5a14c5aa8 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.js @@ -0,0 +1,16 @@ +import platform from './platform.js'; +const version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version; +const versArr = version.replace(/^v/, '').split('.'); +/* c8 ignore start */ +const [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10)); +/* c8 ignore stop */ +const hasNative = major > 14 || (major === 14 && minor >= 14); +// we do NOT use native by default on Windows, because Node's native +// rm implementation is less advanced. Change this code if that changes. +export const useNative = !hasNative || platform === 'win32' ? + () => false + : opt => !opt?.signal && !opt?.filter; +export const useNativeSync = !hasNative || platform === 'win32' ? 
+ () => false + : opt => !opt?.signal && !opt?.filter; +//# sourceMappingURL=use-native.js.map \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map b/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map new file mode 100644 index 00000000000000..32da371e4181b6 --- /dev/null +++ b/deps/npm/node_modules/rimraf/dist/esm/use-native.js.map @@ -0,0 +1 @@ +{"version":3,"file":"use-native.js","sourceRoot":"","sources":["../../src/use-native.ts"],"names":[],"mappings":"AACA,OAAO,QAAQ,MAAM,eAAe,CAAA;AAEpC,MAAM,OAAO,GAAG,OAAO,CAAC,GAAG,CAAC,+BAA+B,IAAI,OAAO,CAAC,OAAO,CAAA;AAC9E,MAAM,OAAO,GAAG,OAAO,CAAC,OAAO,CAAC,IAAI,EAAE,EAAE,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAA;AAEpD,qBAAqB;AACrB,MAAM,CAAC,KAAK,GAAG,CAAC,EAAE,KAAK,GAAG,CAAC,CAAC,GAAG,OAAO,CAAC,GAAG,CAAC,CAAC,CAAC,EAAE,CAAC,QAAQ,CAAC,CAAC,EAAE,EAAE,CAAC,CAAC,CAAA;AAChE,oBAAoB;AACpB,MAAM,SAAS,GAAG,KAAK,GAAG,EAAE,IAAI,CAAC,KAAK,KAAK,EAAE,IAAI,KAAK,IAAI,EAAE,CAAC,CAAA;AAE7D,oEAAoE;AACpE,yEAAyE;AACzE,MAAM,CAAC,MAAM,SAAS,GACpB,CAAC,SAAS,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA;AACvC,MAAM,CAAC,MAAM,aAAa,GACxB,CAAC,SAAS,IAAI,QAAQ,KAAK,OAAO,CAAC,CAAC;IAClC,GAAG,EAAE,CAAC,KAAK;IACb,CAAC,CAAC,GAAG,CAAC,EAAE,CAAC,CAAC,GAAG,EAAE,MAAM,IAAI,CAAC,GAAG,EAAE,MAAM,CAAA","sourcesContent":["import { RimrafAsyncOptions, RimrafOptions } from './index.js'\nimport platform from './platform.js'\n\nconst version = process.env.__TESTING_RIMRAF_NODE_VERSION__ || process.version\nconst versArr = version.replace(/^v/, '').split('.')\n\n/* c8 ignore start */\nconst [major = 0, minor = 0] = versArr.map(v => parseInt(v, 10))\n/* c8 ignore stop */\nconst hasNative = major > 14 || (major === 14 && minor >= 14)\n\n// we do NOT use native by default on Windows, because Node's native\n// rm implementation is less advanced. Change this code if that changes.\nexport const useNative: (opt?: RimrafAsyncOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\nexport const useNativeSync: (opt?: RimrafOptions) => boolean =\n !hasNative || platform === 'win32' ?\n () => false\n : opt => !opt?.signal && !opt?.filter\n"]} \ No newline at end of file diff --git a/deps/npm/node_modules/rimraf/package.json b/deps/npm/node_modules/rimraf/package.json new file mode 100644 index 00000000000000..212180c8e3fcc4 --- /dev/null +++ b/deps/npm/node_modules/rimraf/package.json @@ -0,0 +1,89 @@ +{ + "name": "rimraf", + "version": "5.0.10", + "publishConfig": { + "tag": "v5-legacy" + }, + "type": "module", + "tshy": { + "main": true, + "exports": { + "./package.json": "./package.json", + ".": "./src/index.ts" + } + }, + "bin": "./dist/esm/bin.mjs", + "main": "./dist/commonjs/index.js", + "types": "./dist/commonjs/index.d.ts", + "exports": { + "./package.json": "./package.json", + ".": { + "import": { + "types": "./dist/esm/index.d.ts", + "default": "./dist/esm/index.js" + }, + "require": { + "types": "./dist/commonjs/index.d.ts", + "default": "./dist/commonjs/index.js" + } + } + }, + "files": [ + "dist" + ], + "description": "A deep deletion module for node (like `rm -rf`)", + "author": "Isaac Z. 
Schlueter (http://blog.izs.me/)", + "license": "ISC", + "repository": "git://github.com/isaacs/rimraf.git", + "scripts": { + "preversion": "npm test", + "postversion": "npm publish", + "prepublishOnly": "git push origin --follow-tags", + "prepare": "tshy", + "pretest": "npm run prepare", + "presnap": "npm run prepare", + "test": "tap", + "snap": "tap", + "format": "prettier --write . --log-level warn", + "benchmark": "node benchmark/index.js", + "typedoc": "typedoc --tsconfig .tshy/esm.json ./src/*.ts" + }, + "prettier": { + "experimentalTernaries": true, + "semi": false, + "printWidth": 80, + "tabWidth": 2, + "useTabs": false, + "singleQuote": true, + "jsxSingleQuote": false, + "bracketSameLine": true, + "arrowParens": "avoid", + "endOfLine": "lf" + }, + "devDependencies": { + "@types/node": "^20.12.11", + "mkdirp": "^3.0.1", + "prettier": "^3.2.5", + "tap": "^19.0.1", + "tshy": "^1.14.0", + "typedoc": "^0.25.13", + "typescript": "^5.4.5" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "dependencies": { + "glob": "^10.3.7" + }, + "keywords": [ + "rm", + "rm -rf", + "rm -fr", + "remove", + "directory", + "cli", + "rmdir", + "recursive" + ], + "module": "./dist/esm/index.js" +} diff --git a/deps/npm/node_modules/ssri/package.json b/deps/npm/node_modules/ssri/package.json index 28395414e4643c..83306cd044ec3c 100644 --- a/deps/npm/node_modules/ssri/package.json +++ b/deps/npm/node_modules/ssri/package.json @@ -1,6 +1,6 @@ { "name": "ssri", - "version": "10.0.6", + "version": "12.0.0", "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", "main": "lib/index.js", "files": [ @@ -13,11 +13,12 @@ "posttest": "npm run lint", "test": "tap", "coverage": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", - "snap": "tap" + "lintfix": "npm run eslint -- --fix", + "snap": "tap", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "tap": { "check-coverage": true, @@ -50,16 +51,16 @@ "minipass": "^7.0.3" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js new file mode 100644 index 00000000000000..c541b93001517e --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/agents.js @@ -0,0 +1,206 @@ +'use strict' + +const net = require('net') +const tls = require('tls') +const { once } = require('events') +const timers = require('timers/promises') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, getProxyAgent, proxyCache } = require('./proxy.js') +const Errors = require('./errors.js') +const { Agent: AgentBase } = require('agent-base') + +module.exports = class Agent extends AgentBase { + #options + #timeouts + #proxy + #noProxy + #ProxyAgent + + constructor (options = {}) { + const { timeouts, proxy, noProxy, ...normalizedOptions } = normalizeOptions(options) + + super(normalizedOptions) + + this.#options = normalizedOptions + this.#timeouts = timeouts + + if (proxy) { + this.#proxy = new URL(proxy) + this.#noProxy = noProxy + this.#ProxyAgent = getProxyAgent(proxy) + } + } + + get proxy () { + return this.#proxy ? { url: this.#proxy } : {} + } + + #getProxy (options) { + if (!this.#proxy) { + return + } + + const proxy = getProxy(`${options.protocol}//${options.host}:${options.port}`, { + proxy: this.#proxy, + noProxy: this.#noProxy, + }) + + if (!proxy) { + return + } + + const cacheKey = cacheOptions({ + ...options, + ...this.#options, + timeouts: this.#timeouts, + proxy, + }) + + if (proxyCache.has(cacheKey)) { + return proxyCache.get(cacheKey) + } + + let ProxyAgent = this.#ProxyAgent + if (Array.isArray(ProxyAgent)) { + ProxyAgent = this.isSecureEndpoint(options) ? ProxyAgent[1] : ProxyAgent[0] + } + + const proxyAgent = new ProxyAgent(proxy, { + ...this.#options, + socketOptions: { family: this.#options.family }, + }) + proxyCache.set(cacheKey, proxyAgent) + + return proxyAgent + } + + // takes an array of promises and races them against the connection timeout + // which will throw the necessary error if it is hit. This will return the + // result of the promise race. + async #timeoutConnection ({ promises, options, timeout }, ac = new AbortController()) { + if (timeout) { + const connectionTimeout = timers.setTimeout(timeout, null, { signal: ac.signal }) + .then(() => { + throw new Errors.ConnectionTimeoutError(`${options.host}:${options.port}`) + }).catch((err) => { + if (err.name === 'AbortError') { + return + } + throw err + }) + promises.push(connectionTimeout) + } + + let result + try { + result = await Promise.race(promises) + ac.abort() + } catch (err) { + ac.abort() + throw err + } + return result + } + + async connect (request, options) { + // if the connection does not have its own lookup function + // set, then use the one from our options + options.lookup ??= this.#options.lookup + + let socket + let timeout = this.#timeouts.connection + const isSecureEndpoint = this.isSecureEndpoint(options) + + const proxy = this.#getProxy(options) + if (proxy) { + // some of the proxies will wait for the socket to fully connect before + // returning so we have to await this while also racing it against the + // connection timeout. 
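The connect() path above has to race the proxy's connect promise against the configured connection timeout, which is what the private #timeoutConnection helper does: it starts a cancellable timer from timers/promises, pushes it into the race, and aborts it once anything settles so no timer is left pending. A minimal standalone sketch of that pattern, with an illustrative error message in place of the module's ConnectionTimeoutError:

'use strict'

const timers = require('timers/promises')

// Race `promises` against `timeout` ms; whichever settles first wins, and
// the pending timer is aborted afterwards so it cannot fire later.
const raceWithTimeout = async (promises, timeout, ac = new AbortController()) => {
  if (timeout) {
    const timer = timers.setTimeout(timeout, null, { signal: ac.signal })
      .then(() => {
        throw new Error(`connection timed out after ${timeout}ms`)
      })
      .catch((err) => {
        // the abort() in the finally block surfaces here as an AbortError; swallow it
        if (err.name !== 'AbortError') {
          throw err
        }
      })
    promises.push(timer)
  }
  try {
    return await Promise.race(promises)
  } finally {
    ac.abort()
  }
}

// e.g. socket = await raceWithTimeout([proxy.connect(request, options)], 5000)

Note the two-step use in connect(): when a proxy is involved, the time proxy.connect took is subtracted from the timeout before the socket's own connect/secureConnect race, so one budget covers the whole handshake.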
+ const start = Date.now() + socket = await this.#timeoutConnection({ + options, + timeout, + promises: [proxy.connect(request, options)], + }) + // see how much time proxy.connect took and subtract it from + // the timeout + if (timeout) { + timeout = timeout - (Date.now() - start) + } + } else { + socket = (isSecureEndpoint ? tls : net).connect(options) + } + + socket.setKeepAlive(this.keepAlive, this.keepAliveMsecs) + socket.setNoDelay(this.keepAlive) + + const abortController = new AbortController() + const { signal } = abortController + + const connectPromise = socket[isSecureEndpoint ? 'secureConnecting' : 'connecting'] + ? once(socket, isSecureEndpoint ? 'secureConnect' : 'connect', { signal }) + : Promise.resolve() + + await this.#timeoutConnection({ + options, + timeout, + promises: [ + connectPromise, + once(socket, 'error', { signal }).then((err) => { + throw err[0] + }), + ], + }, abortController) + + if (this.#timeouts.idle) { + socket.setTimeout(this.#timeouts.idle, () => { + socket.destroy(new Errors.IdleTimeoutError(`${options.host}:${options.port}`)) + }) + } + + return socket + } + + addRequest (request, options) { + const proxy = this.#getProxy(options) + // it would be better to call proxy.addRequest here but this causes the + // http-proxy-agent to call its super.addRequest which causes the request + // to be added to the agent twice. since we only support 3 agents + // currently (see the required agents in proxy.js) we have manually + // checked that the only public methods we need to call are called in the + // next block. this could change in the future and presumably we would get + // failing tests until we have properly called the necessary methods on + // each of our proxy agents + if (proxy?.setRequestProps) { + proxy.setRequestProps(request, options) + } + + request.setHeader('connection', this.keepAlive ? 'keep-alive' : 'close') + + if (this.#timeouts.response) { + let responseTimeout + request.once('finish', () => { + setTimeout(() => { + request.destroy(new Errors.ResponseTimeoutError(request, this.#proxy)) + }, this.#timeouts.response) + }) + request.once('response', () => { + clearTimeout(responseTimeout) + }) + } + + if (this.#timeouts.transfer) { + let transferTimeout + request.once('response', (res) => { + setTimeout(() => { + res.destroy(new Errors.TransferTimeoutError(request, this.#proxy)) + }, this.#timeouts.transfer) + res.once('close', () => { + clearTimeout(transferTimeout) + }) + }) + } + + return super.addRequest(request, options) + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js new file mode 100644 index 00000000000000..3c6946c566d736 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/dns.js @@ -0,0 +1,53 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const dns = require('dns') + +// this is a factory so that each request can have its own opts (i.e. ttl) +// while still sharing the cache across all requests +const cache = new LRUCache({ max: 50 }) + +const getOptions = ({ + family = 0, + hints = dns.ADDRCONFIG, + all = false, + verbatim = undefined, + ttl = 5 * 60 * 1000, + lookup = dns.lookup, +}) => ({ + // hints and lookup are returned since both are top level properties to (net|tls).connect + hints, + lookup: (hostname, ...args) => { + const callback = args.pop() // callback is always last arg + const lookupOptions = args[0] ?? 
{} + + const options = { + family, + hints, + all, + verbatim, + ...(typeof lookupOptions === 'number' ? { family: lookupOptions } : lookupOptions), + } + + const key = JSON.stringify({ hostname, ...options }) + + if (cache.has(key)) { + const cached = cache.get(key) + return process.nextTick(callback, null, ...cached) + } + + lookup(hostname, options, (err, ...result) => { + if (err) { + return callback(err) + } + + cache.set(key, result, { ttl }) + return callback(null, ...result) + }) + }, +}) + +module.exports = { + cache, + getOptions, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js new file mode 100644 index 00000000000000..70475aec8eb357 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/errors.js @@ -0,0 +1,61 @@ +'use strict' + +class InvalidProxyProtocolError extends Error { + constructor (url) { + super(`Invalid protocol \`${url.protocol}\` connecting to proxy \`${url.host}\``) + this.code = 'EINVALIDPROXY' + this.proxy = url + } +} + +class ConnectionTimeoutError extends Error { + constructor (host) { + super(`Timeout connecting to host \`${host}\``) + this.code = 'ECONNECTIONTIMEOUT' + this.host = host + } +} + +class IdleTimeoutError extends Error { + constructor (host) { + super(`Idle timeout reached for host \`${host}\``) + this.code = 'EIDLETIMEOUT' + this.host = host + } +} + +class ResponseTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Response timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `connecting to host \`${request.host}\`` + super(msg) + this.code = 'ERESPONSETIMEOUT' + this.proxy = proxy + this.request = request + } +} + +class TransferTimeoutError extends Error { + constructor (request, proxy) { + let msg = 'Transfer timeout ' + if (proxy) { + msg += `from proxy \`${proxy.host}\` ` + } + msg += `for \`${request.host}\`` + super(msg) + this.code = 'ETRANSFERTIMEOUT' + this.proxy = proxy + this.request = request + } +} + +module.exports = { + InvalidProxyProtocolError, + ConnectionTimeoutError, + IdleTimeoutError, + ResponseTimeoutError, + TransferTimeoutError, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js new file mode 100644 index 00000000000000..b33d6eaef07a21 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/index.js @@ -0,0 +1,56 @@ +'use strict' + +const { LRUCache } = require('lru-cache') +const { normalizeOptions, cacheOptions } = require('./options') +const { getProxy, proxyCache } = require('./proxy.js') +const dns = require('./dns.js') +const Agent = require('./agents.js') + +const agentCache = new LRUCache({ max: 20 }) + +const getAgent = (url, { agent, proxy, noProxy, ...options } = {}) => { + // false has meaning so this can't be a simple truthiness check + if (agent != null) { + return agent + } + + url = new URL(url) + + const proxyForUrl = getProxy(url, { proxy, noProxy }) + const normalizedOptions = { + ...normalizeOptions(options), + proxy: proxyForUrl, + } + + const cacheKey = cacheOptions({ + ...normalizedOptions, + secureEndpoint: url.protocol === 'https:', + }) + + if (agentCache.has(cacheKey)) { + return agentCache.get(cacheKey) + } + + const newAgent = new Agent(normalizedOptions) + agentCache.set(cacheKey, newAgent) + + return newAgent +} + +module.exports = { + getAgent, + Agent, + // these are exported 
for backwards compatability + HttpAgent: Agent, + HttpsAgent: Agent, + cache: { + proxy: proxyCache, + agent: agentCache, + dns: dns.cache, + clear: () => { + proxyCache.clear() + agentCache.clear() + dns.cache.clear() + }, + }, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js new file mode 100644 index 00000000000000..0bf53f725f0846 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/options.js @@ -0,0 +1,86 @@ +'use strict' + +const dns = require('./dns') + +const normalizeOptions = (opts) => { + const family = parseInt(opts.family ?? '0', 10) + const keepAlive = opts.keepAlive ?? true + + const normalized = { + // nodejs http agent options. these are all the defaults + // but kept here to increase the likelihood of cache hits + // https://nodejs.org/api/http.html#new-agentoptions + keepAliveMsecs: keepAlive ? 1000 : undefined, + maxSockets: opts.maxSockets ?? 15, + maxTotalSockets: Infinity, + maxFreeSockets: keepAlive ? 256 : undefined, + scheduling: 'fifo', + // then spread the rest of the options + ...opts, + // we already set these to their defaults that we want + family, + keepAlive, + // our custom timeout options + timeouts: { + // the standard timeout option is mapped to our idle timeout + // and then deleted below + idle: opts.timeout ?? 0, + connection: 0, + response: 0, + transfer: 0, + ...opts.timeouts, + }, + // get the dns options that go at the top level of socket connection + ...dns.getOptions({ family, ...opts.dns }), + } + + // remove timeout since we already used it to set our own idle timeout + delete normalized.timeout + + return normalized +} + +const createKey = (obj) => { + let key = '' + const sorted = Object.entries(obj).sort((a, b) => a[0] - b[0]) + for (let [k, v] of sorted) { + if (v == null) { + v = 'null' + } else if (v instanceof URL) { + v = v.toString() + } else if (typeof v === 'object') { + v = createKey(v) + } + key += `${k}:${v}:` + } + return key +} + +const cacheOptions = ({ secureEndpoint, ...options }) => createKey({ + secureEndpoint: !!secureEndpoint, + // socket connect options + family: options.family, + hints: options.hints, + localAddress: options.localAddress, + // tls specific connect options + strictSsl: secureEndpoint ? !!options.rejectUnauthorized : false, + ca: secureEndpoint ? options.ca : null, + cert: secureEndpoint ? options.cert : null, + key: secureEndpoint ? 
options.key : null, + // http agent options + keepAlive: options.keepAlive, + keepAliveMsecs: options.keepAliveMsecs, + maxSockets: options.maxSockets, + maxTotalSockets: options.maxTotalSockets, + maxFreeSockets: options.maxFreeSockets, + scheduling: options.scheduling, + // timeout options + timeouts: options.timeouts, + // proxy + proxy: options.proxy, +}) + +module.exports = { + normalizeOptions, + cacheOptions, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js new file mode 100644 index 00000000000000..6272e929e57bcf --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/lib/proxy.js @@ -0,0 +1,88 @@ +'use strict' + +const { HttpProxyAgent } = require('http-proxy-agent') +const { HttpsProxyAgent } = require('https-proxy-agent') +const { SocksProxyAgent } = require('socks-proxy-agent') +const { LRUCache } = require('lru-cache') +const { InvalidProxyProtocolError } = require('./errors.js') + +const PROXY_CACHE = new LRUCache({ max: 20 }) + +const SOCKS_PROTOCOLS = new Set(SocksProxyAgent.protocols) + +const PROXY_ENV_KEYS = new Set(['https_proxy', 'http_proxy', 'proxy', 'no_proxy']) + +const PROXY_ENV = Object.entries(process.env).reduce((acc, [key, value]) => { + key = key.toLowerCase() + if (PROXY_ENV_KEYS.has(key)) { + acc[key] = value + } + return acc +}, {}) + +const getProxyAgent = (url) => { + url = new URL(url) + + const protocol = url.protocol.slice(0, -1) + if (SOCKS_PROTOCOLS.has(protocol)) { + return SocksProxyAgent + } + if (protocol === 'https' || protocol === 'http') { + return [HttpProxyAgent, HttpsProxyAgent] + } + + throw new InvalidProxyProtocolError(url) +} + +const isNoProxy = (url, noProxy) => { + if (typeof noProxy === 'string') { + noProxy = noProxy.split(',').map((p) => p.trim()).filter(Boolean) + } + + if (!noProxy || !noProxy.length) { + return false + } + + const hostSegments = url.hostname.split('.').reverse() + + return noProxy.some((no) => { + const noSegments = no.split('.').filter(Boolean).reverse() + if (!noSegments.length) { + return false + } + + for (let i = 0; i < noSegments.length; i++) { + if (hostSegments[i] !== noSegments[i]) { + return false + } + } + + return true + }) +} + +const getProxy = (url, { proxy, noProxy }) => { + url = new URL(url) + + if (!proxy) { + proxy = url.protocol === 'https:' + ? 
PROXY_ENV.https_proxy + : PROXY_ENV.https_proxy || PROXY_ENV.http_proxy || PROXY_ENV.proxy + } + + if (!noProxy) { + noProxy = PROXY_ENV.no_proxy + } + + if (!proxy || isNoProxy(url, noProxy)) { + return null + } + + return new URL(proxy) +} + +module.exports = { + getProxyAgent, + getProxy, + proxyCache: PROXY_CACHE, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json new file mode 100644 index 00000000000000..ef5b4e3228cc46 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/agent/package.json @@ -0,0 +1,60 @@ +{ + "name": "@npmcli/agent", + "version": "2.2.2", + "description": "the http/https agent used by the npm cli", + "main": "lib/index.js", + "scripts": { + "gencerts": "bash scripts/create-cert.sh", + "test": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/npm/agent/issues" + }, + "homepage": "https://github.com/npm/agent#readme", + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": "true" + }, + "dependencies": { + "agent-base": "^7.1.0", + "http-proxy-agent": "^7.0.0", + "https-proxy-agent": "^7.0.1", + "lru-cache": "^10.0.1", + "socks-proxy-agent": "^8.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "minipass-fetch": "^3.0.3", + "nock": "^13.2.7", + "semver": "^7.5.4", + "simple-socks": "^3.1.0", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/agent.git" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md new file mode 100644 index 00000000000000..5fc208ff122e08 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/LICENSE.md @@ -0,0 +1,20 @@ + + +ISC License + +Copyright npm, Inc. + +Permission to use, copy, modify, and/or distribute this +software for any purpose with or without fee is hereby +granted, provided that the above copyright notice and this +permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND NPM DISCLAIMS ALL +WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL +IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO +EVENT SHALL NPM BE LIABLE FOR ANY SPECIAL, DIRECT, +INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, +WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER +TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. 
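getProxy() above falls back to the lowercased https_proxy, http_proxy, and proxy environment variables captured at module load, and isNoProxy() matches no_proxy entries against the hostname label by label from the right, so an entry covers a domain and its subdomains but not lookalike hosts. A self-contained sketch of that suffix match; the function name here is mine, not an export of proxy.js:

'use strict'

// Does `hostname` fall under any entry of a no_proxy-style list?
// Comparison is by whole dot-separated labels, right to left.
const matchesNoProxy = (hostname, noProxy) => {
  const entries = noProxy.split(',').map((e) => e.trim()).filter(Boolean)
  const host = hostname.split('.').reverse()
  return entries.some((entry) => {
    const want = entry.split('.').filter(Boolean).reverse()
    return want.length > 0 && want.every((label, i) => host[i] === label)
  })
}

// matchesNoProxy('api.example.com', 'example.com')  // true: label-suffix match
// matchesNoProxy('badexample.com', 'example.com')   // false: not a whole label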
diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js new file mode 100644 index 00000000000000..cb5982f79077ac --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/get-options.js @@ -0,0 +1,20 @@ +// given an input that may or may not be an object, return an object that has +// a copy of every defined property listed in 'copy'. if the input is not an +// object, assign it to the property named by 'wrap' +const getOptions = (input, { copy, wrap }) => { + const result = {} + + if (input && typeof input === 'object') { + for (const prop of copy) { + if (input[prop] !== undefined) { + result[prop] = input[prop] + } + } + } else { + result[wrap] = input + } + + return result +} + +module.exports = getOptions diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js new file mode 100644 index 00000000000000..4d13bc037359d7 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/common/node.js @@ -0,0 +1,9 @@ +const semver = require('semver') + +const satisfies = (range) => { + return semver.satisfies(process.version, range, { includePrerelease: true }) +} + +module.exports = { + satisfies, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE new file mode 100644 index 00000000000000..93546dfb7655bf --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/LICENSE @@ -0,0 +1,15 @@ +(The MIT License) + +Copyright (c) 2011-2017 JP Richardson + +Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files +(the 'Software'), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, + merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is + furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED 'AS IS', WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE +WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS +OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, + ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js new file mode 100644 index 00000000000000..1cd1e05d0c533d --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/errors.js @@ -0,0 +1,129 @@ +'use strict' +const { inspect } = require('util') + +// adapted from node's internal/errors +// https://github.com/nodejs/node/blob/c8a04049/lib/internal/errors.js + +// close copy of node's internal SystemError class. 
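The E(code, message) factory at the bottom of this file registers one class per error code on module.exports; each constructor takes a context object (syscall, path, errno, message) that is folded into a SystemError-style message and exposed through getters. Roughly how a call site in polyfill.js uses one of them, with made-up values for illustration:

'use strict'

const { ERR_FS_CP_EINVAL } = require('./errors.js')

try {
  throw new ERR_FS_CP_EINVAL({
    message: 'src and dest cannot be the same', // detail folded into the text
    path: '/tmp/a',    // exposed as err.path via a getter
    syscall: 'cp',     // exposed as err.syscall
    errno: -22,        // illustrative value; real callers pass os constants
  })
} catch (err) {
  // err.toString() -> 'SystemError [ERR_FS_CP_EINVAL]: Invalid src or dest: ...'
  console.error(err.code, err.syscall, err.path)
}

Note that SystemError itself does not extend Error, so these objects carry the structured fields but capture no stack trace of their own.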
+class SystemError { + constructor (code, prefix, context) { + // XXX context.code is undefined in all constructors used in cp/polyfill + // that may be a bug copied from node, maybe the constructor should use + // `code` not `errno`? nodejs/node#41104 + let message = `${prefix}: ${context.syscall} returned ` + + `${context.code} (${context.message})` + + if (context.path !== undefined) { + message += ` ${context.path}` + } + if (context.dest !== undefined) { + message += ` => ${context.dest}` + } + + this.code = code + Object.defineProperties(this, { + name: { + value: 'SystemError', + enumerable: false, + writable: true, + configurable: true, + }, + message: { + value: message, + enumerable: false, + writable: true, + configurable: true, + }, + info: { + value: context, + enumerable: true, + configurable: true, + writable: false, + }, + errno: { + get () { + return context.errno + }, + set (value) { + context.errno = value + }, + enumerable: true, + configurable: true, + }, + syscall: { + get () { + return context.syscall + }, + set (value) { + context.syscall = value + }, + enumerable: true, + configurable: true, + }, + }) + + if (context.path !== undefined) { + Object.defineProperty(this, 'path', { + get () { + return context.path + }, + set (value) { + context.path = value + }, + enumerable: true, + configurable: true, + }) + } + + if (context.dest !== undefined) { + Object.defineProperty(this, 'dest', { + get () { + return context.dest + }, + set (value) { + context.dest = value + }, + enumerable: true, + configurable: true, + }) + } + } + + toString () { + return `${this.name} [${this.code}]: ${this.message}` + } + + [Symbol.for('nodejs.util.inspect.custom')] (_recurseTimes, ctx) { + return inspect(this, { + ...ctx, + getters: true, + customInspect: false, + }) + } +} + +function E (code, message) { + module.exports[code] = class NodeError extends SystemError { + constructor (ctx) { + super(code, message, ctx) + } + } +} + +E('ERR_FS_CP_DIR_TO_NON_DIR', 'Cannot overwrite directory with non-directory') +E('ERR_FS_CP_EEXIST', 'Target already exists') +E('ERR_FS_CP_EINVAL', 'Invalid src or dest') +E('ERR_FS_CP_FIFO_PIPE', 'Cannot copy a FIFO pipe') +E('ERR_FS_CP_NON_DIR_TO_DIR', 'Cannot overwrite non-directory with directory') +E('ERR_FS_CP_SOCKET', 'Cannot copy a socket file') +E('ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY', 'Cannot overwrite symlink in subdirectory of self') +E('ERR_FS_CP_UNKNOWN', 'Cannot copy an unknown file type') +E('ERR_FS_EISDIR', 'Path is a directory') + +module.exports.ERR_INVALID_ARG_TYPE = class ERR_INVALID_ARG_TYPE extends Error { + constructor (name, expected, actual) { + super() + this.code = 'ERR_INVALID_ARG_TYPE' + this.message = `The ${name} argument must be ${expected}. 
Received ${typeof actual}` + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js new file mode 100644 index 00000000000000..972ce7aa12abef --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/index.js @@ -0,0 +1,22 @@ +const fs = require('fs/promises') +const getOptions = require('../common/get-options.js') +const node = require('../common/node.js') +const polyfill = require('./polyfill.js') + +// node 16.7.0 added fs.cp +const useNative = node.satisfies('>=16.7.0') + +const cp = async (src, dest, opts) => { + const options = getOptions(opts, { + copy: ['dereference', 'errorOnExist', 'filter', 'force', 'preserveTimestamps', 'recursive'], + }) + + // the polyfill is tested separately from this module, no need to hack + // process.version to try to trigger it just for coverage + // istanbul ignore next + return useNative + ? fs.cp(src, dest, options) + : polyfill(src, dest, options) +} + +module.exports = cp diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js new file mode 100644 index 00000000000000..80eb10de971918 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/cp/polyfill.js @@ -0,0 +1,428 @@ +// this file is a modified version of the code in node 17.2.0 +// which is, in turn, a modified version of the fs-extra module on npm +// node core changes: +// - Use of the assert module has been replaced with core's error system. +// - All code related to the glob dependency has been removed. +// - Bring your own custom fs module is not currently supported. +// - Some basic code cleanup. +// changes here: +// - remove all callback related code +// - drop sync support +// - change assertions back to non-internal methods (see options.js) +// - throws ENOTDIR when rmdir gets an ENOENT for a path that exists in Windows +'use strict' + +const { + ERR_FS_CP_DIR_TO_NON_DIR, + ERR_FS_CP_EEXIST, + ERR_FS_CP_EINVAL, + ERR_FS_CP_FIFO_PIPE, + ERR_FS_CP_NON_DIR_TO_DIR, + ERR_FS_CP_SOCKET, + ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY, + ERR_FS_CP_UNKNOWN, + ERR_FS_EISDIR, + ERR_INVALID_ARG_TYPE, +} = require('./errors.js') +const { + constants: { + errno: { + EEXIST, + EISDIR, + EINVAL, + ENOTDIR, + }, + }, +} = require('os') +const { + chmod, + copyFile, + lstat, + mkdir, + readdir, + readlink, + stat, + symlink, + unlink, + utimes, +} = require('fs/promises') +const { + dirname, + isAbsolute, + join, + parse, + resolve, + sep, + toNamespacedPath, +} = require('path') +const { fileURLToPath } = require('url') + +const defaultOptions = { + dereference: false, + errorOnExist: false, + filter: undefined, + force: true, + preserveTimestamps: false, + recursive: false, +} + +async function cp (src, dest, opts) { + if (opts != null && typeof opts !== 'object') { + throw new ERR_INVALID_ARG_TYPE('options', ['Object'], opts) + } + return cpFn( + toNamespacedPath(getValidatedPath(src)), + toNamespacedPath(getValidatedPath(dest)), + { ...defaultOptions, ...opts }) +} + +function getValidatedPath (fileURLOrPath) { + const path = fileURLOrPath != null && fileURLOrPath.href + && fileURLOrPath.origin + ? 
fileURLToPath(fileURLOrPath) + : fileURLOrPath + return path +} + +async function cpFn (src, dest, opts) { + // Warn about using preserveTimestamps on 32-bit node + // istanbul ignore next + if (opts.preserveTimestamps && process.arch === 'ia32') { + const warning = 'Using the preserveTimestamps option in 32-bit ' + + 'node is not recommended' + process.emitWarning(warning, 'TimestampPrecisionWarning') + } + const stats = await checkPaths(src, dest, opts) + const { srcStat, destStat } = stats + await checkParentPaths(src, srcStat, dest) + if (opts.filter) { + return handleFilter(checkParentDir, destStat, src, dest, opts) + } + return checkParentDir(destStat, src, dest, opts) +} + +async function checkPaths (src, dest, opts) { + const { 0: srcStat, 1: destStat } = await getStats(src, dest, opts) + if (destStat) { + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: 'src and dest cannot be the same', + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + if (srcStat.isDirectory() && !destStat.isDirectory()) { + throw new ERR_FS_CP_DIR_TO_NON_DIR({ + message: `cannot overwrite directory ${src} ` + + `with non-directory ${dest}`, + path: dest, + syscall: 'cp', + errno: EISDIR, + }) + } + if (!srcStat.isDirectory() && destStat.isDirectory()) { + throw new ERR_FS_CP_NON_DIR_TO_DIR({ + message: `cannot overwrite non-directory ${src} ` + + `with directory ${dest}`, + path: dest, + syscall: 'cp', + errno: ENOTDIR, + }) + } + } + + if (srcStat.isDirectory() && isSrcSubdir(src, dest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return { srcStat, destStat } +} + +function areIdentical (srcStat, destStat) { + return destStat.ino && destStat.dev && destStat.ino === srcStat.ino && + destStat.dev === srcStat.dev +} + +function getStats (src, dest, opts) { + const statFunc = opts.dereference ? + (file) => stat(file, { bigint: true }) : + (file) => lstat(file, { bigint: true }) + return Promise.all([ + statFunc(src), + statFunc(dest).catch((err) => { + // istanbul ignore next: unsure how to cover. + if (err.code === 'ENOENT') { + return null + } + // istanbul ignore next: unsure how to cover. + throw err + }), + ]) +} + +async function checkParentDir (destStat, src, dest, opts) { + const destParent = dirname(dest) + const dirExists = await pathExists(destParent) + if (dirExists) { + return getStatsForCopy(destStat, src, dest, opts) + } + await mkdir(destParent, { recursive: true }) + return getStatsForCopy(destStat, src, dest, opts) +} + +function pathExists (dest) { + return stat(dest).then( + () => true, + // istanbul ignore next: not sure when this would occur + (err) => (err.code === 'ENOENT' ? false : Promise.reject(err))) +} + +// Recursively check if dest parent is a subdirectory of src. +// It works for all file types including symlinks since it +// checks the src and dest inodes. It starts from the deepest +// parent and stops once it reaches the src parent or the root path. 
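checkParentPaths(), defined next, implements the inode walk described above; the module pairs it with a purely lexical guard, isSrcSubdir(), which splits both paths into segments and asks whether every src segment prefixes dest. A standalone sketch of that segment comparison (rewritten here, not the module's export):

'use strict'

const { resolve, sep } = require('path')

// Lexical check: is `dest` equal to or inside `src`? Whole segments are
// compared, so '/tmp/foo' covers '/tmp/foo/bar' but not '/tmp/foobar'.
const isInside = (src, dest) => {
  const srcSegs = resolve(src).split(sep).filter(Boolean)
  const destSegs = resolve(dest).split(sep).filter(Boolean)
  return srcSegs.every((seg, i) => destSegs[i] === seg)
}

// isInside('/tmp/foo', '/tmp/foo/bar') // true  -> cp refuses the copy
// isInside('/tmp/foo', '/tmp/foobar')  // false -> fine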
+async function checkParentPaths (src, srcStat, dest) { + const srcParent = resolve(dirname(src)) + const destParent = resolve(dirname(dest)) + if (destParent === srcParent || destParent === parse(destParent).root) { + return + } + let destStat + try { + destStat = await stat(destParent, { bigint: true }) + } catch (err) { + // istanbul ignore else: not sure when this would occur + if (err.code === 'ENOENT') { + return + } + // istanbul ignore next: not sure when this would occur + throw err + } + if (areIdentical(srcStat, destStat)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${src} to a subdirectory of self ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return checkParentPaths(src, srcStat, destParent) +} + +const normalizePathToArray = (path) => + resolve(path).split(sep).filter(Boolean) + +// Return true if dest is a subdir of src, otherwise false. +// It only checks the path strings. +function isSrcSubdir (src, dest) { + const srcArr = normalizePathToArray(src) + const destArr = normalizePathToArray(dest) + return srcArr.every((cur, i) => destArr[i] === cur) +} + +async function handleFilter (onInclude, destStat, src, dest, opts, cb) { + const include = await opts.filter(src, dest) + if (include) { + return onInclude(destStat, src, dest, opts, cb) + } +} + +function startCopy (destStat, src, dest, opts) { + if (opts.filter) { + return handleFilter(getStatsForCopy, destStat, src, dest, opts) + } + return getStatsForCopy(destStat, src, dest, opts) +} + +async function getStatsForCopy (destStat, src, dest, opts) { + const statFn = opts.dereference ? stat : lstat + const srcStat = await statFn(src) + // istanbul ignore else: can't portably test FIFO + if (srcStat.isDirectory() && opts.recursive) { + return onDir(srcStat, destStat, src, dest, opts) + } else if (srcStat.isDirectory()) { + throw new ERR_FS_EISDIR({ + message: `${src} is a directory (not copied)`, + path: src, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFile() || + srcStat.isCharacterDevice() || + srcStat.isBlockDevice()) { + return onFile(srcStat, destStat, src, dest, opts) + } else if (srcStat.isSymbolicLink()) { + return onLink(destStat, src, dest) + } else if (srcStat.isSocket()) { + throw new ERR_FS_CP_SOCKET({ + message: `cannot copy a socket file: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } else if (srcStat.isFIFO()) { + throw new ERR_FS_CP_FIFO_PIPE({ + message: `cannot copy a FIFO pipe: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // istanbul ignore next: should be unreachable + throw new ERR_FS_CP_UNKNOWN({ + message: `cannot copy an unknown file type: ${dest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) +} + +function onFile (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return _copyFile(srcStat, src, dest, opts) + } + return mayCopyFile(srcStat, src, dest, opts) +} + +async function mayCopyFile (srcStat, src, dest, opts) { + if (opts.force) { + await unlink(dest) + return _copyFile(srcStat, src, dest, opts) + } else if (opts.errorOnExist) { + throw new ERR_FS_CP_EEXIST({ + message: `${dest} already exists`, + path: dest, + syscall: 'cp', + errno: EEXIST, + }) + } +} + +async function _copyFile (srcStat, src, dest, opts) { + await copyFile(src, dest) + if (opts.preserveTimestamps) { + return handleTimestampsAndMode(srcStat.mode, src, dest) + } + return setDestMode(dest, srcStat.mode) +} + +async function handleTimestampsAndMode (srcMode, src, dest) { + // Make sure the file is writable before 
setting the timestamp + // otherwise open fails with EPERM when invoked with 'r+' + // (through utimes call) + if (fileIsNotWritable(srcMode)) { + await makeFileWritable(dest, srcMode) + return setDestTimestampsAndMode(srcMode, src, dest) + } + return setDestTimestampsAndMode(srcMode, src, dest) +} + +function fileIsNotWritable (srcMode) { + return (srcMode & 0o200) === 0 +} + +function makeFileWritable (dest, srcMode) { + return setDestMode(dest, srcMode | 0o200) +} + +async function setDestTimestampsAndMode (srcMode, src, dest) { + await setDestTimestamps(src, dest) + return setDestMode(dest, srcMode) +} + +function setDestMode (dest, srcMode) { + return chmod(dest, srcMode) +} + +async function setDestTimestamps (src, dest) { + // The initial srcStat.atime cannot be trusted + // because it is modified by the read(2) system call + // (See https://nodejs.org/api/fs.html#fs_stat_time_values) + const updatedSrcStat = await stat(src) + return utimes(dest, updatedSrcStat.atime, updatedSrcStat.mtime) +} + +function onDir (srcStat, destStat, src, dest, opts) { + if (!destStat) { + return mkDirAndCopy(srcStat.mode, src, dest, opts) + } + return copyDir(src, dest, opts) +} + +async function mkDirAndCopy (srcMode, src, dest, opts) { + await mkdir(dest) + await copyDir(src, dest, opts) + return setDestMode(dest, srcMode) +} + +async function copyDir (src, dest, opts) { + const dir = await readdir(src) + for (let i = 0; i < dir.length; i++) { + const item = dir[i] + const srcItem = join(src, item) + const destItem = join(dest, item) + const { destStat } = await checkPaths(srcItem, destItem, opts) + await startCopy(destStat, srcItem, destItem, opts) + } +} + +async function onLink (destStat, src, dest) { + let resolvedSrc = await readlink(src) + if (!isAbsolute(resolvedSrc)) { + resolvedSrc = resolve(dirname(src), resolvedSrc) + } + if (!destStat) { + return symlink(resolvedSrc, dest) + } + let resolvedDest + try { + resolvedDest = await readlink(dest) + } catch (err) { + // Dest exists and is a regular file or directory, + // Windows may throw UNKNOWN error. If dest already exists, + // fs throws error anyway, so no need to guard against it here. + // istanbul ignore next: can only test on windows + if (err.code === 'EINVAL' || err.code === 'UNKNOWN') { + return symlink(resolvedSrc, dest) + } + // istanbul ignore next: should not be possible + throw err + } + if (!isAbsolute(resolvedDest)) { + resolvedDest = resolve(dirname(dest), resolvedDest) + } + if (isSrcSubdir(resolvedSrc, resolvedDest)) { + throw new ERR_FS_CP_EINVAL({ + message: `cannot copy ${resolvedSrc} to a subdirectory of self ` + + `${resolvedDest}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + // Do not copy if src is a subdir of dest since unlinking + // dest in this case would result in removing src contents + // and therefore a broken symlink would be created. 
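Earlier in onLink() the readlink result is normalized before any of these subdirectory checks run: a relative target is resolved against the directory containing the link, not the process working directory. A small sketch of that normalization; the helper name is mine:

'use strict'

const { dirname, isAbsolute, resolve } = require('path')
const { readlink } = require('fs/promises')

// Absolute target of a symlink; relative targets are anchored at the
// link's own directory.
const absoluteLinkTarget = async (linkPath) => {
  const target = await readlink(linkPath)
  return isAbsolute(target) ? target : resolve(dirname(linkPath), target)
}

// A link /srv/app/current -> 'releases/42' resolves to
// '/srv/app/releases/42', regardless of process.cwd().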
+ const srcStat = await stat(src) + if (srcStat.isDirectory() && isSrcSubdir(resolvedDest, resolvedSrc)) { + throw new ERR_FS_CP_SYMLINK_TO_SUBDIRECTORY({ + message: `cannot overwrite ${resolvedDest} with ${resolvedSrc}`, + path: dest, + syscall: 'cp', + errno: EINVAL, + }) + } + return copyLink(resolvedSrc, dest) +} + +async function copyLink (resolvedSrc, dest) { + await unlink(dest) + return symlink(resolvedSrc, dest) +} + +module.exports = cp diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js new file mode 100644 index 00000000000000..81c746304cc428 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/index.js @@ -0,0 +1,13 @@ +'use strict' + +const cp = require('./cp/index.js') +const withTempDir = require('./with-temp-dir.js') +const readdirScoped = require('./readdir-scoped.js') +const moveFile = require('./move-file.js') + +module.exports = { + cp, + withTempDir, + readdirScoped, + moveFile, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js new file mode 100644 index 00000000000000..d56e06d384659a --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/move-file.js @@ -0,0 +1,78 @@ +const { dirname, join, resolve, relative, isAbsolute } = require('path') +const fs = require('fs/promises') + +const pathExists = async path => { + try { + await fs.access(path) + return true + } catch (er) { + return er.code !== 'ENOENT' + } +} + +const moveFile = async (source, destination, options = {}, root = true, symlinks = []) => { + if (!source || !destination) { + throw new TypeError('`source` and `destination` file required') + } + + options = { + overwrite: true, + ...options, + } + + if (!options.overwrite && await pathExists(destination)) { + throw new Error(`The destination file exists: ${destination}`) + } + + await fs.mkdir(dirname(destination), { recursive: true }) + + try { + await fs.rename(source, destination) + } catch (error) { + if (error.code === 'EXDEV' || error.code === 'EPERM') { + const sourceStat = await fs.lstat(source) + if (sourceStat.isDirectory()) { + const files = await fs.readdir(source) + await Promise.all(files.map((file) => + moveFile(join(source, file), join(destination, file), options, false, symlinks) + )) + } else if (sourceStat.isSymbolicLink()) { + symlinks.push({ source, destination }) + } else { + await fs.copyFile(source, destination) + } + } else { + throw error + } + } + + if (root) { + await Promise.all(symlinks.map(async ({ source: symSource, destination: symDestination }) => { + let target = await fs.readlink(symSource) + // junction symlinks in windows will be absolute paths, so we need to + // make sure they point to the symlink destination + if (isAbsolute(target)) { + target = resolve(symDestination, relative(symSource, target)) + } + // try to determine what the actual file is so we can create the correct + // type of symlink in windows + let targetStat = 'file' + try { + targetStat = await fs.stat(resolve(dirname(symSource), target)) + if (targetStat.isDirectory()) { + targetStat = 'junction' + } + } catch { + // targetStat remains 'file' + } + await fs.symlink( + target, + symDestination, + targetStat + ) + })) + await fs.rm(source, { recursive: true, force: true }) + } +} + +module.exports = moveFile diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js 
b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js new file mode 100644 index 00000000000000..cd601dfbe7486b --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/readdir-scoped.js @@ -0,0 +1,20 @@ +const { readdir } = require('fs/promises') +const { join } = require('path') + +const readdirScoped = async (dir) => { + const results = [] + + for (const item of await readdir(dir)) { + if (item.startsWith('@')) { + for (const scopedItem of await readdir(join(dir, item))) { + results.push(join(item, scopedItem)) + } + } else { + results.push(item) + } + } + + return results +} + +module.exports = readdirScoped diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js new file mode 100644 index 00000000000000..0738ac4f29e1be --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/lib/with-temp-dir.js @@ -0,0 +1,39 @@ +const { join, sep } = require('path') + +const getOptions = require('./common/get-options.js') +const { mkdir, mkdtemp, rm } = require('fs/promises') + +// create a temp directory, ensure its permissions match its parent, then call +// the supplied function passing it the path to the directory. clean up after +// the function finishes, whether it throws or not +const withTempDir = async (root, fn, opts) => { + const options = getOptions(opts, { + copy: ['tmpPrefix'], + }) + // create the directory + await mkdir(root, { recursive: true }) + + const target = await mkdtemp(join(`${root}${sep}`, options.tmpPrefix || '')) + let err + let result + + try { + result = await fn(target) + } catch (_err) { + err = _err + } + + try { + await rm(target, { force: true, recursive: true }) + } catch { + // ignore errors + } + + if (err) { + throw err + } + + return result +} + +module.exports = withTempDir diff --git a/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json new file mode 100644 index 00000000000000..5261a11b78000e --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/@npmcli/fs/package.json @@ -0,0 +1,52 @@ +{ + "name": "@npmcli/fs", + "version": "3.1.1", + "description": "filesystem utilities for the npm cli", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "snap": "tap", + "test": "tap", + "npmclilint": "npmcli-lint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/fs.git" + }, + "keywords": [ + "npm", + "oss" + ], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "dependencies": { + "semver": "^7.3.5" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.22.0" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md new file mode 100644 index 00000000000000..8d28acf866d932 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js new file mode 100644 index 00000000000000..ad5a76a4f73f26 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/path.js @@ -0,0 +1,29 @@ +'use strict' + +const contentVer = require('../../package.json')['cache-version'].content +const hashToSegments = require('../util/hash-to-segments') +const path = require('path') +const ssri = require('ssri') + +// Current format of content file path: +// +// sha512-BaSE64Hex= -> +// ~/.my-cache/content-v2/sha512/ba/da/55deadbeefc0ffee +// +module.exports = contentPath + +function contentPath (cache, integrity) { + const sri = ssri.parse(integrity, { single: true }) + // contentPath is the *strongest* algo given + return path.join( + contentDir(cache), + sri.algorithm, + ...hashToSegments(sri.hexDigest()) + ) +} + +module.exports.contentDir = contentDir + +function contentDir (cache) { + return path.join(cache, `content-v${contentVer}`) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js new file mode 100644 index 00000000000000..5f6192c3cec566 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/read.js @@ -0,0 +1,165 @@ +'use strict' + +const fs = require('fs/promises') +const fsm = require('fs-minipass') +const ssri = require('ssri') +const contentPath = require('./path') +const Pipeline = require('minipass-pipeline') + +module.exports = read + +const MAX_SINGLE_READ_SIZE = 64 * 1024 * 1024 +async function read (cache, integrity, opts = {}) { + const { size } = opts + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? 
{ size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + if (stat.size > MAX_SINGLE_READ_SIZE) { + return readPipeline(cpath, stat.size, sri, new Pipeline()).concat() + } + + const data = await fs.readFile(cpath, { encoding: null }) + + if (stat.size !== data.length) { + throw sizeError(stat.size, data.length) + } + + if (!ssri.checkData(data, sri)) { + throw integrityError(sri, cpath) + } + + return data +} + +const readPipeline = (cpath, size, sri, stream) => { + stream.push( + new fsm.ReadStream(cpath, { + size, + readSize: MAX_SINGLE_READ_SIZE, + }), + ssri.integrityStream({ + integrity: sri, + size, + }) + ) + return stream +} + +module.exports.stream = readStream +module.exports.readStream = readStream + +function readStream (cache, integrity, opts = {}) { + const { size } = opts + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const { stat, cpath, sri } = await withContentSri(cache, integrity, async (cpath, sri) => { + // get size + const stat = size ? { size } : await fs.stat(cpath) + return { stat, cpath, sri } + }) + + return readPipeline(cpath, stat.size, sri, stream) + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.copy = copy + +function copy (cache, integrity, dest) { + return withContentSri(cache, integrity, (cpath) => { + return fs.copyFile(cpath, dest) + }) +} + +module.exports.hasContent = hasContent + +async function hasContent (cache, integrity) { + if (!integrity) { + return false + } + + try { + return await withContentSri(cache, integrity, async (cpath, sri) => { + const stat = await fs.stat(cpath) + return { size: stat.size, sri, stat } + }) + } catch (err) { + if (err.code === 'ENOENT') { + return false + } + + if (err.code === 'EPERM') { + /* istanbul ignore else */ + if (process.platform !== 'win32') { + throw err + } else { + return false + } + } + } +} + +async function withContentSri (cache, integrity, fn) { + const sri = ssri.parse(integrity) + // If `integrity` has multiple entries, pick the first digest + // with available local data. 
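+  // (editor's note, hypothetical value) an integrity string such as
+  // 'sha512-AAA... sha512-BBB...' parses to two digests for the strongest
+  // algorithm; each is tried below and the first with content on disk wins.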
+ const algo = sri.pickAlgorithm() + const digests = sri[algo] + + if (digests.length <= 1) { + const cpath = contentPath(cache, digests[0]) + return fn(cpath, digests[0]) + } else { + // Can't use race here because a generic error can happen before + // a ENOENT error, and can happen before a valid result + const results = await Promise.all(digests.map(async (meta) => { + try { + return await withContentSri(cache, meta, fn) + } catch (err) { + if (err.code === 'ENOENT') { + return Object.assign( + new Error('No matching content found for ' + sri.toString()), + { code: 'ENOENT' } + ) + } + return err + } + })) + // Return the first non error if it is found + const result = results.find((r) => !(r instanceof Error)) + if (result) { + return result + } + + // Throw the No matching content found error + const enoentError = results.find((r) => r.code === 'ENOENT') + if (enoentError) { + throw enoentError + } + + // Throw generic error + throw results.find((r) => r instanceof Error) + } +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function integrityError (sri, path) { + const err = new Error(`Integrity verification failed for ${sri} (${path})`) + err.code = 'EINTEGRITY' + err.sri = sri + err.path = path + return err +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js new file mode 100644 index 00000000000000..ce58d679e4cb25 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/rm.js @@ -0,0 +1,18 @@ +'use strict' + +const fs = require('fs/promises') +const contentPath = require('./path') +const { hasContent } = require('./read') + +module.exports = rm + +async function rm (cache, integrity) { + const content = await hasContent(cache, integrity) + // ~pretty~ sure we can't end up with a content lacking sri, but be safe + if (content && content.sri) { + await fs.rm(contentPath(cache, content.sri), { recursive: true, force: true }) + return true + } else { + return false + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js new file mode 100644 index 00000000000000..e7187abca8788a --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/content/write.js @@ -0,0 +1,206 @@ +'use strict' + +const events = require('events') + +const contentPath = require('./path') +const fs = require('fs/promises') +const { moveFile } = require('@npmcli/fs') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') +const Flush = require('minipass-flush') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') +const fsm = require('fs-minipass') + +module.exports = write + +// Cache of move operations in process so we don't duplicate +const moveOperations = new Map() + +async function write (cache, data, opts = {}) { + const { algorithms, size, integrity } = opts + + if (typeof size === 'number' && data.length !== size) { + throw sizeError(size, data.length) + } + + const sri = ssri.fromData(data, algorithms ? 
{ algorithms } : {}) + if (integrity && !ssri.checkData(data, integrity, opts)) { + throw checksumError(integrity, sri) + } + + for (const algo in sri) { + const tmp = await makeTmp(cache, opts) + const hash = sri[algo].toString() + try { + await fs.writeFile(tmp.target, data, { flag: 'wx' }) + await moveToDestination(tmp, cache, hash, opts) + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } + } + return { integrity: sri, size: data.length } +} + +module.exports.stream = writeStream + +// writes proxied to the 'inputStream' that is passed to the Promise +// 'end' is deferred until content is handled. +class CacacheWriteStream extends Flush { + constructor (cache, opts) { + super() + this.opts = opts + this.cache = cache + this.inputStream = new Minipass() + this.inputStream.on('error', er => this.emit('error', er)) + this.inputStream.on('drain', () => this.emit('drain')) + this.handleContentP = null + } + + write (chunk, encoding, cb) { + if (!this.handleContentP) { + this.handleContentP = handleContent( + this.inputStream, + this.cache, + this.opts + ) + this.handleContentP.catch(error => this.emit('error', error)) + } + return this.inputStream.write(chunk, encoding, cb) + } + + flush (cb) { + this.inputStream.end(() => { + if (!this.handleContentP) { + const e = new Error('Cache input stream was empty') + e.code = 'ENODATA' + // empty streams are probably emitting end right away. + // defer this one tick by rejecting a promise on it. + return Promise.reject(e).catch(cb) + } + // eslint-disable-next-line promise/catch-or-return + this.handleContentP.then( + (res) => { + res.integrity && this.emit('integrity', res.integrity) + // eslint-disable-next-line promise/always-return + res.size !== null && this.emit('size', res.size) + cb() + }, + (er) => cb(er) + ) + }) + } +} + +function writeStream (cache, opts = {}) { + return new CacacheWriteStream(cache, opts) +} + +async function handleContent (inputStream, cache, opts) { + const tmp = await makeTmp(cache, opts) + try { + const res = await pipeToTmp(inputStream, cache, tmp.target, opts) + await moveToDestination( + tmp, + cache, + res.integrity, + opts + ) + return res + } finally { + if (!tmp.moved) { + await fs.rm(tmp.target, { recursive: true, force: true }) + } + } +} + +async function pipeToTmp (inputStream, cache, tmpTarget, opts) { + const outStream = new fsm.WriteStream(tmpTarget, { + flags: 'wx', + }) + + if (opts.integrityEmitter) { + // we need to create these all simultaneously since they can fire in any order + const [integrity, size] = await Promise.all([ + events.once(opts.integrityEmitter, 'integrity').then(res => res[0]), + events.once(opts.integrityEmitter, 'size').then(res => res[0]), + new Pipeline(inputStream, outStream).promise(), + ]) + return { integrity, size } + } + + let integrity + let size + const hashStream = ssri.integrityStream({ + integrity: opts.integrity, + algorithms: opts.algorithms, + size: opts.size, + }) + hashStream.on('integrity', i => { + integrity = i + }) + hashStream.on('size', s => { + size = s + }) + + const pipeline = new Pipeline(inputStream, hashStream, outStream) + await pipeline.promise() + return { integrity, size } +} + +async function makeTmp (cache, opts) { + const tmpTarget = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await fs.mkdir(path.dirname(tmpTarget), { recursive: true }) + return { + target: tmpTarget, + moved: false, + } +} + +async function moveToDestination (tmp, cache, sri) { + const destination = 
contentPath(cache, sri) + const destDir = path.dirname(destination) + if (moveOperations.has(destination)) { + return moveOperations.get(destination) + } + moveOperations.set( + destination, + fs.mkdir(destDir, { recursive: true }) + .then(async () => { + await moveFile(tmp.target, destination, { overwrite: false }) + tmp.moved = true + return tmp.moved + }) + .catch(err => { + if (!err.message.startsWith('The destination file exists')) { + throw Object.assign(err, { code: 'EEXIST' }) + } + }).finally(() => { + moveOperations.delete(destination) + }) + + ) + return moveOperations.get(destination) +} + +function sizeError (expected, found) { + /* eslint-disable-next-line max-len */ + const err = new Error(`Bad data size: expected inserted data to be ${expected} bytes, but got ${found} instead`) + err.expected = expected + err.found = found + err.code = 'EBADSIZE' + return err +} + +function checksumError (expected, found) { + const err = new Error(`Integrity check failed: + Wanted: ${expected} + Found: ${found}`) + err.code = 'EINTEGRITY' + err.expected = expected + err.found = found + return err +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js new file mode 100644 index 00000000000000..89c28f2f257d48 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/entry-index.js @@ -0,0 +1,336 @@ +'use strict' + +const crypto = require('crypto') +const { + appendFile, + mkdir, + readFile, + readdir, + rm, + writeFile, +} = require('fs/promises') +const { Minipass } = require('minipass') +const path = require('path') +const ssri = require('ssri') +const uniqueFilename = require('unique-filename') + +const contentPath = require('./content/path') +const hashToSegments = require('./util/hash-to-segments') +const indexV = require('../package.json')['cache-version'].index +const { moveFile } = require('@npmcli/fs') + +const pMap = require('p-map') +const lsStreamConcurrency = 5 + +module.exports.NotFoundError = class NotFoundError extends Error { + constructor (cache, key) { + super(`No cache entry for ${key} found in ${cache}`) + this.code = 'ENOENT' + this.cache = cache + this.key = key + } +} + +module.exports.compact = compact + +async function compact (cache, key, matchFn, opts = {}) { + const bucket = bucketPath(cache, key) + const entries = await bucketEntries(bucket) + const newEntries = [] + // we loop backwards because the bottom-most result is the newest + // since we add new entries with appendFile + for (let i = entries.length - 1; i >= 0; --i) { + const entry = entries[i] + // a null integrity could mean either a delete was appended + // or the user has simply stored an index that does not map + // to any content. we determine if the user wants to keep the + // null integrity based on the validateEntry function passed in options. + // if the integrity is null and no validateEntry is provided, we break + // as we consider the null integrity to be a deletion of everything + // that came before it. 
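+    // (editor's note, illustrative) scanning a bucket of [putA, delete, putB]
+    // from the newest end keeps only putB: the null-integrity delete entry
+    // triggers the break below before putA is ever considered.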
+ if (entry.integrity === null && !opts.validateEntry) { + break + } + + // if this entry is valid, and it is either the first entry or + // the newEntries array doesn't already include an entry that + // matches this one based on the provided matchFn, then we add + // it to the beginning of our list + if ((!opts.validateEntry || opts.validateEntry(entry) === true) && + (newEntries.length === 0 || + !newEntries.find((oldEntry) => matchFn(oldEntry, entry)))) { + newEntries.unshift(entry) + } + } + + const newIndex = '\n' + newEntries.map((entry) => { + const stringified = JSON.stringify(entry) + const hash = hashEntry(stringified) + return `${hash}\t${stringified}` + }).join('\n') + + const setup = async () => { + const target = uniqueFilename(path.join(cache, 'tmp'), opts.tmpPrefix) + await mkdir(path.dirname(target), { recursive: true }) + return { + target, + moved: false, + } + } + + const teardown = async (tmp) => { + if (!tmp.moved) { + return rm(tmp.target, { recursive: true, force: true }) + } + } + + const write = async (tmp) => { + await writeFile(tmp.target, newIndex, { flag: 'wx' }) + await mkdir(path.dirname(bucket), { recursive: true }) + // we use @npmcli/move-file directly here because we + // want to overwrite the existing file + await moveFile(tmp.target, bucket) + tmp.moved = true + } + + // write the file atomically + const tmp = await setup() + try { + await write(tmp) + } finally { + await teardown(tmp) + } + + // we reverse the list we generated such that the newest + // entries come first in order to make looping through them easier + // the true passed to formatEntry tells it to keep null + // integrity values, if they made it this far it's because + // validateEntry returned true, and as such we should return it + return newEntries.reverse().map((entry) => formatEntry(cache, entry, true)) +} + +module.exports.insert = insert + +async function insert (cache, key, integrity, opts = {}) { + const { metadata, size, time } = opts + const bucket = bucketPath(cache, key) + const entry = { + key, + integrity: integrity && ssri.stringify(integrity), + time: time || Date.now(), + size, + metadata, + } + try { + await mkdir(path.dirname(bucket), { recursive: true }) + const stringified = JSON.stringify(entry) + // NOTE - Cleverness ahoy! + // + // This works because it's tremendously unlikely for an entry to corrupt + // another while still preserving the string length of the JSON in + // question. So, we just slap the length in there and verify it on read. + // + // Thanks to @isaacs for the whiteboarding session that ended up with + // this. 
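+    // (editor's note) each bucket line is therefore, schematically:
+    //   <sha1-of-entry-json>\t<entry-json>
+    // which is exactly what the appendFile call below writes.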
+ await appendFile(bucket, `\n${hashEntry(stringified)}\t${stringified}`) + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + + throw err + } + return formatEntry(cache, entry) +} + +module.exports.find = find + +async function find (cache, key) { + const bucket = bucketPath(cache, key) + try { + const entries = await bucketEntries(bucket) + return entries.reduce((latest, next) => { + if (next && next.key === key) { + return formatEntry(cache, next) + } else { + return latest + } + }, null) + } catch (err) { + if (err.code === 'ENOENT') { + return null + } else { + throw err + } + } +} + +module.exports.delete = del + +function del (cache, key, opts = {}) { + if (!opts.removeFully) { + return insert(cache, key, null, opts) + } + + const bucket = bucketPath(cache, key) + return rm(bucket, { recursive: true, force: true }) +} + +module.exports.lsStream = lsStream + +function lsStream (cache) { + const indexDir = bucketDir(cache) + const stream = new Minipass({ objectMode: true }) + + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const buckets = await readdirOrEmpty(indexDir) + await pMap(buckets, async (bucket) => { + const bucketPath = path.join(indexDir, bucket) + const subbuckets = await readdirOrEmpty(bucketPath) + await pMap(subbuckets, async (subbucket) => { + const subbucketPath = path.join(bucketPath, subbucket) + + // "/cachename//./*" + const subbucketEntries = await readdirOrEmpty(subbucketPath) + await pMap(subbucketEntries, async (entry) => { + const entryPath = path.join(subbucketPath, entry) + try { + const entries = await bucketEntries(entryPath) + // using a Map here prevents duplicate keys from showing up + // twice, I guess? + const reduced = entries.reduce((acc, entry) => { + acc.set(entry.key, entry) + return acc + }, new Map()) + // reduced is a map of key => entry + for (const entry of reduced.values()) { + const formatted = formatEntry(cache, entry) + if (formatted) { + stream.write(formatted) + } + } + } catch (err) { + if (err.code === 'ENOENT') { + return undefined + } + throw err + } + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + }, + { concurrency: lsStreamConcurrency }) + stream.end() + return stream + }).catch(err => stream.emit('error', err)) + + return stream +} + +module.exports.ls = ls + +async function ls (cache) { + const entries = await lsStream(cache).collect() + return entries.reduce((acc, xs) => { + acc[xs.key] = xs + return acc + }, {}) +} + +module.exports.bucketEntries = bucketEntries + +async function bucketEntries (bucket, filter) { + const data = await readFile(bucket, 'utf8') + return _bucketEntries(data, filter) +} + +function _bucketEntries (data) { + const entries = [] + data.split('\n').forEach((entry) => { + if (!entry) { + return + } + + const pieces = entry.split('\t') + if (!pieces[1] || hashEntry(pieces[1]) !== pieces[0]) { + // Hash is no good! Corruption or malice? Doesn't matter! 
+ // EJECT EJECT + return + } + let obj + try { + obj = JSON.parse(pieces[1]) + } catch (_) { + // eslint-ignore-next-line no-empty-block + } + // coverage disabled here, no need to test with an entry that parses to something falsey + // istanbul ignore else + if (obj) { + entries.push(obj) + } + }) + return entries +} + +module.exports.bucketDir = bucketDir + +function bucketDir (cache) { + return path.join(cache, `index-v${indexV}`) +} + +module.exports.bucketPath = bucketPath + +function bucketPath (cache, key) { + const hashed = hashKey(key) + return path.join.apply( + path, + [bucketDir(cache)].concat(hashToSegments(hashed)) + ) +} + +module.exports.hashKey = hashKey + +function hashKey (key) { + return hash(key, 'sha256') +} + +module.exports.hashEntry = hashEntry + +function hashEntry (str) { + return hash(str, 'sha1') +} + +function hash (str, digest) { + return crypto + .createHash(digest) + .update(str) + .digest('hex') +} + +function formatEntry (cache, entry, keepAll) { + // Treat null digests as deletions. They'll shadow any previous entries. + if (!entry.integrity && !keepAll) { + return null + } + + return { + key: entry.key, + integrity: entry.integrity, + path: entry.integrity ? contentPath(cache, entry.integrity) : undefined, + size: entry.size, + time: entry.time, + metadata: entry.metadata, + } +} + +function readdirOrEmpty (dir) { + return readdir(dir).catch((err) => { + if (err.code === 'ENOENT' || err.code === 'ENOTDIR') { + return [] + } + + throw err + }) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js new file mode 100644 index 00000000000000..80ec206c7ecaaa --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/get.js @@ -0,0 +1,170 @@ +'use strict' + +const Collect = require('minipass-collect') +const { Minipass } = require('minipass') +const Pipeline = require('minipass-pipeline') + +const index = require('./entry-index') +const memo = require('./memoization') +const read = require('./content/read') + +async function getData (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return { + metadata: memoized.entry.metadata, + data: memoized.data, + integrity: memoized.entry.integrity, + size: memoized.entry.size, + } + } + + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + const data = await read(cache, entry.integrity, { integrity, size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return { + data, + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} +module.exports = getData + +async function getDataByDigest (cache, key, opts = {}) { + const { integrity, memoize, size } = opts + const memoized = memo.get.byDigest(cache, key, opts) + if (memoized && memoize !== false) { + return memoized + } + + const res = await read(cache, key, { integrity, size }) + if (memoize) { + memo.put.byDigest(cache, key, res, opts) + } + return res +} +module.exports.byDigest = getDataByDigest + +const getMemoizedStream = (memoized) => { + const stream = new Minipass() + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(memoized.entry.metadata) + ev === 'integrity' && cb(memoized.entry.integrity) + ev === 'size' && cb(memoized.entry.size) + }) + stream.end(memoized.data) + return stream +} + +function getStream (cache, key, opts = {}) { + 
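+  // (editor's note) streaming analogue of getData above: metadata, integrity
+  // and size are delivered as events, replayed via 'newListener' so that
+  // late subscribers still receive them.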
const { memoize, size } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return getMemoizedStream(memoized) + } + + const stream = new Pipeline() + // Set all this up to run on the stream and then just return the stream + Promise.resolve().then(async () => { + const entry = await index.find(cache, key) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + + stream.emit('metadata', entry.metadata) + stream.emit('integrity', entry.integrity) + stream.emit('size', entry.size) + stream.on('newListener', function (ev, cb) { + ev === 'metadata' && cb(entry.metadata) + ev === 'integrity' && cb(entry.integrity) + ev === 'size' && cb(entry.size) + }) + + const src = read.readStream( + cache, + entry.integrity, + { ...opts, size: typeof size !== 'number' ? entry.size : size } + ) + + if (memoize) { + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put(cache, entry, data, opts)) + stream.unshift(memoStream) + } + stream.unshift(src) + return stream + }).catch((err) => stream.emit('error', err)) + + return stream +} + +module.exports.stream = getStream + +function getStreamDigest (cache, integrity, opts = {}) { + const { memoize } = opts + const memoized = memo.get.byDigest(cache, integrity, opts) + if (memoized && memoize !== false) { + const stream = new Minipass() + stream.end(memoized) + return stream + } else { + const stream = read.readStream(cache, integrity, opts) + if (!memoize) { + return stream + } + + const memoStream = new Collect.PassThrough() + memoStream.on('collect', data => memo.put.byDigest( + cache, + integrity, + data, + opts + )) + return new Pipeline(stream, memoStream) + } +} + +module.exports.stream.byDigest = getStreamDigest + +function info (cache, key, opts = {}) { + const { memoize } = opts + const memoized = memo.get(cache, key, opts) + if (memoized && memoize !== false) { + return Promise.resolve(memoized.entry) + } else { + return index.find(cache, key) + } +} +module.exports.info = info + +async function copy (cache, key, dest, opts = {}) { + const entry = await index.find(cache, key, opts) + if (!entry) { + throw new index.NotFoundError(cache, key) + } + await read.copy(cache, entry.integrity, dest, opts) + return { + metadata: entry.metadata, + size: entry.size, + integrity: entry.integrity, + } +} + +module.exports.copy = copy + +async function copyByDigest (cache, key, dest, opts = {}) { + await read.copy(cache, key, dest, opts) + return key +} + +module.exports.copy.byDigest = copyByDigest + +module.exports.hasContent = read.hasContent diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js new file mode 100644 index 00000000000000..c9b0da5f3a271b --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/index.js @@ -0,0 +1,42 @@ +'use strict' + +const get = require('./get.js') +const put = require('./put.js') +const rm = require('./rm.js') +const verify = require('./verify.js') +const { clearMemoized } = require('./memoization.js') +const tmp = require('./util/tmp.js') +const index = require('./entry-index.js') + +module.exports.index = {} +module.exports.index.compact = index.compact +module.exports.index.insert = index.insert + +module.exports.ls = index.ls +module.exports.ls.stream = index.lsStream + +module.exports.get = get +module.exports.get.byDigest = get.byDigest +module.exports.get.stream = get.stream +module.exports.get.stream.byDigest = get.stream.byDigest 
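+// (editor's note) illustrative usage of the facade assembled here, with
+// `cache` and `key` as hypothetical values:
+//   const { data, integrity } = await cacache.get(cache, key)
+//   const buf = await cacache.get.byDigest(cache, integrity)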
+module.exports.get.copy = get.copy +module.exports.get.copy.byDigest = get.copy.byDigest +module.exports.get.info = get.info +module.exports.get.hasContent = get.hasContent + +module.exports.put = put +module.exports.put.stream = put.stream + +module.exports.rm = rm.entry +module.exports.rm.all = rm.all +module.exports.rm.entry = module.exports.rm +module.exports.rm.content = rm.content + +module.exports.clearMemoized = clearMemoized + +module.exports.tmp = {} +module.exports.tmp.mkdir = tmp.mkdir +module.exports.tmp.withTmp = tmp.withTmp + +module.exports.verify = verify +module.exports.verify.lastRun = verify.lastRun diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js new file mode 100644 index 00000000000000..2ecc60912e4563 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/memoization.js @@ -0,0 +1,72 @@ +'use strict' + +const { LRUCache } = require('lru-cache') + +const MEMOIZED = new LRUCache({ + max: 500, + maxSize: 50 * 1024 * 1024, // 50MB + ttl: 3 * 60 * 1000, // 3 minutes + sizeCalculation: (entry, key) => key.startsWith('key:') ? entry.data.length : entry.length, +}) + +module.exports.clearMemoized = clearMemoized + +function clearMemoized () { + const old = {} + MEMOIZED.forEach((v, k) => { + old[k] = v + }) + MEMOIZED.clear() + return old +} + +module.exports.put = put + +function put (cache, entry, data, opts) { + pickMem(opts).set(`key:${cache}:${entry.key}`, { entry, data }) + putDigest(cache, entry.integrity, data, opts) +} + +module.exports.put.byDigest = putDigest + +function putDigest (cache, integrity, data, opts) { + pickMem(opts).set(`digest:${cache}:${integrity}`, data) +} + +module.exports.get = get + +function get (cache, key, opts) { + return pickMem(opts).get(`key:${cache}:${key}`) +} + +module.exports.get.byDigest = getDigest + +function getDigest (cache, integrity, opts) { + return pickMem(opts).get(`digest:${cache}:${integrity}`) +} + +class ObjProxy { + constructor (obj) { + this.obj = obj + } + + get (key) { + return this.obj[key] + } + + set (key, val) { + this.obj[key] = val + } +} + +function pickMem (opts) { + if (!opts || !opts.memoize) { + return MEMOIZED + } else if (opts.memoize.get && opts.memoize.set) { + return opts.memoize + } else if (typeof opts.memoize === 'object') { + return new ObjProxy(opts.memoize) + } else { + return MEMOIZED + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js new file mode 100644 index 00000000000000..9fc932d5f6dec5 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/put.js @@ -0,0 +1,80 @@ +'use strict' + +const index = require('./entry-index') +const memo = require('./memoization') +const write = require('./content/write') +const Flush = require('minipass-flush') +const { PassThrough } = require('minipass-collect') +const Pipeline = require('minipass-pipeline') + +const putOpts = (opts) => ({ + algorithms: ['sha512'], + ...opts, +}) + +module.exports = putData + +async function putData (cache, key, data, opts = {}) { + const { memoize } = opts + opts = putOpts(opts) + const res = await write(cache, data, opts) + const entry = await index.insert(cache, key, res.integrity, { ...opts, size: res.size }) + if (memoize) { + memo.put(cache, entry, data, opts) + } + + return res.integrity +} + +module.exports.stream = putStream + +function putStream (cache, key, opts = {}) { + const { 
memoize } = opts + opts = putOpts(opts) + let integrity + let size + let error + + let memoData + const pipeline = new Pipeline() + // first item in the pipeline is the memoizer, because we need + // that to end first and get the collected data. + if (memoize) { + const memoizer = new PassThrough().on('collect', data => { + memoData = data + }) + pipeline.push(memoizer) + } + + // contentStream is a write-only, not a passthrough + // no data comes out of it. + const contentStream = write.stream(cache, opts) + .on('integrity', (int) => { + integrity = int + }) + .on('size', (s) => { + size = s + }) + .on('error', (err) => { + error = err + }) + + pipeline.push(contentStream) + + // last but not least, we write the index and emit hash and size, + // and memoize if we're doing that + pipeline.push(new Flush({ + async flush () { + if (!error) { + const entry = await index.insert(cache, key, integrity, { ...opts, size }) + if (memoize && memoData) { + memo.put(cache, entry, memoData, opts) + } + pipeline.emit('integrity', integrity) + pipeline.emit('size', size) + } + }, + })) + + return pipeline +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js new file mode 100644 index 00000000000000..a94760c7cf2430 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/rm.js @@ -0,0 +1,31 @@ +'use strict' + +const { rm } = require('fs/promises') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const memo = require('./memoization') +const path = require('path') +const rmContent = require('./content/rm') + +module.exports = entry +module.exports.entry = entry + +function entry (cache, key, opts) { + memo.clearMemoized() + return index.delete(cache, key, opts) +} + +module.exports.content = content + +function content (cache, integrity) { + memo.clearMemoized() + return rmContent(cache, integrity) +} + +module.exports.all = all + +async function all (cache) { + memo.clearMemoized() + const paths = await glob(path.join(cache, '*(content-*|index-*)'), { silent: true, nosort: true }) + return Promise.all(paths.map((p) => rm(p, { recursive: true, force: true }))) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js new file mode 100644 index 00000000000000..8500c1c16a429f --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/glob.js @@ -0,0 +1,7 @@ +'use strict' + +const { glob } = require('glob') +const path = require('path') + +const globify = (pattern) => pattern.split(path.win32.sep).join(path.posix.sep) +module.exports = (path, options) => glob(globify(path), options) diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js new file mode 100644 index 00000000000000..445599b5038088 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/hash-to-segments.js @@ -0,0 +1,7 @@ +'use strict' + +module.exports = hashToSegments + +function hashToSegments (hash) { + return [hash.slice(0, 2), hash.slice(2, 4), hash.slice(4)] +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js new file mode 100644 index 00000000000000..0bf5302136ebeb --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/util/tmp.js @@ -0,0 
+1,26 @@ +'use strict' + +const { withTempDir } = require('@npmcli/fs') +const fs = require('fs/promises') +const path = require('path') + +module.exports.mkdir = mktmpdir + +async function mktmpdir (cache, opts = {}) { + const { tmpPrefix } = opts + const tmpDir = path.join(cache, 'tmp') + await fs.mkdir(tmpDir, { recursive: true, owner: 'inherit' }) + // do not use path.join(), it drops the trailing / if tmpPrefix is unset + const target = `${tmpDir}${path.sep}${tmpPrefix || ''}` + return fs.mkdtemp(target, { owner: 'inherit' }) +} + +module.exports.withTmp = withTmp + +function withTmp (cache, opts, cb) { + if (!cb) { + cb = opts + opts = {} + } + return withTempDir(path.join(cache, 'tmp'), cb, opts) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js new file mode 100644 index 00000000000000..d7423da1295b68 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/lib/verify.js @@ -0,0 +1,257 @@ +'use strict' + +const { + mkdir, + readFile, + rm, + stat, + truncate, + writeFile, +} = require('fs/promises') +const pMap = require('p-map') +const contentPath = require('./content/path') +const fsm = require('fs-minipass') +const glob = require('./util/glob.js') +const index = require('./entry-index') +const path = require('path') +const ssri = require('ssri') + +const hasOwnProperty = (obj, key) => + Object.prototype.hasOwnProperty.call(obj, key) + +const verifyOpts = (opts) => ({ + concurrency: 20, + log: { silly () {} }, + ...opts, +}) + +module.exports = verify + +async function verify (cache, opts) { + opts = verifyOpts(opts) + opts.log.silly('verify', 'verifying cache at', cache) + + const steps = [ + markStartTime, + fixPerms, + garbageCollect, + rebuildIndex, + cleanTmp, + writeVerifile, + markEndTime, + ] + + const stats = {} + for (const step of steps) { + const label = step.name + const start = new Date() + const s = await step(cache, opts) + if (s) { + Object.keys(s).forEach((k) => { + stats[k] = s[k] + }) + } + const end = new Date() + if (!stats.runTime) { + stats.runTime = {} + } + stats.runTime[label] = end - start + } + stats.runTime.total = stats.endTime - stats.startTime + opts.log.silly( + 'verify', + 'verification finished for', + cache, + 'in', + `${stats.runTime.total}ms` + ) + return stats +} + +async function markStartTime () { + return { startTime: new Date() } +} + +async function markEndTime () { + return { endTime: new Date() } +} + +async function fixPerms (cache, opts) { + opts.log.silly('verify', 'fixing cache permissions') + await mkdir(cache, { recursive: true }) + return null +} + +// Implements a naive mark-and-sweep tracing garbage collector. +// +// The algorithm is basically as follows: +// 1. Read (and filter) all index entries ("pointers") +// 2. Mark each integrity value as "live" +// 3. Read entire filesystem tree in `content-vX/` dir +// 4. If content is live, verify its checksum and delete it if it fails +// 5. If content is not marked as live, rm it. 
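+// (editor's note) "live" means referenced by at least one index entry that
+// survives the optional filter; live files are checksum-verified, anything
+// else found under content-vX/ is reclaimed.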
+// +async function garbageCollect (cache, opts) { + opts.log.silly('verify', 'garbage collecting content') + const indexStream = index.lsStream(cache) + const liveContent = new Set() + indexStream.on('data', (entry) => { + if (opts.filter && !opts.filter(entry)) { + return + } + + // integrity is stringified, re-parse it so we can get each hash + const integrity = ssri.parse(entry.integrity) + for (const algo in integrity) { + liveContent.add(integrity[algo].toString()) + } + }) + await new Promise((resolve, reject) => { + indexStream.on('end', resolve).on('error', reject) + }) + const contentDir = contentPath.contentDir(cache) + const files = await glob(path.join(contentDir, '**'), { + follow: false, + nodir: true, + nosort: true, + }) + const stats = { + verifiedContent: 0, + reclaimedCount: 0, + reclaimedSize: 0, + badContentCount: 0, + keptSize: 0, + } + await pMap( + files, + async (f) => { + const split = f.split(/[/\\]/) + const digest = split.slice(split.length - 3).join('') + const algo = split[split.length - 4] + const integrity = ssri.fromHex(digest, algo) + if (liveContent.has(integrity.toString())) { + const info = await verifyContent(f, integrity) + if (!info.valid) { + stats.reclaimedCount++ + stats.badContentCount++ + stats.reclaimedSize += info.size + } else { + stats.verifiedContent++ + stats.keptSize += info.size + } + } else { + // No entries refer to this content. We can delete. + stats.reclaimedCount++ + const s = await stat(f) + await rm(f, { recursive: true, force: true }) + stats.reclaimedSize += s.size + } + return stats + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function verifyContent (filepath, sri) { + const contentInfo = {} + try { + const { size } = await stat(filepath) + contentInfo.size = size + contentInfo.valid = true + await ssri.checkStream(new fsm.ReadStream(filepath), sri) + } catch (err) { + if (err.code === 'ENOENT') { + return { size: 0, valid: false } + } + if (err.code !== 'EINTEGRITY') { + throw err + } + + await rm(filepath, { recursive: true, force: true }) + contentInfo.valid = false + } + return contentInfo +} + +async function rebuildIndex (cache, opts) { + opts.log.silly('verify', 'rebuilding index') + const entries = await index.ls(cache) + const stats = { + missingContent: 0, + rejectedEntries: 0, + totalEntries: 0, + } + const buckets = {} + for (const k in entries) { + /* istanbul ignore else */ + if (hasOwnProperty(entries, k)) { + const hashed = index.hashKey(k) + const entry = entries[k] + const excluded = opts.filter && !opts.filter(entry) + excluded && stats.rejectedEntries++ + if (buckets[hashed] && !excluded) { + buckets[hashed].push(entry) + } else if (buckets[hashed] && excluded) { + // skip + } else if (excluded) { + buckets[hashed] = [] + buckets[hashed]._path = index.bucketPath(cache, k) + } else { + buckets[hashed] = [entry] + buckets[hashed]._path = index.bucketPath(cache, k) + } + } + } + await pMap( + Object.keys(buckets), + (key) => { + return rebuildBucket(cache, buckets[key], stats, opts) + }, + { concurrency: opts.concurrency } + ) + return stats +} + +async function rebuildBucket (cache, bucket, stats) { + await truncate(bucket._path) + // This needs to be serialized because cacache explicitly + // lets very racy bucket conflicts clobber each other. 
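+  // (editor's note) hence the sequential for...of below instead of a
+  // Promise.all: every insert appends to the same just-truncated bucket file.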
+ for (const entry of bucket) { + const content = contentPath(cache, entry.integrity) + try { + await stat(content) + await index.insert(cache, entry.key, entry.integrity, { + metadata: entry.metadata, + size: entry.size, + time: entry.time, + }) + stats.totalEntries++ + } catch (err) { + if (err.code === 'ENOENT') { + stats.rejectedEntries++ + stats.missingContent++ + } else { + throw err + } + } + } +} + +function cleanTmp (cache, opts) { + opts.log.silly('verify', 'cleaning tmp directory') + return rm(path.join(cache, 'tmp'), { recursive: true, force: true }) +} + +async function writeVerifile (cache, opts) { + const verifile = path.join(cache, '_lastverified') + opts.log.silly('verify', 'writing verifile to ' + verifile) + return writeFile(verifile, `${Date.now()}`) +} + +module.exports.lastRun = lastRun + +async function lastRun (cache) { + const data = await readFile(path.join(cache, '_lastverified'), { encoding: 'utf8' }) + return new Date(+data) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json b/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json new file mode 100644 index 00000000000000..6e6219158ed759 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/cacache/package.json @@ -0,0 +1,82 @@ +{ + "name": "cacache", + "version": "18.0.4", + "cache-version": { + "content": "2", + "index": "5" + }, + "description": "Fast, fault-tolerant, cross-platform, disk-based, data-agnostic, content-addressable cache.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "snap": "tap", + "coverage": "tap", + "test-docker": "docker run -it --rm --name pacotest -v \"$PWD\":/tmp -w /tmp node:latest npm test", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "npmclilint": "npmcli-lint", + "lintfix": "npm run lint -- --fix", + "postsnap": "npm run lintfix --", + "postlint": "template-oss-check", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/cacache.git" + }, + "keywords": [ + "cache", + "caching", + "content-addressable", + "sri", + "sri hash", + "subresource integrity", + "cache", + "storage", + "store", + "file store", + "filesystem", + "disk cache", + "disk storage" + ], + "license": "ISC", + "dependencies": { + "@npmcli/fs": "^3.1.0", + "fs-minipass": "^3.0.0", + "glob": "^10.2.2", + "lru-cache": "^10.0.1", + "minipass": "^7.0.3", + "minipass-collect": "^2.0.1", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "p-map": "^4.0.0", + "ssri": "^10.0.0", + "tar": "^6.1.11", + "unique-filename": "^3.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "windowsCI": false, + "version": "4.22.0", + "publish": "true" + }, + "author": "GitHub Inc.", + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE new file mode 100644 index 00000000000000..1808eb2844231c --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/LICENSE @@ -0,0 +1,16 @@ +ISC License + +Copyright 2017-2022 (c) npm, Inc. 
+ +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js new file mode 100644 index 00000000000000..bfcfacbcc95e18 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/entry.js @@ -0,0 +1,471 @@ +const { Request, Response } = require('minipass-fetch') +const { Minipass } = require('minipass') +const MinipassFlush = require('minipass-flush') +const cacache = require('cacache') +const url = require('url') + +const CachingMinipassPipeline = require('../pipeline.js') +const CachePolicy = require('./policy.js') +const cacheKey = require('./key.js') +const remote = require('../remote.js') + +const hasOwnProperty = (obj, prop) => Object.prototype.hasOwnProperty.call(obj, prop) + +// allow list for request headers that will be written to the cache index +// note: we will also store any request headers +// that are named in a response's vary header +const KEEP_REQUEST_HEADERS = [ + 'accept-charset', + 'accept-encoding', + 'accept-language', + 'accept', + 'cache-control', +] + +// allow list for response headers that will be written to the cache index +// note: we must not store the real response's age header, or when we load +// a cache policy based on the metadata it will think the cached response +// is always stale +const KEEP_RESPONSE_HEADERS = [ + 'cache-control', + 'content-encoding', + 'content-language', + 'content-type', + 'date', + 'etag', + 'expires', + 'last-modified', + 'link', + 'location', + 'pragma', + 'vary', +] + +// return an object containing all metadata to be written to the index +const getMetadata = (request, response, options) => { + const metadata = { + time: Date.now(), + url: request.url, + reqHeaders: {}, + resHeaders: {}, + + // options on which we must match the request and vary the response + options: { + compress: options.compress != null ? 
options.compress : request.compress, + }, + } + + // only save the status if it's not a 200 or 304 + if (response.status !== 200 && response.status !== 304) { + metadata.status = response.status + } + + for (const name of KEEP_REQUEST_HEADERS) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + + // if the request's host header differs from the host in the url + // we need to keep it, otherwise it's just noise and we ignore it + const host = request.headers.get('host') + const parsedUrl = new url.URL(request.url) + if (host && parsedUrl.host !== host) { + metadata.reqHeaders.host = host + } + + // if the response has a vary header, make sure + // we store the relevant request headers too + if (response.headers.has('vary')) { + const vary = response.headers.get('vary') + // a vary of "*" means every header causes a different response. + // in that scenario, we do not include any additional headers + // as the freshness check will always fail anyway and we don't + // want to bloat the cache indexes + if (vary !== '*') { + // copy any other request headers that will vary the response + const varyHeaders = vary.trim().toLowerCase().split(/\s*,\s*/) + for (const name of varyHeaders) { + if (request.headers.has(name)) { + metadata.reqHeaders[name] = request.headers.get(name) + } + } + } + } + + for (const name of KEEP_RESPONSE_HEADERS) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + for (const name of options.cacheAdditionalHeaders) { + if (response.headers.has(name)) { + metadata.resHeaders[name] = response.headers.get(name) + } + } + + return metadata +} + +// symbols used to hide objects that may be lazily evaluated in a getter +const _request = Symbol('request') +const _response = Symbol('response') +const _policy = Symbol('policy') + +class CacheEntry { + constructor ({ entry, request, response, options }) { + if (entry) { + this.key = entry.key + this.entry = entry + // previous versions of this module didn't write an explicit timestamp in + // the metadata, so fall back to the entry's timestamp. 
we can't use the + // entry timestamp to determine staleness because cacache will update it + // when it verifies its data + this.entry.metadata.time = this.entry.metadata.time || this.entry.time + } else { + this.key = cacheKey(request) + } + + this.options = options + + // these properties are behind getters that lazily evaluate + this[_request] = request + this[_response] = response + this[_policy] = null + } + + // returns a CacheEntry instance that satisfies the given request + // or undefined if no existing entry satisfies + static async find (request, options) { + try { + // compacts the index and returns an array of unique entries + var matches = await cacache.index.compact(options.cachePath, cacheKey(request), (A, B) => { + const entryA = new CacheEntry({ entry: A, options }) + const entryB = new CacheEntry({ entry: B, options }) + return entryA.policy.satisfies(entryB.request) + }, { + validateEntry: (entry) => { + // clean out entries with a buggy content-encoding value + if (entry.metadata && + entry.metadata.resHeaders && + entry.metadata.resHeaders['content-encoding'] === null) { + return false + } + + // if an integrity is null, it needs to have a status specified + if (entry.integrity === null) { + return !!(entry.metadata && entry.metadata.status) + } + + return true + }, + }) + } catch (err) { + // if the compact request fails, ignore the error and return + return + } + + // a cache mode of 'reload' means to behave as though we have no cache + // on the way to the network. return undefined to allow cacheFetch to + // create a brand new request no matter what. + if (options.cache === 'reload') { + return + } + + // find the specific entry that satisfies the request + let match + for (const entry of matches) { + const _entry = new CacheEntry({ + entry, + options, + }) + + if (_entry.policy.satisfies(request)) { + match = _entry + break + } + } + + return match + } + + // if the user made a PUT/POST/PATCH then we invalidate our + // cache for the same url by deleting the index entirely + static async invalidate (request, options) { + const key = cacheKey(request) + try { + await cacache.rm.entry(options.cachePath, key, { removeFully: true }) + } catch (err) { + // ignore errors + } + } + + get request () { + if (!this[_request]) { + this[_request] = new Request(this.entry.metadata.url, { + method: 'GET', + headers: this.entry.metadata.reqHeaders, + ...this.entry.metadata.options, + }) + } + + return this[_request] + } + + get response () { + if (!this[_response]) { + this[_response] = new Response(null, { + url: this.entry.metadata.url, + counter: this.options.counter, + status: this.entry.metadata.status || 200, + headers: { + ...this.entry.metadata.resHeaders, + 'content-length': this.entry.size, + }, + }) + } + + return this[_response] + } + + get policy () { + if (!this[_policy]) { + this[_policy] = new CachePolicy({ + entry: this.entry, + request: this.request, + response: this.response, + options: this.options, + }) + } + + return this[_policy] + } + + // wraps the response in a pipeline that stores the data + // in the cache while the user consumes it + async store (status) { + // if we got a status other than 200, 301, or 308, + // or the CachePolicy forbid storage, append the + // cache status header and return it untouched + if ( + this.request.method !== 'GET' || + ![200, 301, 308].includes(this.response.status) || + !this.policy.storable() + ) { + this.response.headers.set('x-local-cache-status', 'skip') + return this.response + } + + const size = 
this.response.headers.get('content-length') + const cacheOpts = { + algorithms: this.options.algorithms, + metadata: getMetadata(this.request, this.response, this.options), + size, + integrity: this.options.integrity, + integrityEmitter: this.response.body.hasIntegrityEmitter && this.response.body, + } + + let body = null + // we only set a body if the status is a 200, redirects are + // stored as metadata only + if (this.response.status === 200) { + let cacheWriteResolve, cacheWriteReject + const cacheWritePromise = new Promise((resolve, reject) => { + cacheWriteResolve = resolve + cacheWriteReject = reject + }).catch((err) => { + body.emit('error', err) + }) + + body = new CachingMinipassPipeline({ events: ['integrity', 'size'] }, new MinipassFlush({ + flush () { + return cacheWritePromise + }, + })) + // this is always true since if we aren't reusing the one from the remote fetch, we + // are using the one from cacache + body.hasIntegrityEmitter = true + + const onResume = () => { + const tee = new Minipass() + const cacheStream = cacache.put.stream(this.options.cachePath, this.key, cacheOpts) + // re-emit the integrity and size events on our new response body so they can be reused + cacheStream.on('integrity', i => body.emit('integrity', i)) + cacheStream.on('size', s => body.emit('size', s)) + // stick a flag on here so downstream users will know if they can expect integrity events + tee.pipe(cacheStream) + // TODO if the cache write fails, log a warning but return the response anyway + // eslint-disable-next-line promise/catch-or-return + cacheStream.promise().then(cacheWriteResolve, cacheWriteReject) + body.unshift(tee) + body.unshift(this.response.body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + } else { + await cacache.index.insert(this.options.cachePath, this.key, null, cacheOpts) + } + + // note: we do not set the x-local-cache-hash header because we do not know + // the hash value until after the write to the cache completes, which doesn't + // happen until after the response has been sent and it's too late to write + // the header anyway + this.response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + this.response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + this.response.headers.set('x-local-cache-mode', 'stream') + this.response.headers.set('x-local-cache-status', status) + this.response.headers.set('x-local-cache-time', new Date().toISOString()) + const newResponse = new Response(body, { + url: this.response.url, + status: this.response.status, + headers: this.response.headers, + counter: this.options.counter, + }) + return newResponse + } + + // use the cached data to create a response and return it + async respond (method, options, status) { + let response + if (method === 'HEAD' || [301, 308].includes(this.response.status)) { + // if the request is a HEAD, or the response is a redirect, + // then the metadata in the entry already includes everything + // we need to build a response + response = this.response + } else { + // we're responding with a full cached response, so create a body + // that reads from cacache and attach it to a new Response + const body = new Minipass() + const headers = { ...this.policy.responseHeaders() } + + const onResume = () => { + const cacheStream = cacache.get.stream.byDigest( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + cacheStream.on('error', async (err) => { + cacheStream.pause() + if 
(err.code === 'EINTEGRITY') { + await cacache.rm.content( + this.options.cachePath, this.entry.integrity, { memoize: this.options.memoize } + ) + } + if (err.code === 'ENOENT' || err.code === 'EINTEGRITY') { + await CacheEntry.invalidate(this.request, this.options) + } + body.emit('error', err) + cacheStream.resume() + }) + // emit the integrity and size events based on our metadata so we're consistent + body.emit('integrity', this.entry.integrity) + body.emit('size', Number(headers['content-length'])) + cacheStream.pipe(body) + } + + body.once('resume', onResume) + body.once('end', () => body.removeListener('resume', onResume)) + response = new Response(body, { + url: this.entry.metadata.url, + counter: options.counter, + status: 200, + headers, + }) + } + + response.headers.set('x-local-cache', encodeURIComponent(this.options.cachePath)) + response.headers.set('x-local-cache-hash', encodeURIComponent(this.entry.integrity)) + response.headers.set('x-local-cache-key', encodeURIComponent(this.key)) + response.headers.set('x-local-cache-mode', 'stream') + response.headers.set('x-local-cache-status', status) + response.headers.set('x-local-cache-time', new Date(this.entry.metadata.time).toUTCString()) + return response + } + + // use the provided request along with this cache entry to + // revalidate the stored response. returns a response, either + // from the cache or from the update + async revalidate (request, options) { + const revalidateRequest = new Request(request, { + headers: this.policy.revalidationHeaders(request), + }) + + try { + // NOTE: be sure to remove the headers property from the + // user supplied options, since we have already defined + // them on the new request object. if they're still in the + // options then those will overwrite the ones from the policy + var response = await remote(revalidateRequest, { + ...options, + headers: undefined, + }) + } catch (err) { + // if the network fetch fails, return the stale + // cached response unless it has a cache-control + // of 'must-revalidate' + if (!this.policy.mustRevalidate) { + return this.respond(request.method, options, 'stale') + } + + throw err + } + + if (this.policy.revalidated(revalidateRequest, response)) { + // we got a 304, write a new index to the cache and respond from cache + const metadata = getMetadata(request, response, options) + // 304 responses do not include headers that are specific to the response data + // since they do not include a body, so we copy values for headers that were + // in the old cache entry to the new one, if the new metadata does not already + // include that header + for (const name of KEEP_RESPONSE_HEADERS) { + if ( + !hasOwnProperty(metadata.resHeaders, name) && + hasOwnProperty(this.entry.metadata.resHeaders, name) + ) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + } + + for (const name of options.cacheAdditionalHeaders) { + const inMeta = hasOwnProperty(metadata.resHeaders, name) + const inEntry = hasOwnProperty(this.entry.metadata.resHeaders, name) + const inPolicy = hasOwnProperty(this.policy.response.headers, name) + + // if the header is in the existing entry, but it is not in the metadata + // then we need to write it to the metadata as this will refresh the on-disk cache + if (!inMeta && inEntry) { + metadata.resHeaders[name] = this.entry.metadata.resHeaders[name] + } + // if the header is in the metadata, but not in the policy, then we need to set + // it in the policy so that it's included in the immediate response. 
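+ // (illustrative example, header name not from this codebase: if
+ // cacheAdditionalHeaders includes 'x-fetch-source' and the stored entry
+ // carries it but the fresh 304 metadata does not, the first branch copies
+ // it into metadata.resHeaders, and this second branch then mirrors it onto
+ // this.policy.response.headers so the response returned right now sees it.)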
future + // responses will load a new cache entry, so we don't need to change that + if (!inPolicy && inMeta) { + this.policy.response.headers[name] = metadata.resHeaders[name] + } + } + + try { + await cacache.index.insert(options.cachePath, this.key, this.entry.integrity, { + size: this.entry.size, + metadata, + }) + } catch (err) { + // if updating the cache index fails, we ignore it and + // respond anyway + } + return this.respond(request.method, options, 'revalidated') + } + + // if we got a modified response, create a new entry based on it + const newEntry = new CacheEntry({ + request, + response, + options, + }) + + // respond with the new entry while writing it to the cache + return newEntry.store('updated') + } +} + +module.exports = CacheEntry diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js new file mode 100644 index 00000000000000..67a66573bebe66 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/errors.js @@ -0,0 +1,11 @@ +class NotCachedError extends Error { + constructor (url) { + /* eslint-disable-next-line max-len */ + super(`request to ${url} failed: cache mode is 'only-if-cached' but no cached response is available.`) + this.code = 'ENOTCACHED' + } +} + +module.exports = { + NotCachedError, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js new file mode 100644 index 00000000000000..0de49d23fb9336 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/index.js @@ -0,0 +1,49 @@ +const { NotCachedError } = require('./errors.js') +const CacheEntry = require('./entry.js') +const remote = require('../remote.js') + +// do whatever is necessary to get a Response and return it +const cacheFetch = async (request, options) => { + // try to find a cached entry that satisfies this request + const entry = await CacheEntry.find(request, options) + if (!entry) { + // no cached result, if the cache mode is 'only-if-cached' that's a failure + if (options.cache === 'only-if-cached') { + throw new NotCachedError(request.url) + } + + // otherwise, we make a request, store it and return it + const response = await remote(request, options) + const newEntry = new CacheEntry({ request, response, options }) + return newEntry.store('miss') + } + + // we have a cached response that satisfies this request, however if the cache + // mode is 'no-cache' then we send the revalidation request no matter what + if (options.cache === 'no-cache') { + return entry.revalidate(request, options) + } + + // if the cached entry is not stale, or if the cache mode is 'force-cache' or + // 'only-if-cached' we can respond with the cached entry. set the status + // based on the result of needsRevalidation and respond + const _needsRevalidation = entry.policy.needsRevalidation(request) + if (options.cache === 'force-cache' || + options.cache === 'only-if-cached' || + !_needsRevalidation) { + return entry.respond(request.method, options, _needsRevalidation ? 
'stale' : 'hit') + } + + // if we got here, the cache entry is stale so revalidate it + return entry.revalidate(request, options) +} + +cacheFetch.invalidate = async (request, options) => { + if (!options.cachePath) { + return + } + + return CacheEntry.invalidate(request, options) +} + +module.exports = cacheFetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js new file mode 100644 index 00000000000000..f7684d562b7fae --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/key.js @@ -0,0 +1,17 @@ +const { URL, format } = require('url') + +// options passed to url.format() when generating a key +const formatOptions = { + auth: false, + fragment: false, + search: true, + unicode: false, +} + +// returns a string to be used as the cache key for the Request +const cacheKey = (request) => { + const parsed = new URL(request.url) + return `make-fetch-happen:request-cache:${format(parsed, formatOptions)}` +} + +module.exports = cacheKey diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js new file mode 100644 index 00000000000000..ada3c8600dae92 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/cache/policy.js @@ -0,0 +1,161 @@ +const CacheSemantics = require('http-cache-semantics') +const Negotiator = require('negotiator') +const ssri = require('ssri') + +// options passed to http-cache-semantics constructor +const policyOptions = { + shared: false, + ignoreCargoCult: true, +} + +// a fake empty response, used when only testing the +// request for storability +const emptyResponse = { status: 200, headers: {} } + +// returns a plain object representation of the Request +const requestObject = (request) => { + const _obj = { + method: request.method, + url: request.url, + headers: {}, + compress: request.compress, + } + + request.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +// returns a plain object representation of the Response +const responseObject = (response) => { + const _obj = { + status: response.status, + headers: {}, + } + + response.headers.forEach((value, key) => { + _obj.headers[key] = value + }) + + return _obj +} + +class CachePolicy { + constructor ({ entry, request, response, options }) { + this.entry = entry + this.request = requestObject(request) + this.response = responseObject(response) + this.options = options + this.policy = new CacheSemantics(this.request, this.response, policyOptions) + + if (this.entry) { + // if we have an entry, copy the timestamp to the _responseTime + // this is necessary because the CacheSemantics constructor forces + // the value to Date.now() which means a policy created from a + // cache entry is likely to always identify itself as stale + this.policy._responseTime = this.entry.metadata.time + } + } + + // static method to quickly determine if a request alone is storable + static storable (request, options) { + // no cachePath means no caching + if (!options.cachePath) { + return false + } + + // user explicitly asked not to cache + if (options.cache === 'no-store') { + return false + } + + // we only cache GET and HEAD requests + if (!['GET', 'HEAD'].includes(request.method)) { + return false + } + + // otherwise, let http-cache-semantics make the decision + // based on the request's headers + const 
policy = new CacheSemantics(requestObject(request), emptyResponse, policyOptions) + return policy.storable() + } + + // returns true if the policy satisfies the request + satisfies (request) { + const _req = requestObject(request) + if (this.request.headers.host !== _req.headers.host) { + return false + } + + if (this.request.compress !== _req.compress) { + return false + } + + const negotiatorA = new Negotiator(this.request) + const negotiatorB = new Negotiator(_req) + + if (JSON.stringify(negotiatorA.mediaTypes()) !== JSON.stringify(negotiatorB.mediaTypes())) { + return false + } + + if (JSON.stringify(negotiatorA.languages()) !== JSON.stringify(negotiatorB.languages())) { + return false + } + + if (JSON.stringify(negotiatorA.encodings()) !== JSON.stringify(negotiatorB.encodings())) { + return false + } + + if (this.options.integrity) { + return ssri.parse(this.options.integrity).match(this.entry.integrity) + } + + return true + } + + // returns true if the request and response allow caching + storable () { + return this.policy.storable() + } + + // NOTE: this is a hack to avoid parsing the cache-control + // header ourselves, it returns true if the response's + // cache-control contains must-revalidate + get mustRevalidate () { + return !!this.policy._rescc['must-revalidate'] + } + + // returns true if the cached response requires revalidation + // for the given request + needsRevalidation (request) { + const _req = requestObject(request) + // force method to GET because we only cache GETs + // but can serve a HEAD from a cached GET + _req.method = 'GET' + return !this.policy.satisfiesWithoutRevalidation(_req) + } + + responseHeaders () { + return this.policy.responseHeaders() + } + + // returns a new object containing the appropriate headers + // to send a revalidation request + revalidationHeaders (request) { + const _req = requestObject(request) + return this.policy.revalidationHeaders(_req) + } + + // returns true if the request/response was revalidated + // successfully. returns false if a new response was received + revalidated (request, response) { + const _req = requestObject(request) + const _res = responseObject(response) + const policy = this.policy.revalidatedPolicy(_req, _res) + return !policy.modified + } +} + +module.exports = CachePolicy diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js new file mode 100644 index 00000000000000..233ba67e165502 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/fetch.js @@ -0,0 +1,118 @@ +'use strict' + +const { FetchError, Request, isRedirect } = require('minipass-fetch') +const url = require('url') + +const CachePolicy = require('./cache/policy.js') +const cache = require('./cache/index.js') +const remote = require('./remote.js') + +// given a Request, a Response and user options +// return true if the response is a redirect that +// can be followed. 
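+// (summary of the failure modes handled below: redirect mode 'error' rejects
+// with ENOREDIRECT, a missing location header rejects with EINVALIDREDIRECT,
+// and running past request.follow rejects with EMAXREDIRECT)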
we throw errors that will result +// in the fetch being rejected if the redirect is +// possible but invalid for some reason +const canFollowRedirect = (request, response, options) => { + if (!isRedirect(response.status)) { + return false + } + + if (options.redirect === 'manual') { + return false + } + + if (options.redirect === 'error') { + throw new FetchError(`redirect mode is set to error: ${request.url}`, + 'no-redirect', { code: 'ENOREDIRECT' }) + } + + if (!response.headers.has('location')) { + throw new FetchError(`redirect location header missing for: ${request.url}`, + 'no-location', { code: 'EINVALIDREDIRECT' }) + } + + if (request.counter >= request.follow) { + throw new FetchError(`maximum redirect reached at: ${request.url}`, + 'max-redirect', { code: 'EMAXREDIRECT' }) + } + + return true +} + +// given a Request, a Response, and the user's options return an object +// with a new Request and a new options object that will be used for +// following the redirect +const getRedirect = (request, response, options) => { + const _opts = { ...options } + const location = response.headers.get('location') + const redirectUrl = new url.URL(location, /^https?:/.test(location) ? undefined : request.url) + // Comment below is used under the following license: + /** + * @license + * Copyright (c) 2010-2012 Mikeal Rogers + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * http://www.apache.org/licenses/LICENSE-2.0 + * Unless required by applicable law or agreed to in writing, + * software distributed under the License is distributed on an "AS + * IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either + * express or implied. See the License for the specific language + * governing permissions and limitations under the License. + */ + + // Remove authorization if changing hostnames (but not if just + // changing ports or protocols). This matches the behavior of request: + // https://github.com/request/request/blob/b12a6245/lib/redirect.js#L134-L138 + if (new url.URL(request.url).hostname !== redirectUrl.hostname) { + request.headers.delete('authorization') + request.headers.delete('cookie') + } + + // for POST request with 301/302 response, or any request with 303 response, + // use GET when following redirect + if ( + response.status === 303 || + (request.method === 'POST' && [301, 302].includes(response.status)) + ) { + _opts.method = 'GET' + _opts.body = null + request.headers.delete('content-length') + } + + _opts.headers = {} + request.headers.forEach((value, key) => { + _opts.headers[key] = value + }) + + _opts.counter = ++request.counter + const redirectReq = new Request(url.format(redirectUrl), _opts) + return { + request: redirectReq, + options: _opts, + } +} + +const fetch = async (request, options) => { + const response = CachePolicy.storable(request, options) + ? 
await cache(request, options) + : await remote(request, options) + + // if the request wasn't a GET or HEAD, and the response + // status is between 200 and 399 inclusive, invalidate the + // request url + if (!['GET', 'HEAD'].includes(request.method) && + response.status >= 200 && + response.status <= 399) { + await cache.invalidate(request, options) + } + + if (!canFollowRedirect(request, response, options)) { + return response + } + + const redirect = getRedirect(request, response, options) + return fetch(redirect.request, redirect.options) +} + +module.exports = fetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js new file mode 100644 index 00000000000000..2f12e8e1b61131 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/index.js @@ -0,0 +1,41 @@ +const { FetchError, Headers, Request, Response } = require('minipass-fetch') + +const configureOptions = require('./options.js') +const fetch = require('./fetch.js') + +const makeFetchHappen = (url, opts) => { + const options = configureOptions(opts) + + const request = new Request(url, options) + return fetch(request, options) +} + +makeFetchHappen.defaults = (defaultUrl, defaultOptions = {}, wrappedFetch = makeFetchHappen) => { + if (typeof defaultUrl === 'object') { + defaultOptions = defaultUrl + defaultUrl = null + } + + const defaultedFetch = (url, options = {}) => { + const finalUrl = url || defaultUrl + const finalOptions = { + ...defaultOptions, + ...options, + headers: { + ...defaultOptions.headers, + ...options.headers, + }, + } + return wrappedFetch(finalUrl, finalOptions) + } + + defaultedFetch.defaults = (defaultUrl1, defaultOptions1 = {}) => + makeFetchHappen.defaults(defaultUrl1, defaultOptions1, defaultedFetch) + return defaultedFetch +} + +module.exports = makeFetchHappen +module.exports.FetchError = FetchError +module.exports.Headers = Headers +module.exports.Request = Request +module.exports.Response = Response diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js new file mode 100644 index 00000000000000..f77511279f831d --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/options.js @@ -0,0 +1,54 @@ +const dns = require('dns') + +const conditionalHeaders = [ + 'if-modified-since', + 'if-none-match', + 'if-unmodified-since', + 'if-match', + 'if-range', +] + +const configureOptions = (opts) => { + const { strictSSL, ...options } = { ...opts } + options.method = options.method ? 
options.method.toUpperCase() : 'GET' + options.rejectUnauthorized = strictSSL !== false + + if (!options.retry) { + options.retry = { retries: 0 } + } else if (typeof options.retry === 'string') { + const retries = parseInt(options.retry, 10) + if (isFinite(retries)) { + options.retry = { retries } + } else { + options.retry = { retries: 0 } + } + } else if (typeof options.retry === 'number') { + options.retry = { retries: options.retry } + } else { + options.retry = { retries: 0, ...options.retry } + } + + options.dns = { ttl: 5 * 60 * 1000, lookup: dns.lookup, ...options.dns } + + options.cache = options.cache || 'default' + if (options.cache === 'default') { + const hasConditionalHeader = Object.keys(options.headers || {}).some((name) => { + return conditionalHeaders.includes(name.toLowerCase()) + }) + if (hasConditionalHeader) { + options.cache = 'no-store' + } + } + + options.cacheAdditionalHeaders = options.cacheAdditionalHeaders || [] + + // cacheManager is deprecated, but if it's set and + // cachePath is not we should copy it to the new field + if (options.cacheManager && !options.cachePath) { + options.cachePath = options.cacheManager + } + + return options +} + +module.exports = configureOptions diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js new file mode 100644 index 00000000000000..b1d221b2d0ce31 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/pipeline.js @@ -0,0 +1,41 @@ +'use strict' + +const MinipassPipeline = require('minipass-pipeline') + +class CachingMinipassPipeline extends MinipassPipeline { + #events = [] + #data = new Map() + + constructor (opts, ...streams) { + // CRITICAL: do NOT pass the streams to the call to super(), this will start + // the flow of data and potentially cause the events we need to catch to emit + // before we've finished our own setup. 
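+ // (sketch of the hazard being avoided, under the assumption that a source
+ // stream emits 'integrity' as soon as it is piped: passing the streams to
+ // super() would start that pipe before #events is assigned, so the emit()
+ // override below could not cache the event for late subscribers)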
instead we call super() with no args, + // finish our setup, and then push the streams into ourselves to start the + // data flow + super() + this.#events = opts.events + + /* istanbul ignore next - coverage disabled because this is pointless to test here */ + if (streams.length) { + this.push(...streams) + } + } + + on (event, handler) { + if (this.#events.includes(event) && this.#data.has(event)) { + return handler(...this.#data.get(event)) + } + + return super.on(event, handler) + } + + emit (event, ...data) { + if (this.#events.includes(event)) { + this.#data.set(event, data) + } + + return super.emit(event, ...data) + } +} + +module.exports = CachingMinipassPipeline diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js new file mode 100644 index 00000000000000..8554564074de6e --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/lib/remote.js @@ -0,0 +1,131 @@ +const { Minipass } = require('minipass') +const fetch = require('minipass-fetch') +const promiseRetry = require('promise-retry') +const ssri = require('ssri') +const { log } = require('proc-log') + +const CachingMinipassPipeline = require('./pipeline.js') +const { getAgent } = require('@npmcli/agent') +const pkg = require('../package.json') + +const USER_AGENT = `${pkg.name}/${pkg.version} (+https://npm.im/${pkg.name})` + +const RETRY_ERRORS = [ + 'ECONNRESET', // remote socket closed on us + 'ECONNREFUSED', // remote host refused to open connection + 'EADDRINUSE', // failed to bind to a local port (proxy?) + 'ETIMEDOUT', // someone in the transaction is WAY TOO SLOW + // from @npmcli/agent + 'ECONNECTIONTIMEOUT', + 'EIDLETIMEOUT', + 'ERESPONSETIMEOUT', + 'ETRANSFERTIMEOUT', + // Known codes we do NOT retry on: + // ENOTFOUND (getaddrinfo failure. Either bad hostname, or offline) + // EINVALIDPROXY // invalid protocol from @npmcli/agent + // EINVALIDRESPONSE // invalid status code from @npmcli/agent +] + +const RETRY_TYPES = [ + 'request-timeout', +] + +// make a request directly to the remote source, +// retrying certain classes of errors as well as +// following redirects (through the cache if necessary) +// and verifying response integrity +const remoteFetch = (request, options) => { + const agent = getAgent(request.url, options) + if (!request.headers.has('connection')) { + request.headers.set('connection', agent ? 
'keep-alive' : 'close') + } + + if (!request.headers.has('user-agent')) { + request.headers.set('user-agent', USER_AGENT) + } + + // keep our own options since we're overriding the agent + // and the redirect mode + const _opts = { + ...options, + agent, + redirect: 'manual', + } + + return promiseRetry(async (retryHandler, attemptNum) => { + const req = new fetch.Request(request, _opts) + try { + let res = await fetch(req, _opts) + if (_opts.integrity && res.status === 200) { + // we got a 200 response and the user has specified an expected + // integrity value, so wrap the response in an ssri stream to verify it + const integrityStream = ssri.integrityStream({ + algorithms: _opts.algorithms, + integrity: _opts.integrity, + size: _opts.size, + }) + const pipeline = new CachingMinipassPipeline({ + events: ['integrity', 'size'], + }, res.body, integrityStream) + // we also propagate the integrity and size events out to the pipeline so we can use + // this new response body as an integrityEmitter for cacache + integrityStream.on('integrity', i => pipeline.emit('integrity', i)) + integrityStream.on('size', s => pipeline.emit('size', s)) + res = new fetch.Response(pipeline, res) + // set an explicit flag so we know if our response body will emit integrity and size + res.body.hasIntegrityEmitter = true + } + + res.headers.set('x-fetch-attempts', attemptNum) + + // do not retry POST requests, or requests with a streaming body + // do retry requests with a 408, 420, 429 or 500+ status in the response + const isStream = Minipass.isStream(req.body) + const isRetriable = req.method !== 'POST' && + !isStream && + ([408, 420, 429].includes(res.status) || res.status >= 500) + + if (isRetriable) { + if (typeof options.onRetry === 'function') { + options.onRetry(res) + } + + /* eslint-disable-next-line max-len */ + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${res.status}`) + return retryHandler(res) + } + + return res + } catch (err) { + const code = (err.code === 'EPROMISERETRY') + ? 
err.retried.code + : err.code + + // err.retried will be the thing that was thrown from above + // if it's a response, we just got a bad status code and we + // can re-throw to allow the retry + const isRetryError = err.retried instanceof fetch.Response || + (RETRY_ERRORS.includes(code) && RETRY_TYPES.includes(err.type)) + + if (req.method === 'POST' || isRetryError) { + throw err + } + + if (typeof options.onRetry === 'function') { + options.onRetry(err) + } + + log.http('fetch', `${req.method} ${req.url} attempt ${attemptNum} failed with ${err.code}`) + return retryHandler(err) + } + }, options.retry).catch((err) => { + // don't reject for http errors, just return them + if (err.status >= 400 && err.type !== 'system') { + return err + } + + throw err + }) +} + +module.exports = remoteFetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json new file mode 100644 index 00000000000000..7adb4d1e7f9719 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/make-fetch-happen/package.json @@ -0,0 +1,75 @@ +{ + "name": "make-fetch-happen", + "version": "13.0.1", + "description": "Opinionated, caching, retrying fetch client", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "test": "tap", + "posttest": "npm run lint", + "eslint": "eslint", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lintfix": "npm run lint -- --fix", + "postlint": "template-oss-check", + "snap": "tap", + "template-oss-apply": "template-oss-apply --force" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/make-fetch-happen.git" + }, + "keywords": [ + "http", + "request", + "fetch", + "mean girls", + "caching", + "cache", + "subresource integrity" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "@npmcli/agent": "^2.0.0", + "cacache": "^18.0.0", + "http-cache-semantics": "^4.1.1", + "is-lambda": "^1.0.1", + "minipass": "^7.0.2", + "minipass-fetch": "^3.0.0", + "minipass-flush": "^1.0.5", + "minipass-pipeline": "^1.2.4", + "negotiator": "^0.6.3", + "proc-log": "^4.2.0", + "promise-retry": "^2.0.1", + "ssri": "^10.0.0" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.4", + "nock": "^13.2.4", + "safe-buffer": "^5.2.1", + "standard-version": "^9.3.2", + "tap": "^16.0.0" + }, + "engines": { + "node": "^16.14.0 || >=18.0.0" + }, + "tap": { + "color": 1, + "files": "test/*.js", + "check-coverage": true, + "timeout": 60, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.4", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE new file mode 100644 index 00000000000000..3c3410cdc12ee3 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/LICENSE @@ -0,0 +1,28 @@ +The MIT License (MIT) + +Copyright (c) Isaac Z. 
Schlueter and Contributors +Copyright (c) 2016 David Frank + +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: + +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. + +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. + +--- + +Note: This is a derivative work based on "node-fetch" by David Frank, +modified and distributed under the terms of the MIT license above. +https://github.com/bitinn/node-fetch diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js new file mode 100644 index 00000000000000..b18f643269e375 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/abort-error.js @@ -0,0 +1,17 @@ +'use strict' +class AbortError extends Error { + constructor (message) { + super(message) + this.code = 'FETCH_ABORTED' + this.type = 'aborted' + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'AbortError' + } + + // don't allow name to be overridden, but don't throw either + set name (s) {} +} +module.exports = AbortError diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js new file mode 100644 index 00000000000000..121b1730102e72 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/blob.js @@ -0,0 +1,97 @@ +'use strict' +const { Minipass } = require('minipass') +const TYPE = Symbol('type') +const BUFFER = Symbol('buffer') + +class Blob { + constructor (blobParts, options) { + this[TYPE] = '' + + const buffers = [] + let size = 0 + + if (blobParts) { + const a = blobParts + const length = Number(a.length) + for (let i = 0; i < length; i++) { + const element = a[i] + const buffer = element instanceof Buffer ? element + : ArrayBuffer.isView(element) + ? Buffer.from(element.buffer, element.byteOffset, element.byteLength) + : element instanceof ArrayBuffer ? Buffer.from(element) + : element instanceof Blob ? element[BUFFER] + : typeof element === 'string' ? 
Buffer.from(element) + : Buffer.from(String(element)) + size += buffer.length + buffers.push(buffer) + } + } + + this[BUFFER] = Buffer.concat(buffers, size) + + const type = options && options.type !== undefined + && String(options.type).toLowerCase() + if (type && !/[^\u0020-\u007E]/.test(type)) { + this[TYPE] = type + } + } + + get size () { + return this[BUFFER].length + } + + get type () { + return this[TYPE] + } + + text () { + return Promise.resolve(this[BUFFER].toString()) + } + + arrayBuffer () { + const buf = this[BUFFER] + const off = buf.byteOffset + const len = buf.byteLength + const ab = buf.buffer.slice(off, off + len) + return Promise.resolve(ab) + } + + stream () { + return new Minipass().end(this[BUFFER]) + } + + slice (start, end, type) { + const size = this.size + const relativeStart = start === undefined ? 0 + : start < 0 ? Math.max(size + start, 0) + : Math.min(start, size) + const relativeEnd = end === undefined ? size + : end < 0 ? Math.max(size + end, 0) + : Math.min(end, size) + const span = Math.max(relativeEnd - relativeStart, 0) + + const buffer = this[BUFFER] + const slicedBuffer = buffer.slice( + relativeStart, + relativeStart + span + ) + const blob = new Blob([], { type }) + blob[BUFFER] = slicedBuffer + return blob + } + + get [Symbol.toStringTag] () { + return 'Blob' + } + + static get BUFFER () { + return BUFFER + } +} + +Object.defineProperties(Blob.prototype, { + size: { enumerable: true }, + type: { enumerable: true }, +}) + +module.exports = Blob diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js new file mode 100644 index 00000000000000..62286bd1de0d91 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/body.js @@ -0,0 +1,350 @@ +'use strict' +const { Minipass } = require('minipass') +const MinipassSized = require('minipass-sized') + +const Blob = require('./blob.js') +const { BUFFER } = Blob +const FetchError = require('./fetch-error.js') + +// optional dependency on 'encoding' +let convert +try { + convert = require('encoding').convert +} catch (e) { + // defer error until textConverted is called +} + +const INTERNALS = Symbol('Body internals') +const CONSUME_BODY = Symbol('consumeBody') + +class Body { + constructor (bodyArg, options = {}) { + const { size = 0, timeout = 0 } = options + const body = bodyArg === undefined || bodyArg === null ? null + : isURLSearchParams(bodyArg) ? Buffer.from(bodyArg.toString()) + : isBlob(bodyArg) ? bodyArg + : Buffer.isBuffer(bodyArg) ? bodyArg + : Object.prototype.toString.call(bodyArg) === '[object ArrayBuffer]' + ? Buffer.from(bodyArg) + : ArrayBuffer.isView(bodyArg) + ? Buffer.from(bodyArg.buffer, bodyArg.byteOffset, bodyArg.byteLength) + : Minipass.isStream(bodyArg) ? bodyArg + : Buffer.from(String(bodyArg)) + + this[INTERNALS] = { + body, + disturbed: false, + error: null, + } + + this.size = size + this.timeout = timeout + + if (Minipass.isStream(body)) { + body.on('error', er => { + const error = er.name === 'AbortError' ? 
er + : new FetchError(`Invalid response while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + this[INTERNALS].error = error + }) + } + } + + get body () { + return this[INTERNALS].body + } + + get bodyUsed () { + return this[INTERNALS].disturbed + } + + arrayBuffer () { + return this[CONSUME_BODY]().then(buf => + buf.buffer.slice(buf.byteOffset, buf.byteOffset + buf.byteLength)) + } + + blob () { + const ct = this.headers && this.headers.get('content-type') || '' + return this[CONSUME_BODY]().then(buf => Object.assign( + new Blob([], { type: ct.toLowerCase() }), + { [BUFFER]: buf } + )) + } + + async json () { + const buf = await this[CONSUME_BODY]() + try { + return JSON.parse(buf.toString()) + } catch (er) { + throw new FetchError( + `invalid json response body at ${this.url} reason: ${er.message}`, + 'invalid-json' + ) + } + } + + text () { + return this[CONSUME_BODY]().then(buf => buf.toString()) + } + + buffer () { + return this[CONSUME_BODY]() + } + + textConverted () { + return this[CONSUME_BODY]().then(buf => convertBody(buf, this.headers)) + } + + [CONSUME_BODY] () { + if (this[INTERNALS].disturbed) { + return Promise.reject(new TypeError(`body used already for: ${ + this.url}`)) + } + + this[INTERNALS].disturbed = true + + if (this[INTERNALS].error) { + return Promise.reject(this[INTERNALS].error) + } + + // body is null + if (this.body === null) { + return Promise.resolve(Buffer.alloc(0)) + } + + if (Buffer.isBuffer(this.body)) { + return Promise.resolve(this.body) + } + + const upstream = isBlob(this.body) ? this.body.stream() : this.body + + /* istanbul ignore if: should never happen */ + if (!Minipass.isStream(upstream)) { + return Promise.resolve(Buffer.alloc(0)) + } + + const stream = this.size && upstream instanceof MinipassSized ? upstream + : !this.size && upstream instanceof Minipass && + !(upstream instanceof MinipassSized) ? upstream + : this.size ? new MinipassSized({ size: this.size }) + : new Minipass() + + // allow timeout on slow response body, but only if the stream is still writable. this + // makes the timeout center on the socket stream from lib/index.js rather than the + // intermediary minipass stream we create to receive the data + const resTimeout = this.timeout && stream.writable ? setTimeout(() => { + stream.emit('error', new FetchError( + `Response timeout while trying to fetch ${ + this.url} (over ${this.timeout}ms)`, 'body-timeout')) + }, this.timeout) : null + + // do not keep the process open just for this timeout, even + // though we expect it'll get cleared eventually. + if (resTimeout && resTimeout.unref) { + resTimeout.unref() + } + + // do the pipe in the promise, because the pipe() can send too much + // data through right away and upset the MP Sized object + return new Promise((resolve) => { + // if the stream is some other kind of stream, then pipe through a MP + // so we can collect it more easily. 
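+ // (illustrative consumption, not part of this module; someReadable is an
+ // assumed name:
+ //   const body = new Body(someReadable, { size: 1024 })
+ //   const buf = await body.buffer()
+ // lands here with upstream === someReadable and stream being the
+ // MinipassSized created above, which errors out if more than `size` bytes
+ // arrive)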
+ if (stream !== upstream) { + upstream.on('error', er => stream.emit('error', er)) + upstream.pipe(stream) + } + resolve() + }).then(() => stream.concat()).then(buf => { + clearTimeout(resTimeout) + return buf + }).catch(er => { + clearTimeout(resTimeout) + // request was aborted, reject with this Error + if (er.name === 'AbortError' || er.name === 'FetchError') { + throw er + } else if (er.name === 'RangeError') { + throw new FetchError(`Could not create Buffer from response body for ${ + this.url}: ${er.message}`, 'system', er) + } else { + // other errors, such as incorrect content-encoding or content-length + throw new FetchError(`Invalid response body while trying to fetch ${ + this.url}: ${er.message}`, 'system', er) + } + }) + } + + static clone (instance) { + if (instance.bodyUsed) { + throw new Error('cannot clone body after it is used') + } + + const body = instance.body + + // check that body is a stream and not form-data object + // NB: can't clone the form-data object without having it as a dependency + if (Minipass.isStream(body) && typeof body.getBoundary !== 'function') { + // create a dedicated tee stream so that we don't lose data + // potentially sitting in the body stream's buffer by writing it + // immediately to p1 and not having it for p2. + const tee = new Minipass() + const p1 = new Minipass() + const p2 = new Minipass() + tee.on('error', er => { + p1.emit('error', er) + p2.emit('error', er) + }) + body.on('error', er => tee.emit('error', er)) + tee.pipe(p1) + tee.pipe(p2) + body.pipe(tee) + // set instance body to one fork, return the other + instance[INTERNALS].body = p1 + return p2 + } else { + return instance.body + } + } + + static extractContentType (body) { + return body === null || body === undefined ? null + : typeof body === 'string' ? 'text/plain;charset=UTF-8' + : isURLSearchParams(body) + ? 'application/x-www-form-urlencoded;charset=UTF-8' + : isBlob(body) ? body.type || null + : Buffer.isBuffer(body) ? null + : Object.prototype.toString.call(body) === '[object ArrayBuffer]' ? null + : ArrayBuffer.isView(body) ? null + : typeof body.getBoundary === 'function' + ? `multipart/form-data;boundary=${body.getBoundary()}` + : Minipass.isStream(body) ? null + : 'text/plain;charset=UTF-8' + } + + static getTotalBytes (instance) { + const { body } = instance + return (body === null || body === undefined) ? 0 + : isBlob(body) ? body.size + : Buffer.isBuffer(body) ? body.length + : body && typeof body.getLengthSync === 'function' && ( + // detect form data input from form-data module + body._lengthRetrievers && + /* istanbul ignore next */ body._lengthRetrievers.length === 0 || // 1.x + body.hasKnownLength && body.hasKnownLength()) // 2.x + ? body.getLengthSync() + : null + } + + static writeToStream (dest, instance) { + const { body } = instance + + if (body === null || body === undefined) { + dest.end() + } else if (Buffer.isBuffer(body) || typeof body === 'string') { + dest.end(body) + } else { + // body is stream or blob + const stream = isBlob(body) ? body.stream() : body + stream.on('error', er => dest.emit('error', er)).pipe(dest) + } + + return dest + } +} + +Object.defineProperties(Body.prototype, { + body: { enumerable: true }, + bodyUsed: { enumerable: true }, + arrayBuffer: { enumerable: true }, + blob: { enumerable: true }, + json: { enumerable: true }, + text: { enumerable: true }, +}) + +const isURLSearchParams = obj => + // Duck-typing as a necessary condition. 
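+ // (e.g. new URLSearchParams('a=1') passes both stages, an object missing
+ // any of the methods below is rejected immediately, and a stub that has
+ // all of them but carries no URLSearchParams brand and no sort() still
+ // returns false; values illustrative)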
+ (typeof obj !== 'object' || + typeof obj.append !== 'function' || + typeof obj.delete !== 'function' || + typeof obj.get !== 'function' || + typeof obj.getAll !== 'function' || + typeof obj.has !== 'function' || + typeof obj.set !== 'function') ? false + // Brand-checking and more duck-typing as optional condition. + : obj.constructor.name === 'URLSearchParams' || + Object.prototype.toString.call(obj) === '[object URLSearchParams]' || + typeof obj.sort === 'function' + +const isBlob = obj => + typeof obj === 'object' && + typeof obj.arrayBuffer === 'function' && + typeof obj.type === 'string' && + typeof obj.stream === 'function' && + typeof obj.constructor === 'function' && + typeof obj.constructor.name === 'string' && + /^(Blob|File)$/.test(obj.constructor.name) && + /^(Blob|File)$/.test(obj[Symbol.toStringTag]) + +const convertBody = (buffer, headers) => { + /* istanbul ignore if */ + if (typeof convert !== 'function') { + throw new Error('The package `encoding` must be installed to use the textConverted() function') + } + + const ct = headers && headers.get('content-type') + let charset = 'utf-8' + let res + + // header + if (ct) { + res = /charset=([^;]*)/i.exec(ct) + } + + // no charset in content type, peek at response body for at most 1024 bytes + const str = buffer.slice(0, 1024).toString() + + // html5 + if (!res && str) { + res = / this.expect + ? 'max-size' : type + this.message = message + Error.captureStackTrace(this, this.constructor) + } + + get name () { + return 'FetchError' + } + + // don't allow name to be overwritten + set name (n) {} + + get [Symbol.toStringTag] () { + return 'FetchError' + } +} +module.exports = FetchError diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js new file mode 100644 index 00000000000000..dd6e854d5ba399 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/headers.js @@ -0,0 +1,267 @@ +'use strict' +const invalidTokenRegex = /[^^_`a-zA-Z\-0-9!#$%&'*+.|~]/ +const invalidHeaderCharRegex = /[^\t\x20-\x7e\x80-\xff]/ + +const validateName = name => { + name = `${name}` + if (invalidTokenRegex.test(name) || name === '') { + throw new TypeError(`${name} is not a legal HTTP header name`) + } +} + +const validateValue = value => { + value = `${value}` + if (invalidHeaderCharRegex.test(value)) { + throw new TypeError(`${value} is not a legal HTTP header value`) + } +} + +const find = (map, name) => { + name = name.toLowerCase() + for (const key in map) { + if (key.toLowerCase() === name) { + return key + } + } + return undefined +} + +const MAP = Symbol('map') +class Headers { + constructor (init = undefined) { + this[MAP] = Object.create(null) + if (init instanceof Headers) { + const rawHeaders = init.raw() + const headerNames = Object.keys(rawHeaders) + for (const headerName of headerNames) { + for (const value of rawHeaders[headerName]) { + this.append(headerName, value) + } + } + return + } + + // no-op + if (init === undefined || init === null) { + return + } + + if (typeof init === 'object') { + const method = init[Symbol.iterator] + if (method !== null && method !== undefined) { + if (typeof method !== 'function') { + throw new TypeError('Header pairs must be iterable') + } + + // sequence> + // Note: per spec we have to first exhaust the lists then process them + const pairs = [] + for (const pair of init) { + if (typeof pair !== 'object' || + typeof pair[Symbol.iterator] !== 'function') { + throw new 
TypeError('Each header pair must be iterable') + } + const arrPair = Array.from(pair) + if (arrPair.length !== 2) { + throw new TypeError('Each header pair must be a name/value tuple') + } + pairs.push(arrPair) + } + + for (const pair of pairs) { + this.append(pair[0], pair[1]) + } + } else { + // record + for (const key of Object.keys(init)) { + this.append(key, init[key]) + } + } + } else { + throw new TypeError('Provided initializer must be an object') + } + } + + get (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key === undefined) { + return null + } + + return this[MAP][key].join(', ') + } + + forEach (callback, thisArg = undefined) { + let pairs = getHeaders(this) + for (let i = 0; i < pairs.length; i++) { + const [name, value] = pairs[i] + callback.call(thisArg, value, name, this) + // refresh in case the callback added more headers + pairs = getHeaders(this) + } + } + + set (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + this[MAP][key !== undefined ? key : name] = [value] + } + + append (name, value) { + name = `${name}` + value = `${value}` + validateName(name) + validateValue(value) + const key = find(this[MAP], name) + if (key !== undefined) { + this[MAP][key].push(value) + } else { + this[MAP][name] = [value] + } + } + + has (name) { + name = `${name}` + validateName(name) + return find(this[MAP], name) !== undefined + } + + delete (name) { + name = `${name}` + validateName(name) + const key = find(this[MAP], name) + if (key !== undefined) { + delete this[MAP][key] + } + } + + raw () { + return this[MAP] + } + + keys () { + return new HeadersIterator(this, 'key') + } + + values () { + return new HeadersIterator(this, 'value') + } + + [Symbol.iterator] () { + return new HeadersIterator(this, 'key+value') + } + + entries () { + return new HeadersIterator(this, 'key+value') + } + + get [Symbol.toStringTag] () { + return 'Headers' + } + + static exportNodeCompatibleHeaders (headers) { + const obj = Object.assign(Object.create(null), headers[MAP]) + + // http.request() only supports string as Host header. This hack makes + // specifying custom Host header possible. + const hostHeaderKey = find(headers[MAP], 'Host') + if (hostHeaderKey !== undefined) { + obj[hostHeaderKey] = obj[hostHeaderKey][0] + } + + return obj + } + + static createHeadersLenient (obj) { + const headers = new Headers() + for (const name of Object.keys(obj)) { + if (invalidTokenRegex.test(name)) { + continue + } + + if (Array.isArray(obj[name])) { + for (const val of obj[name]) { + if (invalidHeaderCharRegex.test(val)) { + continue + } + + if (headers[MAP][name] === undefined) { + headers[MAP][name] = [val] + } else { + headers[MAP][name].push(val) + } + } + } else if (!invalidHeaderCharRegex.test(obj[name])) { + headers[MAP][name] = [obj[name]] + } + } + return headers + } +} + +Object.defineProperties(Headers.prototype, { + get: { enumerable: true }, + forEach: { enumerable: true }, + set: { enumerable: true }, + append: { enumerable: true }, + has: { enumerable: true }, + delete: { enumerable: true }, + keys: { enumerable: true }, + values: { enumerable: true }, + entries: { enumerable: true }, +}) + +const getHeaders = (headers, kind = 'key+value') => + Object.keys(headers[MAP]).sort().map( + kind === 'key' ? k => k.toLowerCase() + : kind === 'value' ? 
k => headers[MAP][k].join(', ') + : k => [k.toLowerCase(), headers[MAP][k].join(', ')] + ) + +const INTERNAL = Symbol('internal') + +class HeadersIterator { + constructor (target, kind) { + this[INTERNAL] = { + target, + kind, + index: 0, + } + } + + get [Symbol.toStringTag] () { + return 'HeadersIterator' + } + + next () { + /* istanbul ignore if: should be impossible */ + if (!this || Object.getPrototypeOf(this) !== HeadersIterator.prototype) { + throw new TypeError('Value of `this` is not a HeadersIterator') + } + + const { target, kind, index } = this[INTERNAL] + const values = getHeaders(target, kind) + const len = values.length + if (index >= len) { + return { + value: undefined, + done: true, + } + } + + this[INTERNAL].index++ + + return { value: values[index], done: false } + } +} + +// manually extend because 'extends' requires a ctor +Object.setPrototypeOf(HeadersIterator.prototype, + Object.getPrototypeOf(Object.getPrototypeOf([][Symbol.iterator]()))) + +module.exports = Headers diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js new file mode 100644 index 00000000000000..da402161670e65 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/index.js @@ -0,0 +1,377 @@ +'use strict' +const { URL } = require('url') +const http = require('http') +const https = require('https') +const zlib = require('minizlib') +const { Minipass } = require('minipass') + +const Body = require('./body.js') +const { writeToStream, getTotalBytes } = Body +const Response = require('./response.js') +const Headers = require('./headers.js') +const { createHeadersLenient } = Headers +const Request = require('./request.js') +const { getNodeRequestOptions } = Request +const FetchError = require('./fetch-error.js') +const AbortError = require('./abort-error.js') + +// XXX this should really be split up and unit-ized for easier testing +// and better DRY implementation of data/http request aborting +const fetch = async (url, opts) => { + if (/^data:/.test(url)) { + const request = new Request(url, opts) + // delay 1 promise tick so that the consumer can abort right away + return Promise.resolve().then(() => new Promise((resolve, reject) => { + let type, data + try { + const { pathname, search } = new URL(url) + const split = pathname.split(',') + if (split.length < 2) { + throw new Error('invalid data: URI') + } + const mime = split.shift() + const base64 = /;base64$/.test(mime) + type = base64 ? mime.slice(0, -1 * ';base64'.length) : mime + const rawData = decodeURIComponent(split.join(',') + search) + data = base64 ? Buffer.from(rawData, 'base64') : Buffer.from(rawData) + } catch (er) { + return reject(new FetchError(`[${request.method}] ${ + request.url} invalid URL, ${er.message}`, 'system', er)) + } + + const { signal } = request + if (signal && signal.aborted) { + return reject(new AbortError('The user aborted a request.')) + } + + const headers = { 'Content-Length': data.length } + if (type) { + headers['Content-Type'] = type + } + return resolve(new Response(data, { headers })) + })) + } + + return new Promise((resolve, reject) => { + // build request object + const request = new Request(url, opts) + let options + try { + options = getNodeRequestOptions(request) + } catch (er) { + return reject(er) + } + + const send = (options.protocol === 'https:' ? 
https : http).request + const { signal } = request + let response = null + const abort = () => { + const error = new AbortError('The user aborted a request.') + reject(error) + if (Minipass.isStream(request.body) && + typeof request.body.destroy === 'function') { + request.body.destroy(error) + } + if (response && response.body) { + response.body.emit('error', error) + } + } + + if (signal && signal.aborted) { + return abort() + } + + const abortAndFinalize = () => { + abort() + finalize() + } + + const finalize = () => { + req.abort() + if (signal) { + signal.removeEventListener('abort', abortAndFinalize) + } + clearTimeout(reqTimeout) + } + + // send request + const req = send(options) + + if (signal) { + signal.addEventListener('abort', abortAndFinalize) + } + + let reqTimeout = null + if (request.timeout) { + req.once('socket', () => { + reqTimeout = setTimeout(() => { + reject(new FetchError(`network timeout at: ${ + request.url}`, 'request-timeout')) + finalize() + }, request.timeout) + }) + } + + req.on('error', er => { + // if a 'response' event is emitted before the 'error' event, then by the + // time this handler is run it's too late to reject the Promise for the + // response. instead, we forward the error event to the response stream + // so that the error will surface to the user when they try to consume + // the body. this is done as a side effect of aborting the request except + // for in windows, where we must forward the event manually, otherwise + // there is no longer a ref'd socket attached to the request and the + // stream never ends so the event loop runs out of work and the process + // exits without warning. + // coverage skipped here due to the difficulty in testing + // istanbul ignore next + if (req.res) { + req.res.emit('error', er) + } + reject(new FetchError(`request to ${request.url} failed, reason: ${ + er.message}`, 'system', er)) + finalize() + }) + + req.on('response', res => { + clearTimeout(reqTimeout) + + const headers = createHeadersLenient(res.headers) + + // HTTP fetch step 5 + if (fetch.isRedirect(res.statusCode)) { + // HTTP fetch step 5.2 + const location = headers.get('Location') + + // HTTP fetch step 5.3 + let locationURL = null + try { + locationURL = location === null ? null : new URL(location, request.url).toString() + } catch { + // error here can only be invalid URL in Location: header + // do not throw when options.redirect == manual + // let the user extract the errorneous redirect URL + if (request.redirect !== 'manual') { + /* eslint-disable-next-line max-len */ + reject(new FetchError(`uri requested responds with an invalid redirect URL: ${location}`, 'invalid-redirect')) + finalize() + return + } + } + + // HTTP fetch step 5.5 + if (request.redirect === 'error') { + reject(new FetchError('uri requested responds with a redirect, ' + + `redirect mode is set to error: ${request.url}`, 'no-redirect')) + finalize() + return + } else if (request.redirect === 'manual') { + // node-fetch-specific step: make manual redirect a bit easier to + // use by setting the Location header value to the resolved URL. 
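+ // (e.g. a relative Location: /next received for https://example.com/start
+ // resolves to https://example.com/next above, so manual-mode callers are
+ // handed an absolute URL; addresses illustrative)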
+ if (locationURL !== null) { + // handle corrupted header + try { + headers.set('Location', locationURL) + } catch (err) { + /* istanbul ignore next: nodejs server prevent invalid + response headers, we can't test this through normal + request */ + reject(err) + } + } + } else if (request.redirect === 'follow' && locationURL !== null) { + // HTTP-redirect fetch step 5 + if (request.counter >= request.follow) { + reject(new FetchError(`maximum redirect reached at: ${ + request.url}`, 'max-redirect')) + finalize() + return + } + + // HTTP-redirect fetch step 9 + if (res.statusCode !== 303 && + request.body && + getTotalBytes(request) === null) { + reject(new FetchError( + 'Cannot follow redirect with body being a readable stream', + 'unsupported-redirect' + )) + finalize() + return + } + + // Update host due to redirection + request.headers.set('host', (new URL(locationURL)).host) + + // HTTP-redirect fetch step 6 (counter increment) + // Create a new Request object. + const requestOpts = { + headers: new Headers(request.headers), + follow: request.follow, + counter: request.counter + 1, + agent: request.agent, + compress: request.compress, + method: request.method, + body: request.body, + signal: request.signal, + timeout: request.timeout, + } + + // if the redirect is to a new hostname, strip the authorization and cookie headers + const parsedOriginal = new URL(request.url) + const parsedRedirect = new URL(locationURL) + if (parsedOriginal.hostname !== parsedRedirect.hostname) { + requestOpts.headers.delete('authorization') + requestOpts.headers.delete('cookie') + } + + // HTTP-redirect fetch step 11 + if (res.statusCode === 303 || ( + (res.statusCode === 301 || res.statusCode === 302) && + request.method === 'POST' + )) { + requestOpts.method = 'GET' + requestOpts.body = undefined + requestOpts.headers.delete('content-length') + } + + // HTTP-redirect fetch step 15 + resolve(fetch(new Request(locationURL, requestOpts))) + finalize() + return + } + } // end if(isRedirect) + + // prepare response + res.once('end', () => + signal && signal.removeEventListener('abort', abortAndFinalize)) + + const body = new Minipass() + // if an error occurs, either on the response stream itself, on one of the + // decoder streams, or a response length timeout from the Body class, we + // forward the error through to our internal body stream. If we see an + // error event on that, we call finalize to abort the request and ensure + // we don't leave a socket believing a request is in flight. + // this is difficult to test, so lacks specific coverage. + body.on('error', finalize) + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + res.on('error', /* istanbul ignore next */ er => body.emit('error', er)) + res.on('data', (chunk) => body.write(chunk)) + res.on('end', () => body.end()) + + const responseOptions = { + url: request.url, + status: res.statusCode, + statusText: res.statusMessage, + headers: headers, + size: request.size, + timeout: request.timeout, + counter: request.counter, + trailer: new Promise(resolveTrailer => + res.on('end', () => resolveTrailer(createHeadersLenient(res.trailers)))), + } + + // HTTP-network fetch step 12.1.1.3 + const codings = headers.get('Content-Encoding') + + // HTTP-network fetch step 12.1.1.4: handle content codings + + // in following scenarios we ignore compression support + // 1. compression support is disabled + // 2. HEAD request + // 3. no Content-Encoding header + // 4. 
no content response (204) + // 5. content not modified response (304) + if (!request.compress || + request.method === 'HEAD' || + codings === null || + res.statusCode === 204 || + res.statusCode === 304) { + response = new Response(body, responseOptions) + resolve(response) + return + } + + // Be less strict when decoding compressed responses, since sometimes + // servers send slightly invalid responses that are still accepted + // by common browsers. + // Always using Z_SYNC_FLUSH is what cURL does. + const zlibOptions = { + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH, + } + + // for gzip + if (codings === 'gzip' || codings === 'x-gzip') { + const unzip = new zlib.Gunzip(zlibOptions) + response = new Response( + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => unzip.emit('error', er)).pipe(unzip), + responseOptions + ) + resolve(response) + return + } + + // for deflate + if (codings === 'deflate' || codings === 'x-deflate') { + // handle the infamous raw deflate response from old servers + // a hack for old IIS and Apache servers + const raw = res.pipe(new Minipass()) + raw.once('data', chunk => { + // see http://stackoverflow.com/questions/37519828 + const decoder = (chunk[0] & 0x0F) === 0x08 + ? new zlib.Inflate() + : new zlib.InflateRaw() + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. + body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + }) + return + } + + // for br + if (codings === 'br') { + // ignoring coverage so tests don't have to fake support (or lack of) for brotli + // istanbul ignore next + try { + var decoder = new zlib.BrotliDecompress() + } catch (err) { + reject(err) + finalize() + return + } + // exceedingly rare that the stream would have an error, + // but just in case we proxy it to the stream in use. 
+ body.on('error', /* istanbul ignore next */ er => decoder.emit('error', er)).pipe(decoder) + response = new Response(decoder, responseOptions) + resolve(response) + return + } + + // otherwise, use response as-is + response = new Response(body, responseOptions) + resolve(response) + }) + + writeToStream(req, request) + }) +} + +module.exports = fetch + +fetch.isRedirect = code => + code === 301 || + code === 302 || + code === 303 || + code === 307 || + code === 308 + +fetch.Headers = Headers +fetch.Request = Request +fetch.Response = Response +fetch.FetchError = FetchError +fetch.AbortError = AbortError diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js new file mode 100644 index 00000000000000..054439e6699107 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/request.js @@ -0,0 +1,282 @@ +'use strict' +const { URL } = require('url') +const { Minipass } = require('minipass') +const Headers = require('./headers.js') +const { exportNodeCompatibleHeaders } = Headers +const Body = require('./body.js') +const { clone, extractContentType, getTotalBytes } = Body + +const version = require('../package.json').version +const defaultUserAgent = + `minipass-fetch/${version} (+https://github.com/isaacs/minipass-fetch)` + +const INTERNALS = Symbol('Request internals') + +const isRequest = input => + typeof input === 'object' && typeof input[INTERNALS] === 'object' + +const isAbortSignal = signal => { + const proto = ( + signal + && typeof signal === 'object' + && Object.getPrototypeOf(signal) + ) + return !!(proto && proto.constructor.name === 'AbortSignal') +} + +class Request extends Body { + constructor (input, init = {}) { + const parsedURL = isRequest(input) ? new URL(input.url) + : input && input.href ? new URL(input.href) + : new URL(`${input}`) + + if (isRequest(input)) { + init = { ...input[INTERNALS], ...init } + } else if (!input || typeof input === 'string') { + input = {} + } + + const method = (init.method || input.method || 'GET').toUpperCase() + const isGETHEAD = method === 'GET' || method === 'HEAD' + + if ((init.body !== null && init.body !== undefined || + isRequest(input) && input.body !== null) && isGETHEAD) { + throw new TypeError('Request with GET/HEAD method cannot have body') + } + + const inputBody = init.body !== null && init.body !== undefined ? init.body + : isRequest(input) && input.body !== null ? clone(input) + : null + + super(inputBody, { + timeout: init.timeout || input.timeout || 0, + size: init.size || input.size || 0, + }) + + const headers = new Headers(init.headers || input.headers || {}) + + if (inputBody !== null && inputBody !== undefined && + !headers.has('Content-Type')) { + const contentType = extractContentType(inputBody) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + const signal = 'signal' in init ? 
init.signal + : null + + if (signal !== null && signal !== undefined && !isAbortSignal(signal)) { + throw new TypeError('Expected signal must be an instanceof AbortSignal') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized = process.env.NODE_TLS_REJECT_UNAUTHORIZED !== '0', + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = init + + this[INTERNALS] = { + method, + redirect: init.redirect || input.redirect || 'follow', + headers, + parsedURL, + signal, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } + + // node-fetch-only options + this.follow = init.follow !== undefined ? init.follow + : input.follow !== undefined ? input.follow + : 20 + this.compress = init.compress !== undefined ? init.compress + : input.compress !== undefined ? input.compress + : true + this.counter = init.counter || input.counter || 0 + this.agent = init.agent || input.agent + } + + get method () { + return this[INTERNALS].method + } + + get url () { + return this[INTERNALS].parsedURL.toString() + } + + get headers () { + return this[INTERNALS].headers + } + + get redirect () { + return this[INTERNALS].redirect + } + + get signal () { + return this[INTERNALS].signal + } + + clone () { + return new Request(this) + } + + get [Symbol.toStringTag] () { + return 'Request' + } + + static getNodeRequestOptions (request) { + const parsedURL = request[INTERNALS].parsedURL + const headers = new Headers(request[INTERNALS].headers) + + // fetch step 1.3 + if (!headers.has('Accept')) { + headers.set('Accept', '*/*') + } + + // Basic fetch + if (!/^https?:$/.test(parsedURL.protocol)) { + throw new TypeError('Only HTTP(S) protocols are supported') + } + + if (request.signal && + Minipass.isStream(request.body) && + typeof request.body.destroy !== 'function') { + throw new Error( + 'Cancellation of streamed requests with AbortSignal is not supported') + } + + // HTTP-network-or-cache fetch steps 2.4-2.7 + const contentLengthValue = + (request.body === null || request.body === undefined) && + /^(POST|PUT)$/i.test(request.method) ? '0' + : request.body !== null && request.body !== undefined + ? getTotalBytes(request) + : null + + if (contentLengthValue) { + headers.set('Content-Length', contentLengthValue + '') + } + + // HTTP-network-or-cache fetch step 2.11 + if (!headers.has('User-Agent')) { + headers.set('User-Agent', defaultUserAgent) + } + + // HTTP-network-or-cache fetch step 2.15 + if (request.compress && !headers.has('Accept-Encoding')) { + headers.set('Accept-Encoding', 'gzip,deflate') + } + + const agent = typeof request.agent === 'function' + ? 
request.agent(parsedURL) + : request.agent + + if (!headers.has('Connection') && !agent) { + headers.set('Connection', 'close') + } + + // TLS specific options that are handled by node + const { + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + } = request[INTERNALS] + + // HTTP-network fetch step 4.2 + // chunked encoding is handled by Node.js + + // we cannot spread parsedURL directly, so we have to read each property one-by-one + // and map them to the equivalent https?.request() method options + const urlProps = { + auth: parsedURL.username || parsedURL.password + ? `${parsedURL.username}:${parsedURL.password}` + : '', + host: parsedURL.host, + hostname: parsedURL.hostname, + path: `${parsedURL.pathname}${parsedURL.search}`, + port: parsedURL.port, + protocol: parsedURL.protocol, + } + + return { + ...urlProps, + method: request.method, + headers: exportNodeCompatibleHeaders(headers), + agent, + ca, + cert, + ciphers, + clientCertEngine, + crl, + dhparam, + ecdhCurve, + family, + honorCipherOrder, + key, + passphrase, + pfx, + rejectUnauthorized, + secureOptions, + secureProtocol, + servername, + sessionIdContext, + timeout: request.timeout, + } + } +} + +module.exports = Request + +Object.defineProperties(Request.prototype, { + method: { enumerable: true }, + url: { enumerable: true }, + headers: { enumerable: true }, + redirect: { enumerable: true }, + clone: { enumerable: true }, + signal: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js new file mode 100644 index 00000000000000..54cb52db3594a7 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/lib/response.js @@ -0,0 +1,90 @@ +'use strict' +const http = require('http') +const { STATUS_CODES } = http + +const Headers = require('./headers.js') +const Body = require('./body.js') +const { clone, extractContentType } = Body + +const INTERNALS = Symbol('Response internals') + +class Response extends Body { + constructor (body = null, opts = {}) { + super(body, opts) + + const status = opts.status || 200 + const headers = new Headers(opts.headers) + + if (body !== null && body !== undefined && !headers.has('Content-Type')) { + const contentType = extractContentType(body) + if (contentType) { + headers.append('Content-Type', contentType) + } + } + + this[INTERNALS] = { + url: opts.url, + status, + statusText: opts.statusText || STATUS_CODES[status], + headers, + counter: opts.counter, + trailer: Promise.resolve(opts.trailer || new Headers()), + } + } + + get trailer () { + return this[INTERNALS].trailer + } + + get url () { + return this[INTERNALS].url || '' + } + + get status () { + return this[INTERNALS].status + } + + get ok () { + return this[INTERNALS].status >= 200 && this[INTERNALS].status < 300 + } + + get redirected () { + return this[INTERNALS].counter > 0 + } + + get statusText () { + return this[INTERNALS].statusText + } + + get headers () { + return this[INTERNALS].headers + } + + clone () { + return new Response(clone(this), { + url: this.url, + status: this.status, + statusText: this.statusText, + headers: this.headers, + ok: this.ok, + redirected: this.redirected, + trailer: this.trailer, + }) + } + + get [Symbol.toStringTag] () { + return 'Response' + } +} + +module.exports = Response + 
+Object.defineProperties(Response.prototype, { + url: { enumerable: true }, + status: { enumerable: true }, + ok: { enumerable: true }, + redirected: { enumerable: true }, + statusText: { enumerable: true }, + headers: { enumerable: true }, + clone: { enumerable: true }, +}) diff --git a/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json new file mode 100644 index 00000000000000..d491a7fba126d0 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/minipass-fetch/package.json @@ -0,0 +1,69 @@ +{ + "name": "minipass-fetch", + "version": "3.0.5", + "description": "An implementation of window.fetch in Node.js using Minipass streams", + "license": "MIT", + "main": "lib/index.js", + "scripts": { + "test:tls-fixtures": "./test/fixtures/tls/setup.sh", + "test": "tap", + "snap": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "posttest": "npm run lint", + "template-oss-apply": "template-oss-apply --force" + }, + "tap": { + "coverage-map": "map.js", + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "@ungap/url-search-params": "^0.2.2", + "abort-controller": "^3.0.0", + "abortcontroller-polyfill": "~1.7.3", + "encoding": "^0.1.13", + "form-data": "^4.0.0", + "nock": "^13.2.4", + "parted": "^0.1.1", + "string-to-arraybuffer": "^1.0.2", + "tap": "^16.0.0" + }, + "dependencies": { + "minipass": "^7.0.3", + "minipass-sized": "^1.0.3", + "minizlib": "^2.1.2" + }, + "optionalDependencies": { + "encoding": "^0.1.13" + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/minipass-fetch.git" + }, + "keywords": [ + "fetch", + "minipass", + "node-fetch", + "window.fetch" + ], + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "author": "GitHub Inc.", + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE new file mode 100644 index 00000000000000..83837797202b70 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/proc-log/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright (c) GitHub, Inc. + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js new file mode 100644 index 00000000000000..86d90861078dab --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/proc-log/lib/index.js @@ -0,0 +1,153 @@ +const META = Symbol('proc-log.meta') +module.exports = { + META: META, + output: { + LEVELS: [ + 'standard', + 'error', + 'buffer', + 'flush', + ], + KEYS: { + standard: 'standard', + error: 'error', + buffer: 'buffer', + flush: 'flush', + }, + standard: function (...args) { + return process.emit('output', 'standard', ...args) + }, + error: function (...args) { + return process.emit('output', 'error', ...args) + }, + buffer: function (...args) { + return process.emit('output', 'buffer', ...args) + }, + flush: function (...args) { + return process.emit('output', 'flush', ...args) + }, + }, + log: { + LEVELS: [ + 'notice', + 'error', + 'warn', + 'info', + 'verbose', + 'http', + 'silly', + 'timing', + 'pause', + 'resume', + ], + KEYS: { + notice: 'notice', + error: 'error', + warn: 'warn', + info: 'info', + verbose: 'verbose', + http: 'http', + silly: 'silly', + timing: 'timing', + pause: 'pause', + resume: 'resume', + }, + error: function (...args) { + return process.emit('log', 'error', ...args) + }, + notice: function (...args) { + return process.emit('log', 'notice', ...args) + }, + warn: function (...args) { + return process.emit('log', 'warn', ...args) + }, + info: function (...args) { + return process.emit('log', 'info', ...args) + }, + verbose: function (...args) { + return process.emit('log', 'verbose', ...args) + }, + http: function (...args) { + return process.emit('log', 'http', ...args) + }, + silly: function (...args) { + return process.emit('log', 'silly', ...args) + }, + timing: function (...args) { + return process.emit('log', 'timing', ...args) + }, + pause: function () { + return process.emit('log', 'pause') + }, + resume: function () { + return process.emit('log', 'resume') + }, + }, + time: { + LEVELS: [ + 'start', + 'end', + ], + KEYS: { + start: 'start', + end: 'end', + }, + start: function (name, fn) { + process.emit('time', 'start', name) + function end () { + return process.emit('time', 'end', name) + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function (name) { + return process.emit('time', 'end', name) + }, + }, + input: { + LEVELS: [ + 'start', + 'end', + 'read', + ], + KEYS: { + start: 'start', + end: 'end', + read: 'read', + }, + start: function (fn) { + process.emit('input', 'start') + function end () { + return process.emit('input', 'end') + } + if (typeof fn === 'function') { + const res = fn() + if (res && res.finally) { + return res.finally(end) + } + end() + return res + } + return end + }, + end: function () { + return process.emit('input', 'end') + }, + read: function (...args) { + let resolve, reject + const promise = new Promise((_resolve, _reject) => { + resolve = _resolve + reject = _reject + }) + process.emit('input', 'read', resolve, reject, ...args) + return promise + }, + }, +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json b/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json new file mode 100644 index 00000000000000..4ab89102ecc9b5 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/proc-log/package.json @@ -0,0 +1,45 @@ +{ + "name": "proc-log", + "version": "4.2.0", + "files": 
[ + "bin/", + "lib/" + ], + "main": "lib/index.js", + "description": "just emit 'log' events on the process object", + "repository": { + "type": "git", + "url": "https://github.com/npm/proc-log.git" + }, + "author": "GitHub Inc.", + "license": "ISC", + "scripts": { + "test": "tap", + "snap": "tap", + "posttest": "npm run lint", + "postsnap": "eslint index.js test/*.js --fix", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "lintfix": "npm run lint -- --fix", + "template-oss-apply": "template-oss-apply --force" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.21.3", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.21.3", + "publish": true + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md b/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md new file mode 100644 index 00000000000000..e335388869f50f --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/ssri/LICENSE.md @@ -0,0 +1,16 @@ +ISC License + +Copyright 2021 (c) npm, Inc. + +Permission to use, copy, modify, and/or distribute this software for +any purpose with or without fee is hereby granted, provided that the +above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE COPYRIGHT HOLDER DISCLAIMS +ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED +WARRANTIES OF MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE +COPYRIGHT HOLDER BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR +CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS +OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE +OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE +USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js new file mode 100644 index 00000000000000..7d749ed480fb98 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/ssri/lib/index.js @@ -0,0 +1,580 @@ +'use strict' + +const crypto = require('crypto') +const { Minipass } = require('minipass') + +const SPEC_ALGORITHMS = ['sha512', 'sha384', 'sha256'] +const DEFAULT_ALGORITHMS = ['sha512'] + +// TODO: this should really be a hardcoded list of algorithms we support, +// rather than [a-z0-9]. +const BASE64_REGEX = /^[a-z0-9+/]+(?:=?=?)$/i +const SRI_REGEX = /^([a-z0-9]+)-([^?]+)([?\S*]*)$/ +const STRICT_SRI_REGEX = /^([a-z0-9]+)-([A-Za-z0-9+/=]{44,88})(\?[\x21-\x7E]*)?$/ +const VCHAR_REGEX = /^[\x21-\x7E]+$/ + +const getOptString = options => options?.length ? `?${options.join('?')}` : '' + +class IntegrityStream extends Minipass { + #emittedIntegrity + #emittedSize + #emittedVerified + + constructor (opts) { + super() + this.size = 0 + this.opts = opts + + // may be overridden later, but set now for class consistency + this.#getOptions() + + // options used for calculating stream. can't be changed. 
+ if (opts?.algorithms) { + this.algorithms = [...opts.algorithms] + } else { + this.algorithms = [...DEFAULT_ALGORITHMS] + } + if (this.algorithm !== null && !this.algorithms.includes(this.algorithm)) { + this.algorithms.push(this.algorithm) + } + + this.hashes = this.algorithms.map(crypto.createHash) + } + + #getOptions () { + // For verification + this.sri = this.opts?.integrity ? parse(this.opts?.integrity, this.opts) : null + this.expectedSize = this.opts?.size + + if (!this.sri) { + this.algorithm = null + } else if (this.sri.isHash) { + this.goodSri = true + this.algorithm = this.sri.algorithm + } else { + this.goodSri = !this.sri.isEmpty() + this.algorithm = this.sri.pickAlgorithm(this.opts) + } + + this.digests = this.goodSri ? this.sri[this.algorithm] : null + this.optString = getOptString(this.opts?.options) + } + + on (ev, handler) { + if (ev === 'size' && this.#emittedSize) { + return handler(this.#emittedSize) + } + + if (ev === 'integrity' && this.#emittedIntegrity) { + return handler(this.#emittedIntegrity) + } + + if (ev === 'verified' && this.#emittedVerified) { + return handler(this.#emittedVerified) + } + + return super.on(ev, handler) + } + + emit (ev, data) { + if (ev === 'end') { + this.#onEnd() + } + return super.emit(ev, data) + } + + write (data) { + this.size += data.length + this.hashes.forEach(h => h.update(data)) + return super.write(data) + } + + #onEnd () { + if (!this.goodSri) { + this.#getOptions() + } + const newSri = parse(this.hashes.map((h, i) => { + return `${this.algorithms[i]}-${h.digest('base64')}${this.optString}` + }).join(' '), this.opts) + // Integrity verification mode + const match = this.goodSri && newSri.match(this.sri, this.opts) + if (typeof this.expectedSize === 'number' && this.size !== this.expectedSize) { + /* eslint-disable-next-line max-len */ + const err = new Error(`stream size mismatch when checking ${this.sri}.\n Wanted: ${this.expectedSize}\n Found: ${this.size}`) + err.code = 'EBADSIZE' + err.found = this.size + err.expected = this.expectedSize + err.sri = this.sri + this.emit('error', err) + } else if (this.sri && !match) { + /* eslint-disable-next-line max-len */ + const err = new Error(`${this.sri} integrity checksum failed when using ${this.algorithm}: wanted ${this.digests} but got ${newSri}. (${this.size} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = this.digests + err.algorithm = this.algorithm + err.sri = this.sri + this.emit('error', err) + } else { + this.#emittedSize = this.size + this.emit('size', this.size) + this.#emittedIntegrity = newSri + this.emit('integrity', newSri) + if (match) { + this.#emittedVerified = match + this.emit('verified', match) + } + } + } +} + +class Hash { + get isHash () { + return true + } + + constructor (hash, opts) { + const strict = opts?.strict + this.source = hash.trim() + + // set default values so that we make V8 happy to + // always see a familiar object template. + this.digest = '' + this.algorithm = '' + this.options = [] + + // 3.1. Integrity metadata (called "Hash" by ssri) + // https://w3c.github.io/webappsec-subresource-integrity/#integrity-metadata-description + const match = this.source.match( + strict + ? 
STRICT_SRI_REGEX + : SRI_REGEX + ) + if (!match) { + return + } + if (strict && !SPEC_ALGORITHMS.includes(match[1])) { + return + } + this.algorithm = match[1] + this.digest = match[2] + + const rawOpts = match[3] + if (rawOpts) { + this.options = rawOpts.slice(1).split('?') + } + } + + hexDigest () { + return this.digest && Buffer.from(this.digest, 'base64').toString('hex') + } + + toJSON () { + return this.toString() + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + if (other.isIntegrity) { + const algo = other.pickAlgorithm(opts, [this.algorithm]) + + if (!algo) { + return false + } + + const foundHash = other[algo].find(hash => hash.digest === this.digest) + + if (foundHash) { + return foundHash + } + + return false + } + return other.digest === this.digest ? other : false + } + + toString (opts) { + if (opts?.strict) { + // Strict mode enforces the standard as close to the foot of the + // letter as it can. + if (!( + // The spec has very restricted productions for algorithms. + // https://www.w3.org/TR/CSP2/#source-list-syntax + SPEC_ALGORITHMS.includes(this.algorithm) && + // Usually, if someone insists on using a "different" base64, we + // leave it as-is, since there's multiple standards, and the + // specified is not a URL-safe variant. + // https://www.w3.org/TR/CSP2/#base64_value + this.digest.match(BASE64_REGEX) && + // Option syntax is strictly visual chars. + // https://w3c.github.io/webappsec-subresource-integrity/#grammardef-option-expression + // https://tools.ietf.org/html/rfc5234#appendix-B.1 + this.options.every(opt => opt.match(VCHAR_REGEX)) + )) { + return '' + } + } + return `${this.algorithm}-${this.digest}${getOptString(this.options)}` + } +} + +function integrityHashToString (toString, sep, opts, hashes) { + const toStringIsNotEmpty = toString !== '' + + let shouldAddFirstSep = false + let complement = '' + + const lastIndex = hashes.length - 1 + + for (let i = 0; i < lastIndex; i++) { + const hashString = Hash.prototype.toString.call(hashes[i], opts) + + if (hashString) { + shouldAddFirstSep = true + + complement += hashString + complement += sep + } + } + + const finalHashString = Hash.prototype.toString.call(hashes[lastIndex], opts) + + if (finalHashString) { + shouldAddFirstSep = true + complement += finalHashString + } + + if (toStringIsNotEmpty && shouldAddFirstSep) { + return toString + sep + complement + } + + return toString + complement +} + +class Integrity { + get isIntegrity () { + return true + } + + toJSON () { + return this.toString() + } + + isEmpty () { + return Object.keys(this).length === 0 + } + + toString (opts) { + let sep = opts?.sep || ' ' + let toString = '' + + if (opts?.strict) { + // Entries must be separated by whitespace, according to spec. + sep = sep.replace(/\S+/g, ' ') + + for (const hash of SPEC_ALGORITHMS) { + if (this[hash]) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + } else { + for (const hash of Object.keys(this)) { + toString = integrityHashToString(toString, sep, opts, this[hash]) + } + } + + return toString + } + + concat (integrity, opts) { + const other = typeof integrity === 'string' + ? integrity + : stringify(integrity, opts) + return parse(`${this.toString(opts)} ${other}`, opts) + } + + hexDigest () { + return parse(this, { single: true }).hexDigest() + } + + // add additional hashes to an integrity value, but prevent + // *changing* an existing integrity hash. 
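+  // (illustrative) given an Integrity holding only a sha512 entry, merging
+  // one that adds a sha256 entry succeeds, while merging one whose sha512
+  // digest differs throws, since that would alter what is already trusted.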
+ merge (integrity, opts) { + const other = parse(integrity, opts) + for (const algo in other) { + if (this[algo]) { + if (!this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest))) { + throw new Error('hashes do not match, cannot update integrity') + } + } else { + this[algo] = other[algo] + } + } + } + + match (integrity, opts) { + const other = parse(integrity, opts) + if (!other) { + return false + } + const algo = other.pickAlgorithm(opts, Object.keys(this)) + return ( + !!algo && + this[algo] && + other[algo] && + this[algo].find(hash => + other[algo].find(otherhash => + hash.digest === otherhash.digest + ) + ) + ) || false + } + + // Pick the highest priority algorithm present, optionally also limited to a + // set of hashes found in another integrity. When limiting it may return + // nothing. + pickAlgorithm (opts, hashes) { + const pickAlgorithm = opts?.pickAlgorithm || getPrioritizedHash + const keys = Object.keys(this).filter(k => { + if (hashes?.length) { + return hashes.includes(k) + } + return true + }) + if (keys.length) { + return keys.reduce((acc, algo) => pickAlgorithm(acc, algo) || acc) + } + // no intersection between this and hashes, + return null + } +} + +module.exports.parse = parse +function parse (sri, opts) { + if (!sri) { + return null + } + if (typeof sri === 'string') { + return _parse(sri, opts) + } else if (sri.algorithm && sri.digest) { + const fullSri = new Integrity() + fullSri[sri.algorithm] = [sri] + return _parse(stringify(fullSri, opts), opts) + } else { + return _parse(stringify(sri, opts), opts) + } +} + +function _parse (integrity, opts) { + // 3.4.3. Parse metadata + // https://w3c.github.io/webappsec-subresource-integrity/#parse-metadata + if (opts?.single) { + return new Hash(integrity, opts) + } + const hashes = integrity.trim().split(/\s+/).reduce((acc, string) => { + const hash = new Hash(string, opts) + if (hash.algorithm && hash.digest) { + const algo = hash.algorithm + if (!acc[algo]) { + acc[algo] = [] + } + acc[algo].push(hash) + } + return acc + }, new Integrity()) + return hashes.isEmpty() ? null : hashes +} + +module.exports.stringify = stringify +function stringify (obj, opts) { + if (obj.algorithm && obj.digest) { + return Hash.prototype.toString.call(obj, opts) + } else if (typeof obj === 'string') { + return stringify(parse(obj, opts), opts) + } else { + return Integrity.prototype.toString.call(obj, opts) + } +} + +module.exports.fromHex = fromHex +function fromHex (hexDigest, algorithm, opts) { + const optString = getOptString(opts?.options) + return parse( + `${algorithm}-${ + Buffer.from(hexDigest, 'hex').toString('base64') + }${optString}`, opts + ) +} + +module.exports.fromData = fromData +function fromData (data, opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + return algorithms.reduce((acc, algo) => { + const digest = crypto.createHash(algo).update(data).digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the string we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) +} + +module.exports.fromStream = fromStream +function fromStream (stream, opts) { + const istream = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(istream) + stream.on('error', reject) + istream.on('error', reject) + let sri + istream.on('integrity', s => { + sri = s + }) + istream.on('end', () => resolve(sri)) + istream.resume() + }) +} + +module.exports.checkData = checkData +function checkData (data, sri, opts) { + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + if (opts?.error) { + throw Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + ) + } else { + return false + } + } + const algorithm = sri.pickAlgorithm(opts) + const digest = crypto.createHash(algorithm).update(data).digest('base64') + const newSri = parse({ algorithm, digest }) + const match = newSri.match(sri, opts) + opts = opts || {} + if (match || !(opts.error)) { + return match + } else if (typeof opts.size === 'number' && (data.length !== opts.size)) { + /* eslint-disable-next-line max-len */ + const err = new Error(`data size mismatch when checking ${sri}.\n Wanted: ${opts.size}\n Found: ${data.length}`) + err.code = 'EBADSIZE' + err.found = data.length + err.expected = opts.size + err.sri = sri + throw err + } else { + /* eslint-disable-next-line max-len */ + const err = new Error(`Integrity checksum failed when using ${algorithm}: Wanted ${sri}, but got ${newSri}. (${data.length} bytes)`) + err.code = 'EINTEGRITY' + err.found = newSri + err.expected = sri + err.algorithm = algorithm + err.sri = sri + throw err + } +} + +module.exports.checkStream = checkStream +function checkStream (stream, sri, opts) { + opts = opts || Object.create(null) + opts.integrity = sri + sri = parse(sri, opts) + if (!sri || !Object.keys(sri).length) { + return Promise.reject(Object.assign( + new Error('No valid integrity hashes to check against'), { + code: 'EINTEGRITY', + } + )) + } + const checker = integrityStream(opts) + return new Promise((resolve, reject) => { + stream.pipe(checker) + stream.on('error', reject) + checker.on('error', reject) + let verified + checker.on('verified', s => { + verified = s + }) + checker.on('end', () => resolve(verified)) + checker.resume() + }) +} + +module.exports.integrityStream = integrityStream +function integrityStream (opts = Object.create(null)) { + return new IntegrityStream(opts) +} + +module.exports.create = createIntegrity +function createIntegrity (opts) { + const algorithms = opts?.algorithms || [...DEFAULT_ALGORITHMS] + const optString = getOptString(opts?.options) + + const hashes = algorithms.map(crypto.createHash) + + return { + update: function (chunk, enc) { + hashes.forEach(h => h.update(chunk, enc)) + return this + }, + digest: function () { + const integrity = algorithms.reduce((acc, algo) => { + const digest = hashes.shift().digest('base64') + const hash = new Hash( + `${algo}-${digest}${optString}`, + opts + ) + /* istanbul ignore else - it would be VERY strange if the hash we + * just calculated with an algo did not have an algo or digest. 
+ */ + if (hash.algorithm && hash.digest) { + const hashAlgo = hash.algorithm + if (!acc[hashAlgo]) { + acc[hashAlgo] = [] + } + acc[hashAlgo].push(hash) + } + return acc + }, new Integrity()) + + return integrity + }, + } +} + +const NODE_HASHES = crypto.getHashes() + +// This is a Best Effort™ at a reasonable priority for hash algos +const DEFAULT_PRIORITY = [ + 'md5', 'whirlpool', 'sha1', 'sha224', 'sha256', 'sha384', 'sha512', + // TODO - it's unclear _which_ of these Node will actually use as its name + // for the algorithm, so we guesswork it based on the OpenSSL names. + 'sha3', + 'sha3-256', 'sha3-384', 'sha3-512', + 'sha3_256', 'sha3_384', 'sha3_512', +].filter(algo => NODE_HASHES.includes(algo)) + +function getPrioritizedHash (algo1, algo2) { + /* eslint-disable-next-line max-len */ + return DEFAULT_PRIORITY.indexOf(algo1.toLowerCase()) >= DEFAULT_PRIORITY.indexOf(algo2.toLowerCase()) + ? algo1 + : algo2 +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json b/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json new file mode 100644 index 00000000000000..28395414e4643c --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/ssri/package.json @@ -0,0 +1,65 @@ +{ + "name": "ssri", + "version": "10.0.6", + "description": "Standard Subresource Integrity library -- parses, serializes, generates, and verifies integrity metadata according to the SRI spec.", + "main": "lib/index.js", + "files": [ + "bin/", + "lib/" + ], + "scripts": { + "prerelease": "npm t", + "postrelease": "npm publish", + "posttest": "npm run lint", + "test": "tap", + "coverage": "tap", + "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap" + }, + "tap": { + "check-coverage": true, + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + }, + "repository": { + "type": "git", + "url": "git+https://github.com/npm/ssri.git" + }, + "keywords": [ + "w3c", + "web", + "security", + "integrity", + "checksum", + "hashing", + "subresource integrity", + "sri", + "sri hash", + "sri string", + "sri generator", + "html" + ], + "author": "GitHub Inc.", + "license": "ISC", + "dependencies": { + "minipass": "^7.0.3" + }, + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.22.0", + "tap": "^16.0.1" + }, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.22.0", + "publish": "true" + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE new file mode 100644 index 00000000000000..69619c125ea7ef --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/LICENSE @@ -0,0 +1,5 @@ +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any purpose with or without fee is hereby granted, provided that the above copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS. 
IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js new file mode 100644 index 00000000000000..d067d2e709809a --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/lib/index.js @@ -0,0 +1,7 @@ +var path = require('path') + +var uniqueSlug = require('unique-slug') + +module.exports = function (filepath, prefix, uniq) { + return path.join(filepath, (prefix ? prefix + '-' : '') + uniqueSlug(uniq)) +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json new file mode 100644 index 00000000000000..b2fbf0666489a6 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-filename/package.json @@ -0,0 +1,51 @@ +{ + "name": "unique-filename", + "version": "3.0.0", + "description": "Generate a unique filename for use in temporary directories or caches.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-filename.git" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "bugs": { + "url": "https://github.com/iarna/unique-filename/issues" + }, + "homepage": "https://github.com/iarna/unique-filename", + "devDependencies": { + "@npmcli/eslint-config": "^4.0.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "dependencies": { + "unique-slug": "^4.0.0" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE new file mode 100644 index 00000000000000..7953647e7760b8 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/LICENSE @@ -0,0 +1,15 @@ +The ISC License + +Copyright npm, Inc + +Permission to use, copy, modify, and/or distribute this software for any +purpose with or without fee is hereby granted, provided that the above +copyright notice and this permission notice appear in all copies. + +THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES +WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF +MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR +ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES +WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN +ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF OR +IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE. 
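A minimal sketch of how the unique-filename helper above behaves (a hedged illustration only: the paths and suffixes shown are made up, and the slug logic comes from the unique-slug dependency whose source follows):

const uniqueFilename = require('unique-filename')
const os = require('os')

// no uniq argument: an eight-hex-char random suffix, different on every call,
// e.g. /tmp/my-cache-b1c7a9e2
const randomTmp = uniqueFilename(os.tmpdir(), 'my-cache')

// with a uniq string the suffix is a MurmurHash3 of that string, so the same
// input always maps to the same path
const stableTmp = uniqueFilename(os.tmpdir(), 'my-cache', '/some/input')
console.log(randomTmp, stableTmp)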
diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js new file mode 100644 index 00000000000000..1bac84d95d7307 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/lib/index.js @@ -0,0 +1,11 @@ +'use strict' +var MurmurHash3 = require('imurmurhash') + +module.exports = function (uniq) { + if (uniq) { + var hash = new MurmurHash3(uniq) + return ('00000000' + hash.result().toString(16)).slice(-8) + } else { + return (Math.random().toString(16) + '0000000').slice(2, 10) + } +} diff --git a/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json new file mode 100644 index 00000000000000..33732cdbb42859 --- /dev/null +++ b/deps/npm/node_modules/tuf-js/node_modules/unique-slug/package.json @@ -0,0 +1,47 @@ +{ + "name": "unique-slug", + "version": "4.0.0", + "description": "Generate a unique character string suitable for use in files and URLs.", + "main": "lib/index.js", + "scripts": { + "test": "tap", + "lint": "eslint \"**/*.js\"", + "postlint": "template-oss-check", + "template-oss-apply": "template-oss-apply --force", + "lintfix": "npm run lint -- --fix", + "snap": "tap", + "posttest": "npm run lint" + }, + "keywords": [], + "author": "GitHub Inc.", + "license": "ISC", + "devDependencies": { + "@npmcli/eslint-config": "^3.1.0", + "@npmcli/template-oss": "4.5.1", + "tap": "^16.3.0" + }, + "repository": { + "type": "git", + "url": "https://github.com/npm/unique-slug.git" + }, + "dependencies": { + "imurmurhash": "^0.1.4" + }, + "files": [ + "bin/", + "lib/" + ], + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + }, + "templateOSS": { + "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", + "version": "4.5.1" + }, + "tap": { + "nyc-arg": [ + "--exclude", + "tap-snapshots/**" + ] + } +} diff --git a/deps/npm/node_modules/unique-filename/package.json b/deps/npm/node_modules/unique-filename/package.json index b2fbf0666489a6..a08196e1db303a 100644 --- a/deps/npm/node_modules/unique-filename/package.json +++ b/deps/npm/node_modules/unique-filename/package.json @@ -1,20 +1,21 @@ { "name": "unique-filename", - "version": "3.0.0", + "version": "4.0.0", "description": "Generate a unique filename for use in temporary directories or caches.", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/unique-filename.git" + "url": "git+https://github.com/npm/unique-filename.git" }, "keywords": [], "author": "GitHub Inc.", @@ -24,23 +25,24 @@ }, "homepage": "https://github.com/iarna/unique-filename", "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "dependencies": { - "unique-slug": "^4.0.0" + "unique-slug": "^5.0.0" }, "files": [ "bin/", "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/unique-slug/package.json b/deps/npm/node_modules/unique-slug/package.json index 33732cdbb42859..b4d287aecef3d2 100644 --- a/deps/npm/node_modules/unique-slug/package.json +++ b/deps/npm/node_modules/unique-slug/package.json @@ -1,28 +1,29 @@ { "name": "unique-slug", - "version": "4.0.0", + "version": "5.0.0", "description": "Generate a unique character string suitable for use in files and URLs.", "main": "lib/index.js", "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "keywords": [], "author": "GitHub Inc.", "license": "ISC", "devDependencies": { - "@npmcli/eslint-config": "^3.1.0", - "@npmcli/template-oss": "4.5.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "repository": { "type": "git", - "url": "https://github.com/npm/unique-slug.git" + "url": "git+https://github.com/npm/unique-slug.git" }, "dependencies": { "imurmurhash": "^0.1.4" }, @@ -32,11 +33,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "version": "4.5.1" + "version": "4.23.3", + "publish": true }, "tap": { "nyc-arg": [ diff --git a/deps/npm/node_modules/validate-npm-package-name/package.json b/deps/npm/node_modules/validate-npm-package-name/package.json index 8a38b66e1d3e43..42089cbbede700 100644 --- a/deps/npm/node_modules/validate-npm-package-name/package.json +++ b/deps/npm/node_modules/validate-npm-package-name/package.json @@ -1,14 +1,14 @@ { "name": "validate-npm-package-name", - "version": "5.0.1", + "version": "6.0.0", "description": "Give me a string and I'll tell you if it's a valid npm package name", "main": "lib/", "directories": { "test": "test" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.22.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "scripts": { @@ -16,12 +16,13 @@ "test:code": "tap ${TAP_FLAGS:-'--'} test/*.js", "test:style": "standard", "test": "tap", - "lint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", @@ -44,11 +45,11 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", - "version": "4.22.0", + "version": "4.23.3", "publish": true }, "tap": { diff --git a/deps/npm/node_modules/which/package.json b/deps/npm/node_modules/which/package.json index 515bfb22ca0e1e..94184233c61c4c 100644 --- a/deps/npm/node_modules/which/package.json +++ b/deps/npm/node_modules/which/package.json @@ -2,10 +2,10 @@ "author": "GitHub Inc.", "name": "which", "description": "Like which(1) unix command. Find the first instance of an executable in the PATH.", - "version": "4.0.0", + "version": "5.0.0", "repository": { "type": "git", - "url": "https://github.com/npm/node-which.git" + "url": "git+https://github.com/npm/node-which.git" }, "main": "lib/index.js", "bin": { @@ -16,18 +16,19 @@ "isexe": "^3.1.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.18.0", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.3.0" }, "scripts": { "test": "tap", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", "template-oss-apply": "template-oss-apply --force", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "posttest": "npm run lint" + "posttest": "npm run lint", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "files": [ "bin/", @@ -41,17 +42,11 @@ ] }, "engines": { - "node": "^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. 
Edits may be overwritten.", - "ciVersions": [ - "16.13.0", - "16.x", - "18.0.0", - "18.x" - ], - "version": "4.18.0", + "version": "4.23.3", "publish": "true" } } diff --git a/deps/npm/node_modules/write-file-atomic/package.json b/deps/npm/node_modules/write-file-atomic/package.json index 54d58d7eeb984d..1e88b5b889ec95 100644 --- a/deps/npm/node_modules/write-file-atomic/package.json +++ b/deps/npm/node_modules/write-file-atomic/package.json @@ -1,20 +1,21 @@ { "name": "write-file-atomic", - "version": "5.0.1", + "version": "6.0.0", "description": "Write files in an atomic fashion w/configurable ownership", "main": "./lib/index.js", "scripts": { "test": "tap", "posttest": "npm run lint", - "lint": "eslint \"**/*.js\"", + "lint": "npm run eslint", "postlint": "template-oss-check", - "lintfix": "npm run lint -- --fix", + "lintfix": "npm run eslint -- --fix", "snap": "tap", - "template-oss-apply": "template-oss-apply --force" + "template-oss-apply": "template-oss-apply --force", + "eslint": "eslint \"**/*.{js,cjs,ts,mjs,jsx,tsx}\"" }, "repository": { "type": "git", - "url": "https://github.com/npm/write-file-atomic.git" + "url": "git+https://github.com/npm/write-file-atomic.git" }, "keywords": [ "writeFile", @@ -31,8 +32,8 @@ "signal-exit": "^4.0.1" }, "devDependencies": { - "@npmcli/eslint-config": "^4.0.0", - "@npmcli/template-oss": "4.14.1", + "@npmcli/eslint-config": "^5.0.0", + "@npmcli/template-oss": "4.23.3", "tap": "^16.0.1" }, "files": [ @@ -40,12 +41,12 @@ "lib/" ], "engines": { - "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + "node": "^18.17.0 || >=20.5.0" }, "templateOSS": { "//@npmcli/template-oss": "This file is partially managed by @npmcli/template-oss. Edits may be overwritten.", "windowsCI": false, - "version": "4.14.1", + "version": "4.23.3", "publish": "true" }, "tap": { diff --git a/deps/npm/package.json b/deps/npm/package.json index e7784119332087..c92578506b30ac 100644 --- a/deps/npm/package.json +++ b/deps/npm/package.json @@ -1,5 +1,5 @@ { - "version": "10.8.3", + "version": "10.9.0", "name": "npm", "description": "a package manager for JavaScript", "workspaces": [ @@ -52,18 +52,18 @@ }, "dependencies": { "@isaacs/string-locale-compare": "^1.1.0", - "@npmcli/arborist": "^7.5.4", - "@npmcli/config": "^8.3.4", - "@npmcli/fs": "^3.1.1", - "@npmcli/map-workspaces": "^3.0.6", - "@npmcli/package-json": "^5.2.0", - "@npmcli/promise-spawn": "^7.0.2", - "@npmcli/redact": "^2.0.1", - "@npmcli/run-script": "^8.1.0", + "@npmcli/arborist": "^8.0.0", + "@npmcli/config": "^9.0.0", + "@npmcli/fs": "^4.0.0", + "@npmcli/map-workspaces": "^4.0.1", + "@npmcli/package-json": "^6.0.1", + "@npmcli/promise-spawn": "^8.0.1", + "@npmcli/redact": "^3.0.0", + "@npmcli/run-script": "^9.0.1", "@sigstore/tuf": "^2.3.4", - "abbrev": "^2.0.0", + "abbrev": "^3.0.0", "archy": "~1.0.0", - "cacache": "^18.0.4", + "cacache": "^19.0.1", "chalk": "^5.3.0", "ci-info": "^4.0.0", "cli-columns": "^4.0.0", @@ -71,54 +71,54 @@ "fs-minipass": "^3.0.3", "glob": "^10.4.5", "graceful-fs": "^4.2.11", - "hosted-git-info": "^7.0.2", - "ini": "^4.1.3", - "init-package-json": "^6.0.3", + "hosted-git-info": "^8.0.0", + "ini": "^5.0.0", + "init-package-json": "^7.0.1", "is-cidr": "^5.1.0", - "json-parse-even-better-errors": "^3.0.2", - "libnpmaccess": "^8.0.6", - "libnpmdiff": "^6.1.4", - "libnpmexec": "^8.1.4", - "libnpmfund": "^5.0.12", - "libnpmhook": "^10.0.5", - "libnpmorg": "^6.0.6", - "libnpmpack": "^7.0.4", - "libnpmpublish": "^9.0.9", - "libnpmsearch": "^7.0.6", - "libnpmteam": "^6.0.5", - "libnpmversion": "^6.0.3", - 
"make-fetch-happen": "^13.0.1", + "json-parse-even-better-errors": "^4.0.0", + "libnpmaccess": "^9.0.0", + "libnpmdiff": "^7.0.0", + "libnpmexec": "^9.0.0", + "libnpmfund": "^6.0.0", + "libnpmhook": "^11.0.0", + "libnpmorg": "^7.0.0", + "libnpmpack": "^8.0.0", + "libnpmpublish": "^10.0.0", + "libnpmsearch": "^8.0.0", + "libnpmteam": "^7.0.0", + "libnpmversion": "^7.0.0", + "make-fetch-happen": "^14.0.1", "minimatch": "^9.0.5", "minipass": "^7.1.1", "minipass-pipeline": "^1.2.4", "ms": "^2.1.2", "node-gyp": "^10.2.0", - "nopt": "^7.2.1", - "normalize-package-data": "^6.0.2", - "npm-audit-report": "^5.0.0", - "npm-install-checks": "^6.3.0", - "npm-package-arg": "^11.0.3", - "npm-pick-manifest": "^9.1.0", - "npm-profile": "^10.0.0", - "npm-registry-fetch": "^17.1.0", - "npm-user-validate": "^2.0.1", + "nopt": "^8.0.0", + "normalize-package-data": "^7.0.0", + "npm-audit-report": "^6.0.0", + "npm-install-checks": "^7.1.0", + "npm-package-arg": "^12.0.0", + "npm-pick-manifest": "^10.0.0", + "npm-profile": "^11.0.1", + "npm-registry-fetch": "^18.0.1", + "npm-user-validate": "^3.0.0", "p-map": "^4.0.0", - "pacote": "^18.0.6", - "parse-conflict-json": "^3.0.1", - "proc-log": "^4.2.0", + "pacote": "^19.0.0", + "parse-conflict-json": "^4.0.0", + "proc-log": "^5.0.0", "qrcode-terminal": "^0.12.0", - "read": "^3.0.1", + "read": "^4.0.0", "semver": "^7.6.3", "spdx-expression-parse": "^4.0.0", - "ssri": "^10.0.6", + "ssri": "^12.0.0", "supports-color": "^9.4.0", "tar": "^6.2.1", "text-table": "~0.2.0", "tiny-relative-date": "^1.3.0", "treeverse": "^3.0.0", - "validate-npm-package-name": "^5.0.1", - "which": "^4.0.0", - "write-file-atomic": "^5.0.1" + "validate-npm-package-name": "^6.0.0", + "which": "^5.0.0", + "write-file-atomic": "^6.0.0" }, "bundleDependencies": [ "@isaacs/string-locale-compare", @@ -192,8 +192,8 @@ ], "devDependencies": { "@npmcli/docs": "^1.0.0", - "@npmcli/eslint-config": "^4.0.2", - "@npmcli/git": "^5.0.8", + "@npmcli/eslint-config": "^5.0.1", + "@npmcli/git": "^6.0.1", "@npmcli/mock-globals": "^1.0.0", "@npmcli/mock-registry": "^1.0.0", "@npmcli/template-oss": "4.23.3", @@ -204,7 +204,7 @@ "cli-table3": "^0.6.4", "diff": "^5.2.0", "nock": "^13.4.0", - "npm-packlist": "^8.0.2", + "npm-packlist": "^9.0.0", "remark": "^14.0.2", "remark-gfm": "^3.0.1", "remark-github": "^11.2.4", diff --git a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs index 0f5b9520516f23..134e2290b5e999 100644 --- a/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs +++ b/deps/npm/tap-snapshots/test/lib/commands/doctor.js.test.cjs @@ -1166,7 +1166,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping 404 > ping 404 1`] = ` Connecting to the registry Not ok -404 404 Not Found - GET https://registry.npmjs.org/-/ping?write=true +404 404 Not Found - GET https://registry.npmjs.org/-/ping Checking npm version Ok current: v1.0.0, latest: v1.0.0 @@ -1226,7 +1226,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping 404 in color > ping 404 in color 1`] = ` Connecting to the registry Not ok -404 404 Not Found - GET https://registry.npmjs.org/-/ping?write=true +404 404 Not Found - GET https://registry.npmjs.org/-/ping Checking npm version Ok current: v1.0.0, latest: v1.0.0 @@ -1286,7 +1286,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping exception with code > ping failure 1`] = ` Connecting to the registry Not ok -request to https://registry.npmjs.org/-/ping?write=true failed, reason: Test Error +request to 
https://registry.npmjs.org/-/ping failed, reason: Test Error Checking npm version Ok current: v1.0.0, latest: v1.0.0 @@ -1346,7 +1346,7 @@ Object { exports[`test/lib/commands/doctor.js TAP ping exception without code > ping failure 1`] = ` Connecting to the registry Not ok -request to https://registry.npmjs.org/-/ping?write=true failed, reason: Test Error +request to https://registry.npmjs.org/-/ping failed, reason: Test Error Checking npm version Ok current: v1.0.0, latest: v1.0.0 diff --git a/deps/npm/tap-snapshots/test/lib/commands/install.js.test.cjs b/deps/npm/tap-snapshots/test/lib/commands/install.js.test.cjs new file mode 100644 index 00000000000000..8f426ec3103aee --- /dev/null +++ b/deps/npm/tap-snapshots/test/lib/commands/install.js.test.cjs @@ -0,0 +1,309 @@ +/* IMPORTANT + * This snapshot file is auto-generated, but designed for humans. + * It should be checked into source control and tracked carefully. + * Re-generate by setting TAP_SNAPSHOT=1 and running tests. + * Make sure to inspect the output below. Do not ignore changes! + */ +'use strict' +exports[`test/lib/commands/install.js TAP devEngines should not utilize engines in root if devEngines is provided > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/prefix/.npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +warn EBADDEVENGINES The developer of this package has specified the following through devEngines +warn EBADDEVENGINES Invalid engine "runtime" +warn EBADDEVENGINES Invalid semver version "0.0.1" does not match "v1337.0.0" for "runtime" +warn EBADDEVENGINES { +warn EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' }, +warn EBADDEVENGINES required: { name: 'node', version: '0.0.1', onFail: 'warn' } +warn EBADDEVENGINES } +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly reify moves {} +silly audit report null + +up to date, audited 1 package in {TIME} +found 0 vulnerabilities +` + +exports[`test/lib/commands/install.js TAP devEngines should show devEngines doesnt break engines > must match snapshot 1`] = ` +silly config load:file:{CWD}/npmrc +silly config load:file:{CWD}/home/.npmrc +silly config load:file:{CWD}/global/etc/npmrc +verbose title npm +verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--global" "true" +verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}- +verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log +silly logfile done cleaning log files +silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize} +silly idealTree buildDeps +silly placeDep ROOT alpha@ OK for: want: file:../../prefix/alpha +warn EBADENGINE Unsupported engine { +warn EBADENGINE package: undefined, +warn EBADENGINE required: { node: '1.0.0' }, +warn EBADENGINE current: { node: 'v1337.0.0', npm: '42.0.0' } +warn EBADENGINE } +warn EBADENGINE Unsupported engine { +warn EBADENGINE package: undefined, +warn EBADENGINE required: { node: '1.0.0' }, +warn EBADENGINE current: { node: 'v1337.0.0', npm: '42.0.0' } +warn EBADENGINE } +silly reify moves {} +silly ADD node_modules/alpha + +added 1 package in {TIME} +` + 
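All of the snapshots in this new fixture file exercise npm's `devEngines` checks. For orientation, the package.json shape driving them looks roughly like this (a minimal sketch assembled from the test fixtures in this patch; the name and version values are illustrative only, not content of the diff):

    {
      "name": "test-package",
      "version": "1.0.0",
      "devEngines": {
        "runtime": { "name": "node", "version": "0.0.1", "onFail": "warn" },
        "cpu": { "name": "x86" }
      }
    }

As the snapshots below show, an entry with `onFail: "warn"` logs `warn EBADDEVENGINES` and continues, an entry with `onFail: "error"` (or no `onFail` at all) fails the install with `error code EBADDEVENGINES`, and `--force` downgrades that failure to a warning.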
+exports[`test/lib/commands/install.js TAP devEngines should show devEngines has no effect on dev package install > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--save-dev" "true"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize}
+silly idealTree buildDeps
+silly placeDep ROOT alpha@ OK for: want: file:alpha
+silly reify moves {}
+silly audit bulk request {}
+silly audit report null
+silly ADD node_modules/alpha
+
+added 1 package, and audited 3 packages in {TIME}
+found 0 vulnerabilities
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should show devEngines has no effect on global package install > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--global" "true"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize}
+silly idealTree buildDeps
+silly placeDep ROOT alpha@ OK for: want: file:../../prefix
+silly reify moves {}
+silly ADD node_modules/alpha
+
+added 1 package in {TIME}
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should show devEngines has no effect on package install > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize}
+silly idealTree buildDeps
+silly placeDep ROOT alpha@ OK for: want: file:alpha
+silly reify moves {}
+silly audit bulk request {}
+silly audit report null
+silly ADD node_modules/alpha
+
+added 1 package, and audited 3 packages in {TIME}
+found 0 vulnerabilities
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines 2x error case > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+verbose stack Error: The developer of this package has specified the following through devEngines
+verbose stack Invalid engine "runtime"
+verbose stack Invalid name "nondescript" does not match "node" for "runtime"
+verbose stack at Install.checkDevEngines ({CWD}/lib/base-cmd.js:182:27)
+verbose stack at MockNpm.#exec ({CWD}/lib/npm.js:251:7)
+verbose stack at MockNpm.exec ({CWD}/lib/npm.js:207:9)
+error code EBADDEVENGINES
+error EBADDEVENGINES The developer of this package has specified the following through devEngines
+error EBADDEVENGINES Invalid engine "runtime"
+error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
+error EBADDEVENGINES {
+error EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' },
+error EBADDEVENGINES required: { name: 'nondescript', onFail: 'error' }
+error EBADDEVENGINES }
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines 2x warning case > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+warn EBADDEVENGINES The developer of this package has specified the following through devEngines
+warn EBADDEVENGINES Invalid engine "runtime"
+warn EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
+warn EBADDEVENGINES {
+warn EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' },
+warn EBADDEVENGINES required: { name: 'nondescript', onFail: 'warn' }
+warn EBADDEVENGINES }
+warn EBADDEVENGINES Invalid engine "cpu"
+warn EBADDEVENGINES Invalid name "risv" does not match "x86" for "cpu"
+warn EBADDEVENGINES {
+warn EBADDEVENGINES current: { name: 'x86' },
+warn EBADDEVENGINES required: { name: 'risv', onFail: 'warn' }
+warn EBADDEVENGINES }
+silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize}
+silly idealTree buildDeps
+silly reify moves {}
+silly audit report null
+
+up to date, audited 1 package in {TIME}
+found 0 vulnerabilities
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines failure and warning case > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+warn EBADDEVENGINES The developer of this package has specified the following through devEngines
+warn EBADDEVENGINES Invalid engine "cpu"
+warn EBADDEVENGINES Invalid name "risv" does not match "x86" for "cpu"
+warn EBADDEVENGINES {
+warn EBADDEVENGINES current: { name: 'x86' },
+warn EBADDEVENGINES required: { name: 'risv', onFail: 'warn' }
+warn EBADDEVENGINES }
+verbose stack Error: The developer of this package has specified the following through devEngines
+verbose stack Invalid engine "runtime"
+verbose stack Invalid name "nondescript" does not match "node" for "runtime"
+verbose stack at Install.checkDevEngines ({CWD}/lib/base-cmd.js:182:27)
+verbose stack at MockNpm.#exec ({CWD}/lib/npm.js:251:7)
+verbose stack at MockNpm.exec ({CWD}/lib/npm.js:207:9)
+error code EBADDEVENGINES
+error EBADDEVENGINES The developer of this package has specified the following through devEngines
+error EBADDEVENGINES Invalid engine "runtime"
+error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
+error EBADDEVENGINES {
+error EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' },
+error EBADDEVENGINES required: { name: 'nondescript' }
+error EBADDEVENGINES }
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines failure case > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+verbose stack Error: The developer of this package has specified the following through devEngines
+verbose stack Invalid engine "runtime"
+verbose stack Invalid name "nondescript" does not match "node" for "runtime"
+verbose stack at Install.checkDevEngines ({CWD}/lib/base-cmd.js:182:27)
+verbose stack at MockNpm.#exec ({CWD}/lib/npm.js:251:7)
+verbose stack at MockNpm.exec ({CWD}/lib/npm.js:207:9)
+error code EBADDEVENGINES
+error EBADDEVENGINES The developer of this package has specified the following through devEngines
+error EBADDEVENGINES Invalid engine "runtime"
+error EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
+error EBADDEVENGINES {
+error EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' },
+error EBADDEVENGINES required: { name: 'nondescript' }
+error EBADDEVENGINES }
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines failure force case > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false" "--force" "true"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+warn using --force Recommended protections disabled.
+silly logfile done cleaning log files
+warn EBADDEVENGINES The developer of this package has specified the following through devEngines
+warn EBADDEVENGINES Invalid engine "runtime"
+warn EBADDEVENGINES Invalid name "nondescript" does not match "node" for "runtime"
+warn EBADDEVENGINES {
+warn EBADDEVENGINES current: { name: 'node', version: 'v1337.0.0' },
+warn EBADDEVENGINES required: { name: 'nondescript' }
+warn EBADDEVENGINES }
+silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize}
+silly idealTree buildDeps
+silly reify moves {}
+silly audit report null
+
+up to date, audited 1 package in {TIME}
+found 0 vulnerabilities
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should utilize devEngines success case > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize}
+silly idealTree buildDeps
+silly reify moves {}
+silly audit report null
+
+up to date, audited 1 package in {TIME}
+found 0 vulnerabilities
+`
+
+exports[`test/lib/commands/install.js TAP devEngines should utilize engines in root if devEngines is not provided > must match snapshot 1`] = `
+silly config load:file:{CWD}/npmrc
+silly config load:file:{CWD}/prefix/.npmrc
+silly config load:file:{CWD}/home/.npmrc
+silly config load:file:{CWD}/global/etc/npmrc
+verbose title npm
+verbose argv "--fetch-retries" "0" "--cache" "{CWD}/cache" "--loglevel" "silly" "--color" "false"
+verbose logfile logs-max:10 dir:{CWD}/cache/_logs/{DATE}-
+verbose logfile {CWD}/cache/_logs/{DATE}-debug-0.log
+silly logfile done cleaning log files
+silly packumentCache heap:{heap} maxSize:{maxSize} maxEntrySize:{maxEntrySize}
+silly idealTree buildDeps
+warn EBADENGINE Unsupported engine {
+warn EBADENGINE package: undefined,
+warn EBADENGINE required: { node: '0.0.1' },
+warn EBADENGINE current: { node: 'v1337.0.0', npm: '42.0.0' }
+warn EBADENGINE }
+silly reify moves {}
+silly audit report null
+
+up to date, audited 1 package in {TIME}
+found 0 vulnerabilities
+`
diff --git a/deps/npm/test/fixtures/clean-snapshot.js b/deps/npm/test/fixtures/clean-snapshot.js
index bcbf699cb81fc9..3439400b576aeb 100644
--- a/deps/npm/test/fixtures/clean-snapshot.js
+++ b/deps/npm/test/fixtures/clean-snapshot.js
@@ -42,12 +42,18 @@ const cleanZlib = str => str
   .replace(/"integrity": ".*",/g, '"integrity": "{integrity}",')
   .replace(/"size": [0-9]*,/g, '"size": "{size}",')
 
+const cleanPackumentCache = str => str
+  .replace(/heap:[0-9]*/g, 'heap:{heap}')
+  .replace(/maxSize:[0-9]*/g, 'maxSize:{maxSize}')
+  .replace(/maxEntrySize:[0-9]*/g, 'maxEntrySize:{maxEntrySize}')
+
 module.exports = {
   cleanCwd,
   cleanDate,
   cleanNewlines,
   cleanTime,
   cleanZlib,
+  cleanPackumentCache,
   normalizePath,
   pathRegex,
 }
diff --git a/deps/npm/test/lib/commands/doctor.js b/deps/npm/test/lib/commands/doctor.js
index 0c58a09e20c577..5d912d8b82f769 100644
--- a/deps/npm/test/lib/commands/doctor.js
+++ b/deps/npm/test/lib/commands/doctor.js
@@ -89,7 +89,7 @@ t.test('all clear', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -108,7 +108,7 @@ t.test('all clear in color', async t => {
     },
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -127,7 +127,7 @@ t.test('silent success', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -146,7 +146,7 @@ t.test('silent errors', async t => {
     ...dirs,
  })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(404, '{}')
+    .get('/-/ping').reply(404, '{}')
   await t.rejects(npm.exec('doctor', ['ping']), {
     message: /Check logs/,
   })
@@ -161,7 +161,7 @@ t.test('ping 404', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(404, '{}')
+    .get('/-/ping').reply(404, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -182,7 +182,7 @@ t.test('ping 404 in color', async t => {
     },
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(404, '{}')
+    .get('/-/ping').reply(404, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -198,7 +198,7 @@ t.test('ping exception with code', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').replyWithError({ message: 'Test Error', code: 'TEST' })
+    .get('/-/ping').replyWithError({ message: 'Test Error', code: 'TEST' })
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -214,7 +214,7 @@ t.test('ping exception without code', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').replyWithError({ message: 'Test Error', code: false })
+    .get('/-/ping').replyWithError({ message: 'Test Error', code: false })
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -230,7 +230,7 @@ t.test('npm out of date', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest('2.0.0'))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -255,7 +255,7 @@ t.test('node out of date - lts', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -280,7 +280,7 @@ t.test('node out of date - current', async t => {
     ...dirs,
   })
   tnock(t, npm.config.get('registry'))
-    .get('/-/ping?write=true').reply(200, '{}')
+    .get('/-/ping').reply(200, '{}')
     .get('/npm').reply(200, npmManifest(npm.version))
   tnock(t, 'https://nodejs.org')
     .get('/dist/index.json').reply(200, nodeVersions)
@@ -297,7 +297,7 @@ t.test('non-default registry', async t => {
...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -318,7 +318,7 @@ t.test('missing git', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -344,7 +344,7 @@ t.test('windows skips permissions checks', async t => { globalPrefixDir: {}, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -361,7 +361,7 @@ t.test('missing global directories', async t => { globalPrefixDir: {}, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -377,7 +377,7 @@ t.test('missing local node_modules', async t => { globalPrefixDir: dirs.globalPrefixDir, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -406,7 +406,7 @@ t.test('incorrect owner', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -430,7 +430,7 @@ t.test('incorrect permissions', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -458,7 +458,7 @@ t.test('error reading directory', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -481,7 +481,7 @@ t.test('cacache badContent', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -504,7 +504,7 @@ t.test('cacache reclaimedCount', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') .get('/dist/index.json').reply(200, nodeVersions) @@ -527,7 +527,7 @@ t.test('cacache missingContent', async t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') .get('/npm').reply(200, npmManifest(npm.version)) tnock(t, 'https://nodejs.org') 
.get('/dist/index.json').reply(200, nodeVersions) @@ -558,7 +558,7 @@ t.test('discrete checks', t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') await npm.exec('doctor', ['ping']) t.matchSnapshot(joinedOutput(), 'output') t.matchSnapshot({ info: logs.info, warn: logs.warn, error: logs.error }, 'logs') @@ -586,7 +586,7 @@ t.test('discrete checks', t => { ...dirs, }) tnock(t, npm.config.get('registry')) - .get('/-/ping?write=true').reply(200, '{}') + .get('/-/ping').reply(200, '{}') await npm.exec('doctor', ['registry']) t.matchSnapshot(joinedOutput(), 'output') t.matchSnapshot({ info: logs.info, warn: logs.warn, error: logs.error }, 'logs') diff --git a/deps/npm/test/lib/commands/install.js b/deps/npm/test/lib/commands/install.js index 0273f3deec73e9..a4d9c06129ec06 100644 --- a/deps/npm/test/lib/commands/install.js +++ b/deps/npm/test/lib/commands/install.js @@ -1,8 +1,16 @@ const tspawk = require('../../fixtures/tspawk') +const { + cleanCwd, + cleanTime, + cleanDate, + cleanPackumentCache, +} = require('../../fixtures/clean-snapshot.js') const path = require('node:path') const t = require('tap') +t.cleanSnapshot = (str) => cleanPackumentCache(cleanDate(cleanTime(cleanCwd(str)))) + const { loadNpmWithRegistry: loadMockNpm, workspaceMock, @@ -400,3 +408,312 @@ t.test('should show install keeps dirty --workspace flag', async t => { assert.packageDirty('node_modules/abbrev@1.1.0') assert.packageInstalled('node_modules/lodash@1.1.1') }) + +t.test('devEngines', async t => { + const mockArguments = { + globals: { + 'process.platform': 'linux', + 'process.arch': 'x86', + 'process.version': 'v1337.0.0', + }, + mocks: { + '{ROOT}/package.json': { version: '42.0.0' }, + }, + } + + t.test('should utilize devEngines success case', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-package', + version: '1.0.0', + devEngines: { + runtime: { + name: 'node', + }, + }, + }), + }, + }) + await npm.exec('install', []) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(!output.includes('EBADDEVENGINES')) + }) + + t.test('should utilize devEngines failure case', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-package', + version: '1.0.0', + devEngines: { + runtime: { + name: 'nondescript', + }, + }, + }), + }, + }) + await t.rejects( + npm.exec('install', []) + ) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(output.includes('error EBADDEVENGINES')) + }) + + t.test('should utilize devEngines failure force case', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + config: { + force: true, + }, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-package', + version: '1.0.0', + devEngines: { + runtime: { + name: 'nondescript', + }, + }, + }), + }, + }) + await npm.exec('install', []) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(output.includes('warn EBADDEVENGINES')) + }) + + t.test('should utilize devEngines 2x warning case', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-package', + version: '1.0.0', + devEngines: { + runtime: { + name: 'nondescript', + onFail: 'warn', + }, + cpu: { + name: 
'risv', + onFail: 'warn', + }, + }, + }), + }, + }) + await npm.exec('install', []) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(output.includes('warn EBADDEVENGINES')) + }) + + t.test('should utilize devEngines 2x error case', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-package', + version: '1.0.0', + devEngines: { + runtime: { + name: 'nondescript', + onFail: 'error', + }, + cpu: { + name: 'risv', + onFail: 'error', + }, + }, + }), + }, + }) + await t.rejects( + npm.exec('install', []) + ) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(output.includes('error EBADDEVENGINES')) + }) + + t.test('should utilize devEngines failure and warning case', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'test-package', + version: '1.0.0', + devEngines: { + runtime: { + name: 'nondescript', + }, + cpu: { + name: 'risv', + onFail: 'warn', + }, + }, + }), + }, + }) + await t.rejects( + npm.exec('install', []) + ) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(output.includes('EBADDEVENGINES')) + }) + + t.test('should show devEngines has no effect on package install', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + alpha: { + 'package.json': JSON.stringify({ + name: 'alpha', + devEngines: { runtime: { name: 'node', version: '1.0.0' } }, + }), + 'index.js': 'console.log("this is alpha index")', + }, + 'package.json': JSON.stringify({ + name: 'project', + }), + }, + }) + await npm.exec('install', ['./alpha']) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(!output.includes('EBADDEVENGINES')) + }) + + t.test('should show devEngines has no effect on dev package install', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + alpha: { + 'package.json': JSON.stringify({ + name: 'alpha', + devEngines: { runtime: { name: 'node', version: '1.0.0' } }, + }), + 'index.js': 'console.log("this is alpha index")', + }, + 'package.json': JSON.stringify({ + name: 'project', + }), + }, + config: { + 'save-dev': true, + }, + }) + await npm.exec('install', ['./alpha']) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(!output.includes('EBADDEVENGINES')) + }) + + t.test('should show devEngines doesnt break engines', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + alpha: { + 'package.json': JSON.stringify({ + name: 'alpha', + devEngines: { runtime: { name: 'node', version: '1.0.0' } }, + engines: { node: '1.0.0' }, + }), + 'index.js': 'console.log("this is alpha index")', + }, + 'package.json': JSON.stringify({ + name: 'project', + }), + }, + config: { global: true }, + }) + await npm.exec('install', ['./alpha']) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(output.includes('warn EBADENGINE')) + }) + + t.test('should not utilize engines in root if devEngines is provided', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'alpha', + engines: { + node: '0.0.1', + }, + devEngines: { + runtime: { + name: 'node', + version: '0.0.1', + onFail: 'warn', + }, + }, + }), + 'index.js': 'console.log("this is alpha index")', + 
}, + }) + await npm.exec('install') + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(!output.includes('EBADENGINE')) + t.ok(output.includes('warn EBADDEVENGINES')) + }) + + t.test('should utilize engines in root if devEngines is not provided', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'alpha', + engines: { + node: '0.0.1', + }, + }), + 'index.js': 'console.log("this is alpha index")', + }, + }) + await npm.exec('install') + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(output.includes('EBADENGINE')) + t.ok(!output.includes('EBADDEVENGINES')) + }) + + t.test('should show devEngines has no effect on global package install', async t => { + const { npm, joinedFullOutput } = await loadMockNpm(t, { + ...mockArguments, + prefixDir: { + 'package.json': JSON.stringify({ + name: 'alpha', + bin: { + alpha: 'index.js', + }, + devEngines: { + runtime: { + name: 'node', + version: '0.0.1', + }, + }, + }), + 'index.js': 'console.log("this is alpha index")', + }, + config: { + global: true, + }, + }) + await npm.exec('install', ['.']) + const output = joinedFullOutput() + t.matchSnapshot(output) + t.ok(!output.includes('EBADENGINE')) + t.ok(!output.includes('EBADDEVENGINES')) + }) +}) diff --git a/deps/npm/test/lib/commands/ping.js b/deps/npm/test/lib/commands/ping.js index 7f90ea394f9aeb..aca1e730131df6 100644 --- a/deps/npm/test/lib/commands/ping.js +++ b/deps/npm/test/lib/commands/ping.js @@ -1,6 +1,8 @@ const t = require('tap') const { load: loadMockNpm } = require('../../fixtures/mock-npm.js') const MockRegistry = require('@npmcli/mock-registry') +const cacache = require('cacache') +const path = require('node:path') t.test('no details', async t => { const { npm, logs, joinedOutput } = await loadMockNpm(t) @@ -74,3 +76,19 @@ t.test('invalid json', async t => { details: {}, }) }) +t.test('fail when registry is unreachable even if request is cached', async t => { + const { npm } = await loadMockNpm(t, { + config: { registry: 'https://ur.npmlocal.npmtest/' }, + cacheDir: { _cacache: {} }, + }) + const url = `${npm.config.get('registry')}-/ping` + const cache = path.join(npm.cache, '_cacache') + await cacache.put(cache, + `make-fetch-happen:request-cache:${url}`, + '{}', { metadata: { url } } + ) + t.rejects(npm.exec('ping', []), { + code: 'ENOTFOUND', + }, + 'throws ENOTFOUND error') +}) diff --git a/deps/npm/test/lib/npm.js b/deps/npm/test/lib/npm.js index 00ef3f79b04c17..739aa28eb0343f 100644 --- a/deps/npm/test/lib/npm.js +++ b/deps/npm/test/lib/npm.js @@ -149,8 +149,8 @@ t.test('npm.load', async t => { 'does not change npm.command when another command is called') t.match(logs, [ + /timing config:load:flatten Completed in [0-9.]+ms/, /timing command:config Completed in [0-9.]+ms/, - /timing command:get Completed in [0-9.]+ms/, ]) t.same(outputs, ['scope=@foo\nusage=false']) }) diff --git a/deps/openssl/config/archs/BSD-x86/asm/configdata.pm b/deps/openssl/config/archs/BSD-x86/asm/configdata.pm index 8382e5a57de628..b4ff88112bd18d 100644 --- a/deps/openssl/config/archs/BSD-x86/asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => 
"enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "BSD-x86", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -324,6 +324,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1239,6 +1240,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7734,6 +7738,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18772,6 +18780,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20334,6 +20346,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26686,6 +26699,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h index 5ae248b0fcbf8e..af438f4f0fa031 100644 --- a/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86" -#define DATE "built on: Wed Jan 31 12:57:29 2024 UTC" +#define DATE "built on: Mon Sep 30 17:05:30 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. 
*/ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm b/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm index ef047655a33035..058d8def744cbf 100644 --- a/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "BSD-x86", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -324,6 +324,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1239,6 +1240,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7734,6 +7738,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18772,6 +18780,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20334,6 +20346,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26686,6 +26699,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h 
b/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h index 5beff8d651b48a..fce54a74f651f5 100644 --- a/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86" -#define DATE "built on: Wed Jan 31 12:57:42 2024 UTC" +#define DATE "built on: Mon Sep 30 17:05:44 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm b/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm index 449b67e7317785..6b5d662fd77178 100644 --- a/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -202,7 +202,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "BSD-x86", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -324,6 +324,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1240,6 +1241,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + 
}, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7676,6 +7680,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18692,6 +18700,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20254,6 +20266,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26518,6 +26531,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h index 628312a9c73101..14d772edccc76f 100644 --- a/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86" -#define DATE "built on: Wed Jan 31 12:57:55 2024 UTC" +#define DATE "built on: Mon Sep 30 17:05:57 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm b/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm index c98dd836086a9a..f7cc1b97611e63 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86_64/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "BSD-x86_64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1240,6 +1241,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7740,6 +7744,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18814,6 +18822,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20376,6 +20388,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26832,6 +26845,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h index ae35447b7d8921..9d75e25aa5f6cd 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86_64" -#define DATE "built on: Wed Jan 31 
12:58:07 2024 UTC" +#define DATE "built on: Mon Sep 30 17:06:08 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm index 62e705ce7d8fbf..a9ff7c2027b32a 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "BSD-x86_64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1240,6 +1241,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7740,6 +7744,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18814,6 +18822,10 @@ our %unified_info = ( 
"include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20376,6 +20388,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26832,6 +26845,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h index bb3c014867e772..f1b735d8043953 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86_64" -#define DATE "built on: Wed Jan 31 12:58:22 2024 UTC" +#define DATE "built on: Mon Sep 30 17:06:24 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86_64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm b/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm index 419db86f724155..1ab302ff9a2dd9 100644 --- a/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/BSD-x86_64/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -202,7 +202,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -255,11 +255,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "BSD-x86_64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7677,6 +7681,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18693,6 +18701,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20255,6 +20267,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26519,6 +26532,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h index 201f0f7d4ec27d..c2bae3d778be1d 100644 --- a/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/BSD-x86_64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: BSD-x86_64" -#define 
DATE "built on: Wed Jan 31 12:58:38 2024 UTC" +#define DATE "built on: Mon Sep 30 17:06:40 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/BSD-x86_64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm b/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm index f724da47527e6a..3c9cf488d332fc 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN32/asm/configdata.pm @@ -165,7 +165,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -216,7 +216,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -268,11 +268,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "VC-WIN32", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "lib", @@ -287,7 +287,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x5644d3bdea88)", + "RANLIB" => "CODE(0x55c413c34420)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", @@ -374,6 +374,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1290,6 +1291,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7788,6 
+7792,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18852,6 +18860,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20414,6 +20426,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26780,6 +26793,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h index bdcc1602d19976..ddbc0c9135bcdf 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN32/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Jan 31 13:07:51 2024 UTC" +#define DATE "built on: Mon Sep 30 17:15:56 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h index 61260779ae4363..50c2dce1470fde 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN32/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
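(Aside: the buildinf.h hunks in this diff only change the DATE stamp, but the comment they truncate — "Generate compiler_flags as an array of individual characters. This is a ..." — refers to a real constraint: C90 only guarantees support for fairly short string literals, so the generated header spells the flags string out character by character. A minimal illustration of the technique; the flag values here are hypothetical, not taken from any buildinf.h above:

    #include <stdio.h>

    /* Emitting the flags as a char array sidesteps C90's minimum
     * string-literal length guarantee. Illustrative values only. */
    static const char compiler_flags[] = {
        'c','o','m','p','i','l','e','r',':',' ',
        'g','c','c',' ','-','O','3','\0'
    };

    int main(void)
    {
        puts(compiler_flags);   /* prints: compiler: gcc -O3 */
        return 0;
    }

)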
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm b/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm index 9585418f7e5ff2..a9d9e2fe5751aa 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN32/asm_avx2/configdata.pm @@ -165,7 +165,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -216,7 +216,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -268,11 +268,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "VC-WIN32", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "lib", @@ -287,7 +287,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x564719267e28)", + "RANLIB" => "CODE(0x55fa4e900730)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", @@ -374,6 +374,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1290,6 +1291,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7788,6 +7792,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18852,6 +18860,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20414,6 +20426,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26780,6 +26793,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h index 
71f6defc14b660..25257dbe4cdc20 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN32/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Jan 31 13:08:03 2024 UTC" +#define DATE "built on: Mon Sep 30 17:16:08 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h index 61260779ae4363..50c2dce1470fde 100644 --- a/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN32/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm b/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm index e5fcaa06b5bfbb..242b585006bf5d 100644 --- a/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN32/no-asm/configdata.pm @@ -163,7 +163,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -215,7 +215,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -268,11 +268,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "VC-WIN32", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "lib", @@ -287,7 +287,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x5571e27ca240)", + "RANLIB" => "CODE(0x556017cde7e0)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", @@ -374,6 +374,7 @@ our @disablables = ( "asan", "asm", "async", + 
"atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1291,6 +1292,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7730,6 +7734,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18772,6 +18780,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20334,6 +20346,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26612,6 +26625,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h index 6a3750ca20c513..f3494c62b7ffb6 100644 --- a/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN32/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Jan 31 13:08:15 2024 UTC" +#define DATE "built on: Mon Sep 30 17:16:20 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h index 61260779ae4363..50c2dce1470fde 100644 --- a/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN32/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm index f0d251945602cc..51e48deaccd109 100644 --- a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/configdata.pm @@ -163,7 +163,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -213,7 +213,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -266,11 +266,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "VC-WIN64-ARM", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "lib", @@ -283,7 +283,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x55d9bc8048e0)", + "RANLIB" => "CODE(0x563ab4010d40)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", @@ -366,6 +366,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1283,6 +1284,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7722,6 +7726,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18764,6 +18772,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20326,6 +20338,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26604,6 +26617,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h 
b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h index 9787debf662e37..9e7f66c628b399 100644 --- a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: VC-WIN64-ARM" -#define DATE "built on: Wed Jan 31 13:08:26 2024 UTC" +#define DATE "built on: Mon Sep 30 17:16:31 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h index 61260779ae4363..50c2dce1470fde 100644 --- a/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64-ARM/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm b/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm index 13e18c89b0aa59..a58ae5c555f86e 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64A/asm/configdata.pm @@ -168,7 +168,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -219,7 +219,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -271,11 +271,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "VC-WIN64A", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "lib", @@ -290,7 +290,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x562d3fd74038)", + "RANLIB" => "CODE(0x5567a098ee80)", "RC" => "rc", "_conf_fname_int" => [ 
"Configurations/00-base-templates.conf", @@ -378,6 +378,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1294,6 +1295,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7797,6 +7801,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18887,6 +18895,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20449,6 +20461,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26919,6 +26932,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h index 98286464f80cf9..af6d41b18a92c0 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Jan 31 13:07:09 2024 UTC" +#define DATE "built on: Mon Sep 30 17:15:14 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h index 61260779ae4363..50c2dce1470fde 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm index 9841596b5aba4c..b6476912c55ecc 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/configdata.pm @@ -168,7 +168,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -219,7 +219,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -271,11 +271,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "VC-WIN64A", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "lib", @@ -290,7 +290,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x5579ca7a23f8)", + "RANLIB" => "CODE(0x563a6722d330)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", @@ -378,6 +378,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1294,6 +1295,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7797,6 +7801,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18887,6 +18895,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20449,6 +20461,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26919,6 +26932,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h 
index 19f5adb778b915..8e51d88ba4b79b 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Jan 31 13:07:24 2024 UTC" +#define DATE "built on: Mon Sep 30 17:15:29 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h index 61260779ae4363..50c2dce1470fde 100644 --- a/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64A/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm b/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm index 9e8a1ec6db6169..08e8c7dc590b24 100644 --- a/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm +++ b/deps/openssl/config/archs/VC-WIN64A/no-asm/configdata.pm @@ -166,7 +166,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -218,7 +218,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -271,11 +271,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "VC-WIN64A", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "lib", @@ -290,7 +290,7 @@ our %target = ( "LDFLAGS" => "/nologo /debug", "MT" => "mt", "MTFLAGS" => "-nologo", - "RANLIB" => "CODE(0x55ba24087cd0)", + "RANLIB" => "CODE(0x55f18ff2a220)", "RC" => "rc", "_conf_fname_int" => [ "Configurations/00-base-templates.conf", @@ -378,6 +378,7 @@ our @disablables = ( "asan", "asm", 
"async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1295,6 +1296,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7734,6 +7738,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18776,6 +18784,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20338,6 +20350,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26616,6 +26629,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h index 98267ccf4001f9..50896740e947c3 100644 --- a/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/VC-WIN64A/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: " -#define DATE "built on: Wed Jan 31 13:07:39 2024 UTC" +#define DATE "built on: Mon Sep 30 17:15:44 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h index 61260779ae4363..50c2dce1470fde 100644 --- a/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/VC-WIN64A/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm b/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm index b857105391594a..77e00d35a9d76f 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm +++ b/deps/openssl/config/archs/aix64-gcc-as/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "aix64-gcc-as", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar -X64", @@ -327,6 +327,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1243,6 +1244,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7717,6 +7721,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18770,6 +18778,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20332,6 +20344,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26744,6 +26757,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/aes/aesp8-ppc.s b/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/aes/aesp8-ppc.s index 998bc4481c4029..811f4d23d53ad6 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/aes/aesp8-ppc.s +++ b/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/aes/aesp8-ppc.s @@ -8,11 +8,12 @@ rcon: .byte 
0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00 .byte 0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 - addi 6,6,-0x48 + addi 6,6,-0x58 mtlr 0 blr .long 0 @@ -2338,6 +2339,18 @@ _aesp8_xts_encrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -2380,69 +2393,77 @@ Load_xts_enc_key: vperm 31,31,22,7 lvx 25,3,7 + + + + + + + + vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -2468,6 +2489,8 @@ Loop_xts_enc6x: lvx 25,3,7 bc 16,0,Loop_xts_enc6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 @@ -2477,7 +2500,6 @@ Loop_xts_enc6x: vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 - vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 @@ -2485,7 +2507,8 @@ Loop_xts_enc6x: vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 @@ -2496,13 +2519,13 @@ Loop_xts_enc6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD508 .long 0x1210D508 @@ -2516,7 +2539,6 @@ Loop_xts_enc6x: vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 - vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 @@ -2524,7 +2546,8 @@ Loop_xts_enc6x: .long 0x1210DD08 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 @@ -2533,7 +2556,6 @@ Loop_xts_enc6x: .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 @@ -2541,7 +2563,8 @@ Loop_xts_enc6x: .long 0x10E7ED08 .long 0x118CED08 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 @@ -2551,14 +2574,14 @@ Loop_xts_enc6x: .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 @@ -2568,7 +2591,6 @@ Loop_xts_enc6x: .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D09 
.long 0x7C235699 .long 0x11AD1509 @@ -2581,7 +2603,10 @@ Loop_xts_enc6x: .long 0x11EF2509 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x11702D09 @@ -2614,6 +2639,8 @@ Loop_xts_enc6x: mtctr 9 beq Loop_xts_enc6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq Lxts_enc6x_zero cmpwi 5,0x20 @@ -2990,6 +3017,18 @@ _aesp8_xts_decrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -3037,64 +3076,64 @@ Load_xts_dec_key: vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -3120,6 +3159,8 @@ Loop_xts_dec6x: lvx 25,3,7 bc 16,0,Loop_xts_dec6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 @@ -3129,7 +3170,6 @@ Loop_xts_dec6x: vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 - vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 @@ -3137,7 +3177,8 @@ Loop_xts_dec6x: vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 @@ -3148,13 +3189,13 @@ Loop_xts_dec6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD548 .long 0x1210D548 @@ -3168,7 +3209,6 @@ Loop_xts_dec6x: vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 - vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 @@ -3176,7 +3216,8 @@ Loop_xts_dec6x: .long 0x1210DD48 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 @@ -3185,7 +3226,6 @@ Loop_xts_dec6x: .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 @@ -3193,7 +3233,8 @@ Loop_xts_dec6x: .long 0x10E7ED48 .long 0x118CED48 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 @@ -3203,14 +3244,14 @@ Loop_xts_dec6x: .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 @@ -3220,7 +3261,6 @@ Loop_xts_dec6x: .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 @@ -3233,7 +3273,10 @@ Loop_xts_dec6x: .long 0x11EF2549 .long 0x7C9C5699 
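(Aside: the aesp8-ppc.s hunks in this diff all make the same substitution — the vsldoi/vand/vxor sequence that advanced the XTS tweak is folded into a vpermxor against the new 16-byte constant appended after rcon (0x0f102132, 0x43546576, ...). Either form computes the standard XTS tweak update: multiplication by x in GF(2^128) with the IEEE P1619 reduction. A scalar C sketch of that update — the 6x loops above appear to interleave six such updates per iteration — written from the P1619 definition, not from this assembly:

    #include <stdint.h>

    /* Multiply the 128-bit tweak by x in GF(2^128), reducing modulo
     * x^128 + x^7 + x^2 + x + 1 (the 0x87 constant from IEEE P1619).
     * Little-endian byte order, as XTS uses. */
    static void xts_tweak_double(uint8_t t[16])
    {
        uint8_t carry = 0;

        for (int i = 0; i < 16; i++) {
            uint8_t next_carry = t[i] >> 7;        /* bit shifted out of this byte */
            t[i] = (uint8_t)((t[i] << 1) | carry);
            carry = next_carry;
        }
        if (carry)
            t[0] ^= 0x87;                          /* fold x^128 back in */
    }

)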
- vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x12102D49 .long 0x7CBD5699 @@ -3264,6 +3307,8 @@ Loop_xts_dec6x: mtctr 9 beq Loop_xts_dec6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq Lxts_dec6x_zero cmpwi 5,0x20 diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h b/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h index e6d5e6f9a047a3..99d30076120310 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: aix64-gcc-as" -#define DATE "built on: Wed Jan 31 12:56:52 2024 UTC" +#define DATE "built on: Mon Sep 30 17:04:53 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm index ff854e43c4d10d..34862e4d370a0e 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "aix64-gcc-as", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar -X64", @@ -327,6 +327,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1243,6 +1244,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7717,6 +7721,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18770,6 +18778,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20332,6 +20344,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26744,6 +26757,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/aes/aesp8-ppc.s b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/aes/aesp8-ppc.s index 998bc4481c4029..811f4d23d53ad6 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/aes/aesp8-ppc.s +++ b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/aes/aesp8-ppc.s @@ -8,11 +8,12 @@ rcon: .byte 
0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00 .byte 0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 - addi 6,6,-0x48 + addi 6,6,-0x58 mtlr 0 blr .long 0 @@ -2338,6 +2339,18 @@ _aesp8_xts_encrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -2380,69 +2393,77 @@ Load_xts_enc_key: vperm 31,31,22,7 lvx 25,3,7 + + + + + + + + vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -2468,6 +2489,8 @@ Loop_xts_enc6x: lvx 25,3,7 bc 16,0,Loop_xts_enc6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 @@ -2477,7 +2500,6 @@ Loop_xts_enc6x: vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 - vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 @@ -2485,7 +2507,8 @@ Loop_xts_enc6x: vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 @@ -2496,13 +2519,13 @@ Loop_xts_enc6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD508 .long 0x1210D508 @@ -2516,7 +2539,6 @@ Loop_xts_enc6x: vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 - vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 @@ -2524,7 +2546,8 @@ Loop_xts_enc6x: .long 0x1210DD08 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 @@ -2533,7 +2556,6 @@ Loop_xts_enc6x: .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 @@ -2541,7 +2563,8 @@ Loop_xts_enc6x: .long 0x10E7ED08 .long 0x118CED08 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 @@ -2551,14 +2574,14 @@ Loop_xts_enc6x: .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 @@ -2568,7 +2591,6 @@ Loop_xts_enc6x: .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D09 
.long 0x7C235699 .long 0x11AD1509 @@ -2581,7 +2603,10 @@ Loop_xts_enc6x: .long 0x11EF2509 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x11702D09 @@ -2614,6 +2639,8 @@ Loop_xts_enc6x: mtctr 9 beq Loop_xts_enc6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq Lxts_enc6x_zero cmpwi 5,0x20 @@ -2990,6 +3017,18 @@ _aesp8_xts_decrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -3037,64 +3076,64 @@ Load_xts_dec_key: vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -3120,6 +3159,8 @@ Loop_xts_dec6x: lvx 25,3,7 bc 16,0,Loop_xts_dec6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 @@ -3129,7 +3170,6 @@ Loop_xts_dec6x: vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 - vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 @@ -3137,7 +3177,8 @@ Loop_xts_dec6x: vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 @@ -3148,13 +3189,13 @@ Loop_xts_dec6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD548 .long 0x1210D548 @@ -3168,7 +3209,6 @@ Loop_xts_dec6x: vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 - vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 @@ -3176,7 +3216,8 @@ Loop_xts_dec6x: .long 0x1210DD48 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 @@ -3185,7 +3226,6 @@ Loop_xts_dec6x: .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 @@ -3193,7 +3233,8 @@ Loop_xts_dec6x: .long 0x10E7ED48 .long 0x118CED48 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 @@ -3203,14 +3244,14 @@ Loop_xts_dec6x: .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 @@ -3220,7 +3261,6 @@ Loop_xts_dec6x: .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 @@ -3233,7 +3273,10 @@ Loop_xts_dec6x: .long 0x11EF2549 .long 0x7C9C5699 
- vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x12102D49 .long 0x7CBD5699 @@ -3264,6 +3307,8 @@ Loop_xts_dec6x: mtctr 9 beq Loop_xts_dec6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq Lxts_dec6x_zero cmpwi 5,0x20 diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h index aa9a492f1854bd..69dd90a6c66fe7 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: aix64-gcc-as" -#define DATE "built on: Wed Jan 31 12:57:05 2024 UTC" +#define DATE "built on: Mon Sep 30 17:05:06 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/aix64-gcc-as/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm b/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm index e0fe47f3f4c302..3012083af82f69 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm +++ b/deps/openssl/config/archs/aix64-gcc-as/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "aix64-gcc-as", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar -X64", @@ -327,6 +327,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1244,6 +1245,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7680,6 +7684,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18695,6 +18703,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20257,6 +20269,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26517,6 +26530,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h index 5a1f77c7b09564..c1c41584528e9c 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/aix64-gcc-as/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: 
aix64-gcc-as" -#define DATE "built on: Wed Jan 31 12:57:17 2024 UTC" +#define DATE "built on: Mon Sep 30 17:05:19 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/aix64-gcc-as/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm b/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm index 609612b69b820d..c48853b80e4e46 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm +++ b/deps/openssl/config/archs/darwin-i386-cc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin-i386-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7728,6 +7732,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", 
"providers/libcommon.a", @@ -18736,6 +18744,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20298,6 +20310,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26644,6 +26657,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h index 491dfa9f4617a2..9e690dbea0a857 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin-i386-cc" -#define DATE "built on: Wed Jan 31 12:59:31 2024 UTC" +#define DATE "built on: Mon Sep 30 17:07:34 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm index d92a94efb6f152..0b7a603aa08014 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin-i386-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7728,6 +7732,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18736,6 +18744,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20298,6 +20310,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26644,6 +26657,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h index 89c993059692a2..f0fc131a0ac265 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM 
"platform: darwin-i386-cc" -#define DATE "built on: Wed Jan 31 12:59:44 2024 UTC" +#define DATE "built on: Mon Sep 30 17:07:47 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin-i386-cc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm b/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm index d5b971f9d20d1c..4eafb602153f5e 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/darwin-i386-cc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin-i386-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1242,6 +1243,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7670,6 +7674,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], 
"test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18656,6 +18664,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20218,6 +20230,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26476,6 +26489,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h index 2d13043ced2571..7c35bb03c23c0d 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin-i386-cc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin-i386-cc" -#define DATE "built on: Wed Jan 31 12:59:57 2024 UTC" +#define DATE "built on: Mon Sep 30 17:08:00 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin-i386-cc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm b/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm index 5085e5ad401684..877a6b8a253eed 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin64-arm64-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7701,6 +7705,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18770,6 +18778,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20332,6 +20344,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26682,6 +26695,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h index 8b0c9897062718..53600343288552 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: 
darwin64-arm64-cc" -#define DATE "built on: Wed Jan 31 13:00:08 2024 UTC" +#define DATE "built on: Mon Sep 30 17:08:11 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm index bdb02ff56b6413..3deb53b0966350 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin64-arm64-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7701,6 +7705,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], 
"test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18770,6 +18778,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20332,6 +20344,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26682,6 +26695,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h index e209ae9d1a2f78..74f8be4cef56d1 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-arm64-cc" -#define DATE "built on: Wed Jan 31 13:00:21 2024 UTC" +#define DATE "built on: Mon Sep 30 17:08:24 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm index 752eaf3a647b74..7698c6b4a8d09e 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin64-arm64-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1242,6 +1243,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7670,6 +7674,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18656,6 +18664,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20218,6 +20230,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26476,6 +26489,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h index f58186b307020c..16436bc0b1796c 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ 
*/ #define PLATFORM "platform: darwin64-arm64-cc" -#define DATE "built on: Wed Jan 31 13:00:33 2024 UTC" +#define DATE "built on: Mon Sep 30 17:08:36 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-arm64-cc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm index 2d3418aa631279..4d3767f42329f5 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin64-x86_64-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7733,6 +7737,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + 
"test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18777,6 +18785,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20339,6 +20351,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26789,6 +26802,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h index e53812582a877f..8c788f6d59a666 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-x86_64-cc" -#define DATE "built on: Wed Jan 31 12:58:49 2024 UTC" +#define DATE "built on: Mon Sep 30 17:06:51 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm index ea30dc93d43ae3..1f75fe7575c2cf 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin64-x86_64-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7733,6 +7737,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18777,6 +18785,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20339,6 +20351,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26789,6 +26802,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h index 588d59e092b146..7fc043860e91f6 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/crypto/buildinf.h 
@@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-x86_64-cc" -#define DATE "built on: Wed Jan 31 12:59:05 2024 UTC" +#define DATE "built on: Mon Sep 30 17:07:07 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm index b008b9a8a1a960..10e32f3d13be08 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -205,7 +205,7 @@ our %config = ( ], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -258,11 +258,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "darwin64-x86_64-cc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1242,6 +1243,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7670,6 +7674,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + 
"test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18656,6 +18664,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20218,6 +20230,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26476,6 +26489,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h index 36035d10a344b7..ecf1d141ca8f37 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: darwin64-x86_64-cc" -#define DATE "built on: Wed Jan 31 12:59:20 2024 UTC" +#define DATE "built on: Mon Sep 30 17:07:22 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/darwin64-x86_64-cc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm b/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm index 6b9f0f9e20278f..b9637a41bbedcc 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-aarch64/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-aarch64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7716,6 +7720,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18814,6 +18822,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20376,6 +20388,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26728,6 +26741,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h index 45946ceaf6a533..eba7c744efbda4 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-aarch64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: 
linux-aarch64" -#define DATE "built on: Wed Jan 31 13:00:44 2024 UTC" +#define DATE "built on: Mon Sep 30 17:08:48 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-aarch64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm index c0ce61cbb81f53..988b82a67947ef 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-aarch64/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-aarch64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7716,6 +7720,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ 
"libcrypto.a", "providers/libcommon.a", @@ -18814,6 +18822,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20376,6 +20388,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26728,6 +26741,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h index b44bd4bb5b9fe0..33cbe794886024 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-aarch64/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-aarch64" -#define DATE "built on: Wed Jan 31 13:00:57 2024 UTC" +#define DATE "built on: Mon Sep 30 17:09:00 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-aarch64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm b/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm index 09dbbbd0f4dac9..9ff6e315a7f549 100644 --- a/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-aarch64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-aarch64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7685,6 +7689,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18700,6 +18708,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20262,6 +20274,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26522,6 +26535,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h index d1a00e8b46a21c..82407322b9655a 100644 --- a/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-aarch64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define 
PLATFORM "platform: linux-aarch64" -#define DATE "built on: Wed Jan 31 13:01:10 2024 UTC" +#define DATE "built on: Mon Sep 30 17:09:13 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-aarch64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-armv4/asm/configdata.pm b/deps/openssl/config/archs/linux-armv4/asm/configdata.pm index b56c2ea33a722f..4ac06388d755d8 100644 --- a/deps/openssl/config/archs/linux-armv4/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-armv4/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-armv4", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7724,6 +7728,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => 
[ "libcrypto.a", "providers/libcommon.a", @@ -18848,6 +18856,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20410,6 +20422,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26770,6 +26783,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h index 6c556e9489e771..c2f243c0696fb7 100644 --- a/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-armv4/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-armv4" -#define DATE "built on: Wed Jan 31 13:01:21 2024 UTC" +#define DATE "built on: Mon Sep 30 17:09:25 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-armv4/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm index 76009b2dee65f5..f5715110100590 100644 --- a/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-armv4/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-armv4", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7724,6 +7728,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18848,6 +18856,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20410,6 +20422,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26770,6 +26783,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h index 3c25da4cbf40e0..e5f76f6ad716ba 100644 --- a/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-armv4/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM 
"platform: linux-armv4" -#define DATE "built on: Wed Jan 31 13:01:34 2024 UTC" +#define DATE "built on: Mon Sep 30 17:09:38 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-armv4/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm b/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm index 2e34266f4e384a..2254a71143d8fd 100644 --- a/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-armv4/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-armv4", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7685,6 +7689,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], 
"test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18700,6 +18708,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20262,6 +20274,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26522,6 +26535,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h index 7a8a7caa417f99..9b3d97b629171d 100644 --- a/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-armv4/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-armv4" -#define DATE "built on: Wed Jan 31 13:01:46 2024 UTC" +#define DATE "built on: Mon Sep 30 17:09:50 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-armv4/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-elf/asm/configdata.pm b/deps/openssl/config/archs/linux-elf/asm/configdata.pm index 7892d0e6f537f2..f134e72ba600e3 100644 --- a/deps/openssl/config/archs/linux-elf/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-elf/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-elf", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -331,6 +331,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1247,6 +1248,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7742,6 +7746,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18779,6 +18787,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20341,6 +20353,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26689,6 +26702,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h index 96090e0478b789..939d8f553cc2f0 100644 --- a/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-elf/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-elf" -#define DATE "built on: Wed Jan 31 
13:01:58 2024 UTC" +#define DATE "built on: Mon Sep 30 17:10:02 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-elf/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm index aeb57e7bc872ec..90aff138cbc5cd 100644 --- a/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-elf/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-elf", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -331,6 +331,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1247,6 +1248,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7742,6 +7746,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18779,6 +18787,10 @@ our %unified_info = ( 
"include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20341,6 +20353,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26689,6 +26702,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h index f92ceb4158b5dc..3a655350497561 100644 --- a/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-elf/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-elf" -#define DATE "built on: Wed Jan 31 13:02:11 2024 UTC" +#define DATE "built on: Mon Sep 30 17:10:15 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-elf/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm b/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm index f8c616164c6be4..479b266eb3e98d 100644 --- a/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-elf/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-elf", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -331,6 +331,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7684,6 +7688,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18699,6 +18707,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20261,6 +20273,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26521,6 +26534,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h index 9a4d8ec2adcab7..cea8ea6ace931b 100644 --- a/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-elf/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-elf" 
-#define DATE "built on: Wed Jan 31 13:02:24 2024 UTC" +#define DATE "built on: Mon Sep 30 17:10:28 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-elf/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm b/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm index 294b69122eef95..be8ddf1031a71c 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-ppc64le/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-ppc64le", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7722,6 +7726,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", 
@@ -18775,6 +18783,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20337,6 +20349,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26749,6 +26762,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-ppc64le/asm/crypto/aes/aesp8-ppc.s b/deps/openssl/config/archs/linux-ppc64le/asm/crypto/aes/aesp8-ppc.s index ae924ef9dab84b..2577338d55f518 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm/crypto/aes/aesp8-ppc.s +++ b/deps/openssl/config/archs/linux-ppc64le/asm/crypto/aes/aesp8-ppc.s @@ -9,11 +9,12 @@ rcon: .byte 0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b .byte 0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe .Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 - addi 6,6,-0x48 + addi 6,6,-0x58 mtlr 0 blr .long 0 @@ -2363,6 +2364,18 @@ _aesp8_xts_encrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -2405,69 +2418,77 @@ _aesp8_xts_encrypt6x: vperm 31,22,31,7 lvx 25,3,7 + + + + + + + + vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -2493,6 +2514,8 @@ _aesp8_xts_encrypt6x: lvx 25,3,7 bdnz .Loop_xts_enc6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 @@ -2502,7 +2525,6 @@ _aesp8_xts_encrypt6x: vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 - vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 @@ -2510,7 +2532,8 @@ _aesp8_xts_encrypt6x: vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 @@ -2521,13 +2544,13 @@ _aesp8_xts_encrypt6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD508 .long 0x1210D508 @@ -2541,7 +2564,6 @@ _aesp8_xts_encrypt6x: vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 - vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 @@ -2549,7 +2571,8 @@ _aesp8_xts_encrypt6x: .long 0x1210DD08 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 @@ -2558,7 +2581,6 @@ _aesp8_xts_encrypt6x: .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 @@ -2566,7 +2588,8 @@ _aesp8_xts_encrypt6x: .long 0x10E7ED08 .long 0x118CED08 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 @@ -2576,14 +2599,14 @@ _aesp8_xts_encrypt6x: .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 @@ -2593,7 +2616,6 @@ _aesp8_xts_encrypt6x: .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D09 .long 0x7C235699 .long 0x11AD1509 @@ -2606,7 +2628,10 @@ _aesp8_xts_encrypt6x: .long 0x11EF2509 vperm 2,2,2,6 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x11702D09 vperm 3,3,3,6 @@ -2639,6 +2664,8 @@ _aesp8_xts_encrypt6x: mtctr 9 beq .Loop_xts_enc6x + xxlor 32+10, 2, 2 + addic. 
5,5,0x60 beq .Lxts_enc6x_zero cmpwi 5,0x20 @@ -3015,6 +3042,18 @@ _aesp8_xts_decrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -3062,64 +3101,64 @@ _aesp8_xts_decrypt6x: vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -3145,6 +3184,8 @@ _aesp8_xts_decrypt6x: lvx 25,3,7 bdnz .Loop_xts_dec6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 @@ -3154,7 +3195,6 @@ _aesp8_xts_decrypt6x: vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 - vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 @@ -3162,7 +3202,8 @@ _aesp8_xts_decrypt6x: vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 @@ -3173,13 +3214,13 @@ _aesp8_xts_decrypt6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD548 .long 0x1210D548 @@ -3193,7 +3234,6 @@ _aesp8_xts_decrypt6x: vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 - vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 @@ -3201,7 +3241,8 @@ _aesp8_xts_decrypt6x: .long 0x1210DD48 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 @@ -3210,7 +3251,6 @@ _aesp8_xts_decrypt6x: .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 @@ -3218,7 +3258,8 @@ _aesp8_xts_decrypt6x: .long 0x10E7ED48 .long 0x118CED48 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 @@ -3228,14 +3269,14 @@ _aesp8_xts_decrypt6x: .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 @@ -3245,7 +3286,6 @@ _aesp8_xts_decrypt6x: .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 @@ -3258,7 +3298,10 @@ _aesp8_xts_decrypt6x: .long 0x11EF2549 vperm 2,2,2,6 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x12102D49 vperm 3,3,3,6 .long 0x7CBD5699 @@ -3289,6 
+3332,8 @@ _aesp8_xts_decrypt6x: mtctr 9 beq .Loop_xts_dec6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq .Lxts_dec6x_zero cmpwi 5,0x20 diff --git a/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h index f868966a04b719..a1d53486daf67c 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-ppc64le" -#define DATE "built on: Wed Jan 31 13:03:19 2024 UTC" +#define DATE "built on: Mon Sep 30 17:11:23 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm index dcc417e8d68a9e..4693b1e2ca61fc 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-ppc64le", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( 
"AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7722,6 +7726,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18775,6 +18783,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20337,6 +20349,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26749,6 +26762,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/aes/aesp8-ppc.s b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/aes/aesp8-ppc.s index ae924ef9dab84b..2577338d55f518 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/aes/aesp8-ppc.s +++ b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/aes/aesp8-ppc.s @@ -9,11 +9,12 @@ rcon: .byte 0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b .byte 0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe .Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 - addi 6,6,-0x48 + addi 6,6,-0x58 mtlr 0 blr .long 0 @@ -2363,6 +2364,18 @@ _aesp8_xts_encrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -2405,69 +2418,77 @@ _aesp8_xts_encrypt6x: vperm 31,22,31,7 lvx 25,3,7 + + + + + + + + vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -2493,6 +2514,8 @@ _aesp8_xts_encrypt6x: lvx 25,3,7 bdnz .Loop_xts_enc6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 @@ -2502,7 +2525,6 @@ _aesp8_xts_encrypt6x: vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 - vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 @@ -2510,7 +2532,8 @@ _aesp8_xts_encrypt6x: vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 @@ -2521,13 +2544,13 @@ _aesp8_xts_encrypt6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD508 .long 0x1210D508 @@ -2541,7 +2564,6 @@ _aesp8_xts_encrypt6x: vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 - vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 @@ -2549,7 +2571,8 @@ _aesp8_xts_encrypt6x: .long 0x1210DD08 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 @@ -2558,7 +2581,6 @@ _aesp8_xts_encrypt6x: .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 @@ -2566,7 +2588,8 @@ _aesp8_xts_encrypt6x: .long 0x10E7ED08 .long 0x118CED08 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 @@ -2576,14 +2599,14 @@ _aesp8_xts_encrypt6x: .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 @@ -2593,7 +2616,6 @@ _aesp8_xts_encrypt6x: .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D09 .long 0x7C235699 .long 0x11AD1509 @@ -2606,7 +2628,10 @@ _aesp8_xts_encrypt6x: .long 0x11EF2509 vperm 2,2,2,6 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x11702D09 vperm 3,3,3,6 @@ -2639,6 +2664,8 @@ _aesp8_xts_encrypt6x: mtctr 9 beq .Loop_xts_enc6x + xxlor 32+10, 2, 2 + addic. 
5,5,0x60 beq .Lxts_enc6x_zero cmpwi 5,0x20 @@ -3015,6 +3042,18 @@ _aesp8_xts_decrypt6x: li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 @@ -3062,64 +3101,64 @@ _aesp8_xts_decrypt6x: vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 @@ -3145,6 +3184,8 @@ _aesp8_xts_decrypt6x: lvx 25,3,7 bdnz .Loop_xts_dec6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 @@ -3154,7 +3195,6 @@ _aesp8_xts_decrypt6x: vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 - vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 @@ -3162,7 +3202,8 @@ _aesp8_xts_decrypt6x: vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 @@ -3173,13 +3214,13 @@ _aesp8_xts_decrypt6x: and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD548 .long 0x1210D548 @@ -3193,7 +3234,6 @@ _aesp8_xts_decrypt6x: vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 - vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 @@ -3201,7 +3241,8 @@ _aesp8_xts_decrypt6x: .long 0x1210DD48 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 @@ -3210,7 +3251,6 @@ _aesp8_xts_decrypt6x: .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 @@ -3218,7 +3258,8 @@ _aesp8_xts_decrypt6x: .long 0x10E7ED48 .long 0x118CED48 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 @@ -3228,14 +3269,14 @@ _aesp8_xts_decrypt6x: .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 @@ -3245,7 +3286,6 @@ _aesp8_xts_decrypt6x: .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 @@ -3258,7 +3298,10 @@ _aesp8_xts_decrypt6x: .long 0x11EF2549 vperm 2,2,2,6 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x12102D49 vperm 3,3,3,6 .long 0x7CBD5699 @@ -3289,6 
+3332,8 @@ _aesp8_xts_decrypt6x: mtctr 9 beq .Loop_xts_dec6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq .Lxts_dec6x_zero cmpwi 5,0x20 diff --git a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h index af79fca6cdd64f..62ae301563cef0 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-ppc64le" -#define DATE "built on: Wed Jan 31 13:03:31 2024 UTC" +#define DATE "built on: Mon Sep 30 17:11:36 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-ppc64le/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm b/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm index 65bbbec1229b91..4ea931962056d2 100644 --- a/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-ppc64le/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-ppc64le", - "version" => "3.0.13" + 
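Note on the aesp8-ppc hunks above: both `_aesp8_xts_encrypt6x` and `_aesp8_xts_decrypt6x` change how the XTS tweak chain is advanced. The old code folded the carry of each tweak doubling with a `vsldoi`/`vand`/`vxor` sequence; the new code preloads a permute constant from `.Lconsts` (the added `bl .Lconsts` / `lxvw4x` pair) and applies it with a single `vpermxor`, staging the constants in VSX registers via `xxlor`. The operation being computed is the standard XTS tweak update: multiply the 128-bit tweak by x in GF(2^128) modulo x^128 + x^7 + x^2 + x + 1. Below is a scalar C sketch of that update, an illustration of the math only, not a line-by-line translation of the Power ISA code:

```c
#include <stdint.h>

/*
 * Scalar sketch of the XTS tweak update the vector code above performs:
 * tweak <- tweak * x in GF(2^128) mod x^128 + x^7 + x^2 + x + 1.
 * Bytes are treated little-endian first, as in IEEE P1619; the bit
 * carried out of the top byte folds back into byte 0 as 0x87.
 */
static void xts_double_tweak(uint8_t t[16])
{
    uint8_t carry = 0;

    for (int i = 0; i < 16; i++) {
        uint8_t c = (uint8_t)(t[i] >> 7);       /* bit leaving this byte */
        t[i] = (uint8_t)((t[i] << 1) | carry);  /* shift in prior carry  */
        carry = c;
    }
    if (carry)
        t[0] ^= 0x87;                           /* reduce by the XTS poly */
}
```

The six-way unrolled loop appears to stage six such successive tweaks ahead of time (the `vxor 17..22,8,23` instructions combine each tweak with the first round key), which is why the doubling sequence repeats six times per iteration.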
"version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7685,6 +7689,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18700,6 +18708,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20262,6 +20274,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26522,6 +26535,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h index c7183f844429c4..813a758dbb8463 100644 --- a/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-ppc64le/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-ppc64le" -#define DATE "built on: Wed Jan 31 13:03:44 2024 UTC" +#define DATE "built on: Mon Sep 30 17:11:49 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-ppc64le/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm b/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm index 1f87af83ac7da3..0a7527c74f608d 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm +++ b/deps/openssl/config/archs/linux-x86_64/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-x86_64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -333,6 +333,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7749,6 +7753,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18822,6 +18830,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20384,6 +20396,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26836,6 +26849,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h index 699fafe5c0af57..56ff0a9f0ced76 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-x86_64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-x86_64" 
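For consumers that gate on the synthesized `OPENSSL_VERSION_NUMBER` (layout `0xMNN00PPSL`, per the comment above), this bump should move the value from `0x300000dfL` (3.0.13 final) to `0x300000ffL` (3.0.15 final), assuming the usual `0xf` status nibble for a release build. A minimal sketch of checking both the compile-time macros from this header and the run-time library version:

```c
#include <stdio.h>
#include <openssl/opensslv.h>
#include <openssl/crypto.h>

int main(void)
{
    /* Compile-time view: macros from the opensslv.h shown above. */
    printf("built against: %s (0x%08lxL)\n",
           OPENSSL_VERSION_TEXT, (unsigned long)OPENSSL_VERSION_NUMBER);

    /* Run-time view: whatever libcrypto the process actually loaded. */
    printf("running with:  %s (0x%08lxL)\n",
           OpenSSL_version(OPENSSL_VERSION), OpenSSL_version_num());

#if OPENSSL_VERSION_NUMBER < 0x300000f0L
#  error "OpenSSL 3.0.15 or newer required"  /* fails for <= 3.0.14 */
#endif
    return 0;
}
```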
-#define DATE "built on: Wed Jan 31 13:02:36 2024 UTC" +#define DATE "built on: Mon Sep 30 17:10:40 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-x86_64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm index f5f1bbec0436e9..8de9a59e88a749 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux-x86_64/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-x86_64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -333,6 +333,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7749,6 +7753,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", 
"providers/libcommon.a", @@ -18822,6 +18830,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20384,6 +20396,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26836,6 +26849,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h index 5804b20f527214..75117a04bb1d92 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-x86_64/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux-x86_64" -#define DATE "built on: Wed Jan 31 13:02:51 2024 UTC" +#define DATE "built on: Mon Sep 30 17:10:56 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-x86_64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm b/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm index 973e1e1265b7dc..d520c3b33b87ae 100644 --- a/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux-x86_64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux-x86_64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -333,6 +333,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1250,6 +1251,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7686,6 +7690,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18701,6 +18709,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20263,6 +20275,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26523,6 +26536,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h index cc6654b94a6b76..8062b751473863 100644 --- a/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux-x86_64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM 
"platform: linux-x86_64" -#define DATE "built on: Wed Jan 31 13:03:07 2024 UTC" +#define DATE "built on: Mon Sep 30 17:11:12 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux-x86_64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm b/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm index f03243a0e161a1..25d7152a50338e 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm +++ b/deps/openssl/config/archs/linux32-s390x/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux32-s390x", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7731,6 +7735,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ 
"libcrypto.a", "providers/libcommon.a", @@ -18816,6 +18824,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20378,6 +20390,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26702,6 +26715,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h index 0f0b2de4f51ac1..a1815adcb47351 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux32-s390x/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux32-s390x" -#define DATE "built on: Wed Jan 31 13:03:56 2024 UTC" +#define DATE "built on: Mon Sep 30 17:12:01 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux32-s390x/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm index 3c7006cb9b9e70..bfdea24dd5e5ae 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux32-s390x/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux32-s390x", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7731,6 +7735,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18816,6 +18824,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20378,6 +20390,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26702,6 +26715,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h index e8782c94c1f1da..e6b3e00f5c752f 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux32-s390x/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ 
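The other recurring configdata.pm change is the new `"atexit"` entry in `@disablables`: the regenerated 3.0.15 Configure evidently accepts a `no-atexit` option. By default libcrypto runs `OPENSSL_cleanup()` from an atexit(3) handler, which can misfire when libcrypto is dlclose()d before process exit; the new knob presumably opts out of that registration (an inference from the option name, not something this diff states). A sketch of the cleanup-hook API the option interacts with:

```c
#include <stdio.h>
#include <openssl/crypto.h>

/*
 * Handler run during OPENSSL_cleanup(). In default builds that cleanup
 * is itself registered via atexit(3) -- the behaviour the new "atexit"
 * configure knob is presumably there to disable.
 */
static void on_cleanup(void)
{
    fprintf(stderr, "libcrypto is shutting down\n");
}

int main(void)
{
    if (!OPENSSL_atexit(on_cleanup))
        return 1;   /* registration failed */
    return 0;       /* handler fires when libcrypto cleans up at exit */
}
```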
*/ #define PLATFORM "platform: linux32-s390x" -#define DATE "built on: Wed Jan 31 13:04:09 2024 UTC" +#define DATE "built on: Mon Sep 30 17:12:14 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux32-s390x/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm b/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm index f363125e2a7b40..36af8f54cc6c9f 100644 --- a/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux32-s390x/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux32-s390x", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -332,6 +332,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7685,6 +7689,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + 
"test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18700,6 +18708,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20262,6 +20274,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26522,6 +26535,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h index c597c4d38fdb35..2e191c2e4841b1 100644 --- a/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux32-s390x/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux32-s390x" -#define DATE "built on: Wed Jan 31 13:04:22 2024 UTC" +#define DATE "built on: Mon Sep 30 17:12:26 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux32-s390x/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm index c254f9376fe637..4ba193f4bf39c7 100644 --- a/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-loongarch64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-loongarch64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -331,6 +331,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7684,6 +7688,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18699,6 +18707,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20261,6 +20273,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26521,6 +26534,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h index 2d29318d84a21e..8b0b4c9de878bc 100644 --- a/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h +++ 
b/deps/openssl/config/archs/linux64-loongarch64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-loongarch64" -#define DATE "built on: Wed Jan 31 13:08:49 2024 UTC" +#define DATE "built on: Mon Sep 30 17:16:54 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-loongarch64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm b/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm index 33f877abf4ba4b..751dc619e087fa 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-mips64/asm/configdata.pm @@ -162,7 +162,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -210,7 +210,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -262,11 +262,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-mips64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -336,6 +336,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1252,6 +1253,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7710,6 +7714,10 @@ our 
%unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18773,6 +18781,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20335,6 +20347,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26631,6 +26644,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h index 0ecfd9b31ae486..5bcb19f0038b15 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-mips64/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-mips64" -#define DATE "built on: Wed Jan 31 13:05:11 2024 UTC" +#define DATE "built on: Mon Sep 30 17:13:16 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-mips64/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
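Every regenerated configdata.pm in this update also registers a new test binary, `test/evp_byname_test`, linked against libcrypto and `test/libtestutil.a`. The test's source is not part of this diff; judging by the name alone it presumably covers the classic by-name EVP lookups. A minimal sketch of that API family (the functions below are real OpenSSL 3.0 API, but their connection to the new test is an assumption):

```c
#include <stdio.h>
#include <openssl/evp.h>

int main(void)
{
    /* Legacy by-name lookups; OpenSSL 3.0 resolves these via auto-init. */
    const EVP_CIPHER *cipher = EVP_get_cipherbyname("AES-128-XTS");
    const EVP_MD *md = EVP_get_digestbyname("SHA256");

    if (cipher == NULL || md == NULL) {
        fprintf(stderr, "by-name lookup failed\n");
        return 1;
    }
    printf("cipher key length: %d\n", EVP_CIPHER_get_key_length(cipher));
    printf("digest size:       %d\n", EVP_MD_get_size(md));
    return 0;
}
```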
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm index 78289801ba39a7..c861a6aa75de33 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux64-mips64/asm_avx2/configdata.pm @@ -162,7 +162,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -210,7 +210,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -262,11 +262,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-mips64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -336,6 +336,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1252,6 +1253,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7710,6 +7714,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18773,6 +18781,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20335,6 +20347,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26631,6 +26644,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h index a45ecc65c50250..9969f7fbe4894a 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-mips64/asm_avx2/crypto/buildinf.h @@ -11,7 
+11,7 @@ */ #define PLATFORM "platform: linux64-mips64" -#define DATE "built on: Wed Jan 31 13:05:23 2024 UTC" +#define DATE "built on: Mon Sep 30 17:13:28 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-mips64/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm index 07632e82e26608..0aa374d19d56cb 100644 --- a/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-mips64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-mips64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -333,6 +333,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1250,6 +1251,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7686,6 +7690,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + 
"libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18701,6 +18709,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20263,6 +20275,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26523,6 +26536,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h index 2780a78e9cca2c..135ced7215d174 100644 --- a/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-mips64/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-mips64" -#define DATE "built on: Wed Jan 31 13:05:35 2024 UTC" +#define DATE "built on: Mon Sep 30 17:13:40 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-mips64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm index eb98331a682e6e..ce13ea66723d9f 100644 --- a/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-riscv64/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-riscv64", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -331,6 +331,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1248,6 +1249,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7684,6 +7688,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18699,6 +18707,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20261,6 +20273,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26521,6 +26534,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h index 0ca3c0b6e4db75..f5d77a05993d1a 100644 --- a/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-riscv64/no-asm/crypto/buildinf.h @@ -11,7 
+11,7 @@ */ #define PLATFORM "platform: linux64-riscv64" -#define DATE "built on: Wed Jan 31 13:08:37 2024 UTC" +#define DATE "built on: Mon Sep 30 17:16:42 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-riscv64/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm b/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm index d615c028da84a6..df0ce5c951f633 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-s390x/asm/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-s390x", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -333,6 +333,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7732,6 +7736,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + 
"test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18827,6 +18835,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20389,6 +20401,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26713,6 +26726,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h index 3c794e9cce429f..d7922fa6537630 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-s390x/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-s390x" -#define DATE "built on: Wed Jan 31 13:04:34 2024 UTC" +#define DATE "built on: Mon Sep 30 17:12:38 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-s390x/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm b/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm index 64f0a78c6daef6..b106196b63ae5c 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/linux64-s390x/asm_avx2/configdata.pm @@ -159,7 +159,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -207,7 +207,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-s390x", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -333,6 +333,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1249,6 +1250,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7732,6 +7736,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18827,6 +18835,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20389,6 +20401,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26713,6 +26726,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h index 3d0c483c575a75..2c9106ca3ca56c 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-s390x/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ 
*/ #define PLATFORM "platform: linux64-s390x" -#define DATE "built on: Wed Jan 31 13:04:46 2024 UTC" +#define DATE "built on: Mon Sep 30 17:12:51 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-s390x/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm b/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm index 69bc1c7975b343..1462cc41faebed 100644 --- a/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm +++ b/deps/openssl/config/archs/linux64-s390x/no-asm/configdata.pm @@ -157,7 +157,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -206,7 +206,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -259,11 +259,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned char", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "linux64-s390x", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -333,6 +333,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1250,6 +1251,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7686,6 +7690,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + 
"test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18701,6 +18709,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20263,6 +20275,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26523,6 +26536,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h index 26a9eee8e3c3c5..65fd9d291b3df3 100644 --- a/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/linux64-s390x/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: linux64-s390x" -#define DATE "built on: Wed Jan 31 13:04:59 2024 UTC" +#define DATE "built on: Mon Sep 30 17:13:04 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/linux64-s390x/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm b/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm index ef7cd1e1ccd587..15732afc0605eb 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "solaris-x86-gcc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -324,6 +324,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1240,6 +1241,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7735,6 +7739,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18772,6 +18780,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20334,6 +20346,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26682,6 +26695,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h index 75a6d54a84bc6c..fcd321c88356d4 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM 
"platform: solaris-x86-gcc" -#define DATE "built on: Wed Jan 31 13:05:47 2024 UTC" +#define DATE "built on: Mon Sep 30 17:13:52 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm index 0fc41abe71068d..67a1475d1ddbe5 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "solaris-x86-gcc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -324,6 +324,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1240,6 +1241,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7735,6 +7739,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + 
], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18772,6 +18780,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20334,6 +20346,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26682,6 +26695,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h index 58695dce18281f..6e6e28436ecdbc 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris-x86-gcc" -#define DATE "built on: Wed Jan 31 13:06:00 2024 UTC" +#define DATE "built on: Mon Sep 30 17:14:05 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm index be79bd091411ec..6210b1d2a3a71f 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "solaris-x86-gcc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -324,6 +324,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7677,6 +7681,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18692,6 +18700,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20254,6 +20266,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26514,6 +26527,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h index 0b4b1b3b6d0b16..4c9d7e40a9a713 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/crypto/buildinf.h @@ -11,7 
+11,7 @@ */ #define PLATFORM "platform: solaris-x86-gcc" -#define DATE "built on: Wed Jan 31 13:06:14 2024 UTC" +#define DATE "built on: Mon Sep 30 17:14:18 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris-x86-gcc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm index 6ca22d6bc68da3..b0a159b0cc7022 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "solaris64-x86_64-gcc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7741,6 +7745,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" 
=> [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18814,6 +18822,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20376,6 +20388,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26828,6 +26841,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h index 4936c63ba119fb..d4ff152f987c38 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris64-x86_64-gcc" -#define DATE "built on: Wed Jan 31 13:06:25 2024 UTC" +#define DATE "built on: Mon Sep 30 17:14:30 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. 
*/ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm index cce6080a4bdc5a..fe2ce273e36091 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/configdata.pm @@ -156,7 +156,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -204,7 +204,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "solaris64-x86_64-gcc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1241,6 +1242,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, "test/evp_extra_test" => { "noinst" => "1" }, @@ -7741,6 +7745,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18814,6 +18822,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20376,6 +20388,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26828,6 +26841,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h index f14127004a37fb..8d026664c28a3b 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h +++ 
b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris64-x86_64-gcc" -#define DATE "built on: Wed Jan 31 13:06:41 2024 UTC" +#define DATE "built on: Mon Sep 30 17:14:46 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/asm_avx2/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm index 1790bd78c651b1..350e5fdcdfdef2 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/configdata.pm @@ -154,7 +154,7 @@ our %config = ( ], "dynamic_engines" => "0", "ex_libs" => [], - "full_version" => "3.0.13+quic", + "full_version" => "3.0.15+quic", "includes" => [], "lflags" => [], "lib_defines" => [ @@ -203,7 +203,7 @@ our %config = ( "openssl_sys_defines" => [], "openssldir" => "", "options" => "enable-ssl-trace enable-fips no-afalgeng no-asan no-asm no-buildtest-c++ no-comp no-crypto-mdebug no-crypto-mdebug-backtrace no-devcryptoeng no-dynamic-engine no-ec_nistp_64_gcc_128 no-egd no-external-tests no-fuzz-afl no-fuzz-libfuzzer no-ktls no-loadereng no-md2 no-msan no-rc5 no-sctp no-shared no-ssl3 no-ssl3-method no-trace no-ubsan no-unit-test no-uplink no-weak-ssl-ciphers no-zlib no-zlib-dynamic", - "patch" => "13", + "patch" => "15", "perl_archname" => "x86_64-linux-gnu-thread-multi", "perl_cmd" => "/usr/bin/perl", "perl_version" => "5.34.0", @@ -256,11 +256,11 @@ our %config = ( "prerelease" => "", "processor" => "", "rc4_int" => "unsigned int", - "release_date" => "30 Jan 2024", + "release_date" => "3 Sep 2024", "shlib_version" => "81.3", "sourcedir" => ".", "target" => "solaris64-x86_64-gcc", - "version" => "3.0.13" + "version" => "3.0.15" ); our %target = ( "AR" => "ar", @@ -325,6 +325,7 @@ our @disablables = ( "asan", "asm", "async", + "atexit", "autoalginit", "autoerrinit", "autoload-config", @@ -1242,6 +1243,9 @@ our %unified_info = ( "test/errtest" => { "noinst" => "1" }, + "test/evp_byname_test" => { + "noinst" => "1" + }, 
"test/evp_extra_test" => { "noinst" => "1" }, @@ -7678,6 +7682,10 @@ our %unified_info = ( "libcrypto", "test/libtestutil.a" ], + "test/evp_byname_test" => [ + "libcrypto", + "test/libtestutil.a" + ], "test/evp_extra_test" => [ "libcrypto.a", "providers/libcommon.a", @@ -18693,6 +18701,10 @@ our %unified_info = ( "include", "apps/include" ], + "test/evp_byname_test" => [ + "include", + "apps/include" + ], "test/evp_extra_test" => [ "include", "apps/include", @@ -20255,6 +20267,7 @@ our %unified_info = ( "test/endecoder_legacy_test", "test/enginetest", "test/errtest", + "test/evp_byname_test", "test/evp_extra_test", "test/evp_extra_test2", "test/evp_fetch_prov_test", @@ -26515,6 +26528,12 @@ our %unified_info = ( "test/errtest-bin-errtest.o" => [ "test/errtest.c" ], + "test/evp_byname_test" => [ + "test/evp_byname_test-bin-evp_byname_test.o" + ], + "test/evp_byname_test-bin-evp_byname_test.o" => [ + "test/evp_byname_test.c" + ], "test/evp_extra_test" => [ "providers/evp_extra_test-bin-legacyprov.o", "test/evp_extra_test-bin-evp_extra_test.o" diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h index bcc7cb4282dcdb..f750c629cde372 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/crypto/buildinf.h @@ -11,7 +11,7 @@ */ #define PLATFORM "platform: solaris64-x86_64-gcc" -#define DATE "built on: Wed Jan 31 13:06:57 2024 UTC" +#define DATE "built on: Mon Sep 30 17:15:02 2024 UTC" /* * Generate compiler_flags as an array of individual characters. This is a diff --git a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h index 65f3bfa0540b2d..819878c21bf304 100644 --- a/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h +++ b/deps/openssl/config/archs/solaris64-x86_64-gcc/no-asm/include/openssl/opensslv.h @@ -29,7 +29,7 @@ extern "C" { */ # define OPENSSL_VERSION_MAJOR 3 # define OPENSSL_VERSION_MINOR 0 -# define OPENSSL_VERSION_PATCH 13 +# define OPENSSL_VERSION_PATCH 15 /* * Additional version information @@ -74,21 +74,21 @@ extern "C" { * longer variant with OPENSSL_VERSION_PRE_RELEASE_STR and * OPENSSL_VERSION_BUILD_METADATA_STR appended. */ -# define OPENSSL_VERSION_STR "3.0.13" -# define OPENSSL_FULL_VERSION_STR "3.0.13+quic" +# define OPENSSL_VERSION_STR "3.0.15" +# define OPENSSL_FULL_VERSION_STR "3.0.15+quic" /* * SECTION 3: ADDITIONAL METADATA * * These strings are defined separately to allow them to be parsable. */ -# define OPENSSL_RELEASE_DATE "30 Jan 2024" +# define OPENSSL_RELEASE_DATE "3 Sep 2024" /* * SECTION 4: BACKWARD COMPATIBILITY */ -# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.13+quic 30 Jan 2024" +# define OPENSSL_VERSION_TEXT "OpenSSL 3.0.15+quic 3 Sep 2024" /* Synthesize OPENSSL_VERSION_NUMBER with the layout 0xMNN00PPSL */ # ifdef OPENSSL_VERSION_PRE_RELEASE diff --git a/deps/openssl/openssl/CHANGES.md b/deps/openssl/openssl/CHANGES.md index 6f6e73db9b42c5..be359160542604 100644 --- a/deps/openssl/openssl/CHANGES.md +++ b/deps/openssl/openssl/CHANGES.md @@ -28,12 +28,110 @@ breaking changes, and mappings for the large list of deprecated functions. 
 [Migration guide]: https://github.com/openssl/openssl/tree/master/doc/man7/migration_guide.pod
 
-### Changes between 3.0.13 and 3.0.13+quic [30 Jan 2024]
+### Changes between 3.0.15 and 3.0.15+quic [3 Sep 2024]
 
  * Add QUIC API support from BoringSSL
 
    *Todd Short*
 
+### Changes between 3.0.14 and 3.0.15 [3 Sep 2024]
+
+ * Fixed possible denial of service in X.509 name checks.
+
+   Applications performing certificate name checks (e.g., TLS clients checking
+   server certificates) may attempt to read an invalid memory address when
+   comparing the expected name with an `otherName` subject alternative name of
+   an X.509 certificate. This may result in an exception that terminates the
+   application program.
+
+   ([CVE-2024-6119])
+
+   *Viktor Dukhovni*
+
+ * Fixed possible buffer overread in SSL_select_next_proto().
+
+   Calling the OpenSSL API function SSL_select_next_proto with an empty
+   supported client protocols buffer may cause a crash or memory contents
+   to be sent to the peer.
+
+   ([CVE-2024-5535])
+
+   *Matt Caswell*
+
+### Changes between 3.0.13 and 3.0.14 [4 Jun 2024]
+
+ * Fixed potential use after free after SSL_free_buffers() is called.
+
+   The SSL_free_buffers function is used to free the internal OpenSSL
+   buffer used when processing an incoming record from the network.
+   The call is only expected to succeed if the buffer is not currently
+   in use. However, two scenarios have been identified where the buffer
+   is freed even when still in use.
+
+   The first scenario occurs where a record header has been received
+   from the network and processed by OpenSSL, but the full record body
+   has not yet arrived. In this case calling SSL_free_buffers will succeed
+   even though a record has only been partially processed and the buffer
+   is still in use.
+
+   The second scenario occurs where a full record containing application
+   data has been received and processed by OpenSSL but the application has
+   only read part of this data. Again a call to SSL_free_buffers will
+   succeed even though the buffer is still in use.
+
+   ([CVE-2024-4741])
+
+   *Matt Caswell*
+
+ * Fixed an issue where checking excessively long DSA keys or parameters may
+   be very slow.
+
+   Applications that use the functions EVP_PKEY_param_check() or
+   EVP_PKEY_public_check() to check a DSA public key or DSA parameters may
+   experience long delays. Where the key or parameters that are being checked
+   have been obtained from an untrusted source this may lead to a Denial of
+   Service.
+
+   To resolve this issue DSA keys larger than OPENSSL_DSA_MAX_MODULUS_BITS
+   will now fail the check immediately with a DSA_R_MODULUS_TOO_LARGE error
+   reason.
+
+   ([CVE-2024-4603])
+
+   *Tomáš Mráz*
+
+ * Improved EC/DSA nonce generation routines to avoid bias and timing
+   side channel leaks.
+
+   Thanks to Florian Sieck from Universität zu Lübeck and George Pantelakis
+   and Hubert Kario from Red Hat for reporting the issues.
+
+   *Tomáš Mráz and Paul Dale*
+
+ * Fixed an issue where some non-default TLS server configurations can cause
+   unbounded memory growth when processing TLSv1.3 sessions. An attacker may
+   exploit certain server configurations to trigger unbounded memory growth that
+   would lead to a Denial of Service
+
+   This problem can occur in TLSv1.3 if the non-default SSL_OP_NO_TICKET option
+   is being used (but not if early_data is also configured and the default
+   anti-replay protection is in use). In this case, under certain conditions,
+   the session cache can get into an incorrect state and it will fail to flush
+   properly as it fills. The session cache will continue to grow in an unbounded
+   manner. A malicious client could deliberately create the scenario for this
+   failure to force a Denial of Service. It may also happen by accident in
+   normal operation.
+
+   ([CVE-2024-2511])
+
+   *Matt Caswell*
+
+ * New atexit configuration switch, which controls whether the OPENSSL_cleanup
+   is registered when libcrypto is unloaded. This can be used on platforms
+   where using atexit() from shared libraries causes crashes on exit.
+
+   *Randall S. Becker*
+
 ### Changes between 3.0.12 and 3.0.13 [30 Jan 2024]
 
  * A file in PKCS12 format can contain certificates and keys and may come from
@@ -19830,6 +19928,11 @@ ndif
+[CVE-2024-6119]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-6119
+[CVE-2024-5535]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-5535
+[CVE-2024-4741]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-4741
+[CVE-2024-4603]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-4603
+[CVE-2024-2511]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-2511
 [CVE-2024-0727]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-0727
 [CVE-2023-6237]: https://www.openssl.org/news/vulnerabilities.html#CVE-2023-6237
 [CVE-2023-6129]: https://www.openssl.org/news/vulnerabilities.html#CVE-2023-6129
diff --git a/deps/openssl/openssl/CONTRIBUTING.md b/deps/openssl/openssl/CONTRIBUTING.md
index 15490fd9f620d7..cced15347d0542 100644
--- a/deps/openssl/openssl/CONTRIBUTING.md
+++ b/deps/openssl/openssl/CONTRIBUTING.md
@@ -3,13 +3,13 @@ HOW TO CONTRIBUTE TO OpenSSL
 
 Please visit our [Getting Started] page for other ideas about how to contribute.
 
- [Getting Started]: 
+ [Getting Started]: 
 
 Development is done on GitHub in the [openssl/openssl] repository.
 
 [openssl/openssl]: 
 
-To request new a feature, ask a question, or report a bug,
+To request a new feature, ask a question, or report a bug,
 please open an [issue on GitHub](https://github.com/openssl/openssl/issues).
 
 To submit a patch or implement a new feature, please open a
@@ -67,7 +67,8 @@ guidelines:
    often. We do not accept merge commits, you will have to remove them
    (usually by rebasing) before it will be acceptable.
 
- 4. Code provided should follow our [coding style] and compile without warnings.
+ 4. Code provided should follow our [coding style] and [documentation policy]
+    and compile without warnings.
    There is a [Perl tool](util/check-format.pl)
    that helps finding code formatting mistakes and other coding style nits.
    Where `gcc` or `clang` is available, you should use the
@@ -76,7 +77,8 @@ guidelines:
    Clean builds via GitHub Actions are required. They are started automatically
    whenever a PR is created or updated by committers.
 
-   [coding style]: https://www.openssl.org/policies/technical/coding-style.html
+   [coding style]: https://openssl-library.org/policies/technical/coding-style/
+   [documentation policy]: https://openssl-library.org/policies/technical/documentation-policy/
 
 5. When at all possible, code contributions should include tests. These can
    either be added to an existing test, or completely new.  Please see
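An editorial aside on the CVE-2024-6119 entry in the CHANGES.md hunk above (this sketch and the ones that follow are illustrations, not part of the patch): the affected comparison runs inside certificate verification whenever a client pins an expected name. A minimal sketch of that kind of setup; the helper name is hypothetical and `ctx` is assumed to be an existing `SSL_CTX`:

```c
#include <openssl/ssl.h>
#include <openssl/x509v3.h>

/* Hypothetical helper: a client SSL object with hostname checking on.
 * During verification the expected name set here is compared against the
 * peer certificate's subject alternative names -- including otherName
 * entries, the case hardened by the CVE-2024-6119 fix. */
static SSL *new_checked_client(SSL_CTX *ctx, const char *host)
{
    SSL *ssl = SSL_new(ctx);

    if (ssl == NULL)
        return NULL;
    SSL_set_hostflags(ssl, X509_CHECK_FLAG_NO_PARTIAL_WILDCARDS);
    if (!SSL_set1_host(ssl, host)) {
        SSL_free(ssl);
        return NULL;
    }
    SSL_set_verify(ssl, SSL_VERIFY_PEER, NULL);
    return ssl;
}
```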
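For the CVE-2024-5535 entry: SSL_select_next_proto() is normally called from a server-side ALPN selection callback, and the advisory's trigger is an empty protocol list. A hedged sketch of defensive usage; the callback name and protocol list are illustrative, and it would be registered with `SSL_CTX_set_alpn_select_cb(ctx, alpn_select_cb, NULL)`:

```c
#include <openssl/ssl.h>

/* Our ALPN preference list in wire format: length-prefixed names. */
static const unsigned char alpn_prefs[] = {
    2, 'h', '2',
    8, 'h', 't', 't', 'p', '/', '1', '.', '1'
};

static int alpn_select_cb(SSL *ssl, const unsigned char **out,
                          unsigned char *outlen, const unsigned char *in,
                          unsigned int inlen, void *arg)
{
    unsigned char *sel;
    unsigned char sel_len;

    (void)ssl;
    (void)arg;
    if (inlen == 0)                        /* never pass an empty peer list */
        return SSL_TLSEXT_ERR_ALERT_FATAL;
    /* OPENSSL_NPN_NO_OVERLAP means the fallback (first client protocol)
     * was returned; for ALPN that must be treated as a failure. */
    if (SSL_select_next_proto(&sel, &sel_len, alpn_prefs, sizeof(alpn_prefs),
                              in, inlen) != OPENSSL_NPN_NEGOTIATED)
        return SSL_TLSEXT_ERR_ALERT_FATAL;
    *out = sel;
    *outlen = sel_len;
    return SSL_TLSEXT_ERR_OK;
}
```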
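For the CVE-2024-4741 entry: both problem scenarios involve calling SSL_free_buffers() while record data is still buffered. A sketch of the guarded call pattern, assuming an application that only sheds buffers on idle connections:

```c
#include <openssl/ssl.h>

/* Release the internal record buffers of an idle connection to save
 * memory. Skip the call while any record data is buffered; from 3.0.14
 * the call itself also returns 0 instead of freeing an in-use buffer
 * (partially received record, or partially read application data). */
static void shed_idle_buffers(SSL *ssl)
{
    if (SSL_pending(ssl) > 0 || SSL_has_pending(ssl))
        return;                  /* data still buffered: keep the buffers */
    if (SSL_free_buffers(ssl) != 1) {
        /* Buffer still in use after all; harmless, try again later. */
    }
}
```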
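For the CVE-2024-4603 entry, a sketch of the checks it concerns, on the assumption that `pkey` arrived from an untrusted source; the helper name is invented:

```c
#include <openssl/evp.h>

/* Validate untrusted DSA parameters and public key before use. With
 * 3.0.14+, oversized moduli (> OPENSSL_DSA_MAX_MODULUS_BITS) now fail
 * immediately with DSA_R_MODULUS_TOO_LARGE instead of stalling. */
static int check_untrusted_key(EVP_PKEY *pkey)
{
    EVP_PKEY_CTX *ctx = EVP_PKEY_CTX_new_from_pkey(NULL, pkey, NULL);
    int ok = 0;

    if (ctx == NULL)
        return 0;
    ok = EVP_PKEY_param_check(ctx) == 1 && EVP_PKEY_public_check(ctx) == 1;
    EVP_PKEY_CTX_free(ctx);
    return ok;
}
```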
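For the CVE-2024-2511 entry, the affected class of configuration is easy to recognize in application code; a sketch (server setup abbreviated):

```c
#include <openssl/ssl.h>

/* A server in the affected (non-default) class: TLSv1.3 with tickets
 * disabled, which forces stateful session caching. */
static SSL_CTX *new_ticketless_server(void)
{
    SSL_CTX *ctx = SSL_CTX_new(TLS_server_method());

    if (ctx != NULL)
        SSL_CTX_set_options(ctx, SSL_OP_NO_TICKET);
    /* Before 3.0.14 the session cache behind this setup could stop
     * flushing and grow without bound; upgrading is the remedy, no
     * application-side change is required. */
    return ctx;
}
```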
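The new `atexit` knob in the last CHANGES.md entry is what the `+ "atexit"` additions to `@disablables` in the configdata.pm hunks earlier in this diff reflect; it is documented as `no-atexit` in the INSTALL.md and NOTES-NONSTOP.md hunks further down. The application-side counterpart, sketched under the assumption of a libcrypto configured with `no-atexit`:

```c
#include <openssl/crypto.h>

int main(void)
{
    /* ... normal libcrypto/libssl usage ... */

    /* With `no-atexit`, OPENSSL_cleanup() is no longer registered via
     * atexit(3), so call it explicitly if global state should be released
     * before exit. This is optional: skipping it only leaves
     * process-lifetime allocations behind, but after this call the
     * library cannot be re-initialised in the same process. */
    OPENSSL_cleanup();
    return 0;
}
```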
diff --git a/deps/openssl/openssl/Configurations/10-main.conf b/deps/openssl/openssl/Configurations/10-main.conf
index ff8af71463188c..e74adb50cc3cd3 100644
--- a/deps/openssl/openssl/Configurations/10-main.conf
+++ b/deps/openssl/openssl/Configurations/10-main.conf
@@ -784,7 +784,14 @@ my %targets = (
         asm_arch         => 'aarch64',
         perlasm_scheme   => "linux64",
     },
-
+    "linux-arm64ilp32-clang" => { # clang config abi by --target
+        inherit_from     => [ "linux-generic32" ],
+        CC               => "clang",
+        CXX              => "clang++",
+        bn_ops           => "SIXTY_FOUR_BIT RC4_CHAR",
+        asm_arch         => 'aarch64',
+        perlasm_scheme   => "linux64",
+    },
     "linux-mips32" => {
         # Configure script adds minimally required -march for assembly
         # support, if no -march was specified at command line.
@@ -1257,6 +1264,25 @@ my %targets = (
         AR               => add("-X32"),
         RANLIB           => add("-X32"),
     },
+    # To enable openxl compiler for aix
+    # If 17.1 openxl runtime is available, -latomic can be used
+    # instead of -DBROKEN_CLANG_ATOMICS
+    "aix-clang" => {
+        inherit_from     => [ "aix-common" ],
+        CC               => "ibm-clang",
+        CFLAGS           => picker(debug   => "-O0 -g",
+                                   release => "-O"),
+        cflags           => combine("-Wno-implicit-function-declaration -mcmodel=large -DBROKEN_CLANG_ATOMICS",
+                                    threads("-pthread")),
+        ex_libs          => add(threads("-pthread")),
+        bn_ops           => "BN_LLONG RC4_CHAR",
+        asm_arch         => 'ppc32',
+        perlasm_scheme   => "aix32",
+        shared_cflag     => "-fpic",
+        shared_ldflag    => add("-shared"),
+        AR               => add("-X32"),
+        RANLIB           => add("-X32"),
+    },
     "aix64-cc" => {
         inherit_from     => [ "aix-common" ],
         CC               => "cc",
@@ -1275,6 +1301,23 @@ my %targets = (
         AR               => add("-X64"),
         RANLIB           => add("-X64"),
     },
+    "aix64-clang" => {
+        inherit_from     => [ "aix-common" ],
+        CC               => "ibm-clang",
+        CFLAGS           => picker(debug   => "-O0 -g",
+                                   release => "-O"),
+        cflags           => combine("-maix64 -Wno-implicit-function-declaration -mcmodel=large",
+                                    threads("-pthread")),
+        ex_libs          => add(threads("-pthread")),
+        bn_ops           => "SIXTY_FOUR_BIT_LONG RC4_CHAR",
+        asm_arch         => 'ppc64',
+        perlasm_scheme   => "aix64",
+        shared_cflag     => "-fpic",
+        shared_ldflag    => add("-shared"),
+        shared_extension => "64.so.\$(SHLIB_VERSION_NUMBER)",
+        AR               => add("-X64"),
+        RANLIB           => add("-X64"),
+    },
 
 # SIEMENS BS2000/OSD: an EBCDIC-based mainframe
 "BS2000-OSD" => {
diff --git a/deps/openssl/openssl/Configurations/15-ios.conf b/deps/openssl/openssl/Configurations/15-ios.conf
index 54d37f63f445d4..84c9cfeb3a1400 100644
--- a/deps/openssl/openssl/Configurations/15-ios.conf
+++ b/deps/openssl/openssl/Configurations/15-ios.conf
@@ -10,7 +10,7 @@ my %targets = (
         template         => 1,
         inherit_from     => [ "darwin-common" ],
         sys_id           => "iOS",
-        disable          => [ "shared", "async" ],
+        disable          => [ "async" ],
     },
     "ios-xcrun" => {
         inherit_from     => [ "ios-common" ],
@@ -49,16 +49,16 @@ my %targets = (
     #
     "iphoneos-cross" => {
         inherit_from     => [ "ios-common" ],
-        cflags           => add("-isysroot \$(CROSS_TOP)/SDKs/\$(CROSS_SDK) -fno-common"),
+        cflags           => add("-isysroot \"\$(CROSS_TOP)/SDKs/\$(CROSS_SDK)\" -fno-common"),
     },
     "ios-cross" => {
         inherit_from     => [ "ios-xcrun" ],
         CC               => "cc",
-        cflags           => add("-isysroot \$(CROSS_TOP)/SDKs/\$(CROSS_SDK)"),
+        cflags           => add("-isysroot \"\$(CROSS_TOP)/SDKs/\$(CROSS_SDK)\""),
     },
     "ios64-cross" => {
         inherit_from     => [ "ios64-xcrun" ],
         CC               => "cc",
-        cflags           => add("-isysroot \$(CROSS_TOP)/SDKs/\$(CROSS_SDK)"),
+        cflags           => add("-isysroot \"\$(CROSS_TOP)/SDKs/\$(CROSS_SDK)\""),
     },
 );
diff --git a/deps/openssl/openssl/Configurations/unix-Makefile.tmpl b/deps/openssl/openssl/Configurations/unix-Makefile.tmpl
index 3754595d38b50a..644540397de596 100644
--- a/deps/openssl/openssl/Configurations/unix-Makefile.tmpl
+++ b/deps/openssl/openssl/Configurations/unix-Makefile.tmpl
@@ -21,7 +21,7 @@
 sub dependmagic {
     my $target = shift;
 
-    return "$target: build_generated\n\t\$(MAKE) depend && \$(MAKE) _$target\n_$target";
+    return "$target: build_generated\n\t\"\$(MAKE)\" depend && \"\$(MAKE)\" _$target\n_$target";
 }
 
 our $COLUMNS = $ENV{COLUMNS};
@@ -527,7 +527,7 @@ all: build_sw build_docs
 
 test: tests
 {- dependmagic('tests'); -}: build_programs_nodep build_modules_nodep link-utils
-	$(MAKE) run_tests
+	"$(MAKE)" run_tests
 run_tests: FORCE
 	@ : {- output_off() if $disabled{tests}; "" -}
 	( SRCTOP=$(SRCDIR) \
@@ -542,7 +542,7 @@ run_tests: FORCE
 
 list-tests:
 	@ : {- output_off() if $disabled{tests}; "" -}
-	$(MAKE) run_tests TESTS=list
+	"$(MAKE)" run_tests TESTS=list
 	@ : {- if ($disabled{tests}) { output_on(); } else { output_off(); } "" -}
 	@echo "Tests are not supported with your chosen Configure options"
 	@ : {- output_on() if !$disabled{tests}; "" -}
@@ -1193,12 +1193,12 @@ providers/fips.module.sources.new: configdata.pm
 	cd sources-tmp \
 	&& $$srcdir/Configure --banner=Configured enable-fips -O0 \
 	&& ./configdata.pm --query 'get_sources("providers/fips")' > sources1 \
-	&& $(MAKE) -sj 4 build_generated providers/fips.so \
+	&& "$(MAKE)" -sj 4 build_generated providers/fips.so \
 	&& find . -name '*.d' | xargs cat > dep1 \
-	&& $(MAKE) distclean \
+	&& "$(MAKE)" distclean \
 	&& $$srcdir/Configure --banner=Configured enable-fips no-asm -O0 \
 	&& ./configdata.pm --query 'get_sources("providers/fips")' > sources2 \
-	&& $(MAKE) -sj 4 build_generated providers/fips.so \
+	&& "$(MAKE)" -sj 4 build_generated providers/fips.so \
 	&& find . -name '*.d' | xargs cat > dep2 \
 	&& cat sources1 sources2 \
 	| grep -v ' : \\$$' | grep -v util/providers.num \
@@ -1332,7 +1332,7 @@ ordinals: build_generated $(SSLHEADERS)
 
 test_ordinals:
-	$(MAKE) run_tests TESTS=test_ordinals
+	"$(MAKE)" run_tests TESTS=test_ordinals
 
 tags TAGS: FORCE
 	rm -f TAGS tags
diff --git a/deps/openssl/openssl/Configure b/deps/openssl/openssl/Configure
index 0d0e229eb77d67..1aa660a46c4dc4 100755
--- a/deps/openssl/openssl/Configure
+++ b/deps/openssl/openssl/Configure
@@ -1,6 +1,6 @@
 #! /usr/bin/env perl
 # -*- mode: perl; -*-
-# Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved.
+# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved.
 #
 # Licensed under the Apache License 2.0 (the "License"). You may not use
 # this file except in compliance with the License. You can obtain a copy
@@ -178,7 +178,6 @@ my @gcc_devteam_warn = qw(
 # -Wextended-offsetof -- no, needed in CMS ASN1 code
 my @clang_devteam_warn = qw(
     -Wno-unknown-warning-option
-    -Wswitch-default
     -Wno-parentheses-equality
     -Wno-language-extension-token
     -Wno-extended-offsetof
@@ -405,6 +404,7 @@ my @disablables = (
     "asan",
     "asm",
     "async",
+    "atexit",
     "autoalginit",
     "autoerrinit",
     "autoload-config",
@@ -1585,7 +1585,7 @@ if (!$disabled{makedepend}) {
     disable('unavailable', 'makedepend') unless $config{makedep_scheme};
 }
 
-if (!$disabled{asm} && !$predefined_C{__MACH__} && $^O ne 'VMS') {
+if (!$disabled{asm} && !$predefined_C{__MACH__} && $^O ne 'VMS' && !$predefined_C{_AIX}) {
     # probe for -Wa,--noexecstack option...
     if ($predefined_C{__clang__}) {
         # clang has builtin assembler, which doesn't recognize --help,
@@ -3409,6 +3409,13 @@ sub absolutedir {
         return rel2abs($dir);
     }
 
+    # realpath() on Windows seems to check if the directory actually exists,
+    # which isn't what is wanted here.  All we want to know is if a directory
+    # spec is absolute, not if it exists.
+    if ($^O eq "MSWin32") {
+        return rel2abs($dir);
+    }
+
     # We use realpath() on Unix, since no other will properly clean out
     # a directory spec.
     use Cwd qw/realpath/;
diff --git a/deps/openssl/openssl/FAQ.md b/deps/openssl/openssl/FAQ.md
deleted file mode 100644
index 30f5010ce3a480..00000000000000
--- a/deps/openssl/openssl/FAQ.md
+++ /dev/null
@@ -1,6 +0,0 @@
-Frequently Asked Questions (FAQ)
-================================
-
-The [Frequently Asked Questions][FAQ] are now maintained on the OpenSSL homepage.
-
- [FAQ]: https://www.openssl.org/docs/faq.html
diff --git a/deps/openssl/openssl/INSTALL.md b/deps/openssl/openssl/INSTALL.md
index 87b1faef90f719..107a9b56e4c689 100644
--- a/deps/openssl/openssl/INSTALL.md
+++ b/deps/openssl/openssl/INSTALL.md
@@ -480,7 +480,7 @@ Setting the FIPS HMAC key
 
 As part of its self-test validation, the FIPS module must verify itself by
 performing a SHA-256 HMAC computation on itself. The default key is
-the SHA256 value of "the holy handgrenade of antioch" and is sufficient
+the SHA256 value of "holy hand grenade of antioch" and is sufficient
 for meeting the FIPS requirements.
 
 To change the key to a different value, use this flag. The value should
@@ -546,6 +553,13 @@ be used even with this option.
 
 Do not build support for async operations.
 
+### no-atexit
+
+Do not use `atexit()` in libcrypto builds.
+
+`atexit()` has varied semantics between platforms and can cause SIGSEGV in some
+circumstances. This option disables the atexit registration of OPENSSL_cleanup.
+
 ### no-autoalginit
 
 Don't automatically load all supported ciphers and digests.
@@ -1161,7 +1168,7 @@ Configure OpenSSL
 ### Automatic Configuration
 
 In previous version, the `config` script determined the platform type and
-compiler and then called `Configure`. Starting with this release, they are
+compiler and then called `Configure`. Starting with version 3.0, they are
 the same.
 
 #### Unix / Linux / macOS
@@ -1615,7 +1622,7 @@ More about our support resources can be found in the [SUPPORT] file.
 
 ### Configuration Errors
 
-If the `./Configure` or `./Configure` command fails with an error message,
+If the `./config` or `./Configure` command fails with an error message,
 read the error message carefully and try to figure out whether you made
 a mistake (e.g., by providing a wrong option), or whether the script is
 working incorrectly. If you think you encountered a bug, please
diff --git a/deps/openssl/openssl/NEWS.md b/deps/openssl/openssl/NEWS.md
index d9a48b157eb14b..e0a81703ee8dc3 100644
--- a/deps/openssl/openssl/NEWS.md
+++ b/deps/openssl/openssl/NEWS.md
@@ -18,6 +18,31 @@ OpenSSL Releases
 OpenSSL 3.0
 -----------
 
+### Major changes between OpenSSL 3.0.14 and OpenSSL 3.0.15 [3 Sep 2024]
+
+OpenSSL 3.0.15 is a security patch release. The most severe CVE fixed in this
+release is Moderate.
+
+This release incorporates the following bug fixes and mitigations:
+
+  * Fixed possible denial of service in X.509 name checks
+    ([CVE-2024-6119])
+
+  * Fixed possible buffer overread in SSL_select_next_proto()
+    ([CVE-2024-5535])
+
+### Major changes between OpenSSL 3.0.13 and OpenSSL 3.0.14 [4 Jun 2024]
+
+  * Fixed potential use after free after SSL_free_buffers() is called
+    ([CVE-2024-4741])
+
+  * Fixed an issue where checking excessively long DSA keys or parameters may
+    be very slow
+    ([CVE-2024-4603])
+
+  * Fixed unbounded memory growth with session handling in TLSv1.3
+    ([CVE-2024-2511])
+
 ### Major changes between OpenSSL 3.0.12 and OpenSSL 3.0.13 [30 Jan 2024]
 
   * Fixed PKCS12 Decoding crashes
@@ -1470,6 +1495,11 @@ OpenSSL 0.9.x
+[CVE-2024-6119]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-6119
+[CVE-2024-5535]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-5535
+[CVE-2024-4741]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-4741
+[CVE-2024-4603]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-4603
+[CVE-2024-2511]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-2511
 [CVE-2024-0727]: https://www.openssl.org/news/vulnerabilities.html#CVE-2024-0727
 [CVE-2023-6237]: https://www.openssl.org/news/vulnerabilities.html#CVE-2023-6237
 [CVE-2023-6129]: https://www.openssl.org/news/vulnerabilities.html#CVE-2023-6129
diff --git a/deps/openssl/openssl/NOTES-NONSTOP.md b/deps/openssl/openssl/NOTES-NONSTOP.md
index 68438b998884e2..ab13de7d3a760a 100644
--- a/deps/openssl/openssl/NOTES-NONSTOP.md
+++ b/deps/openssl/openssl/NOTES-NONSTOP.md
@@ -56,7 +56,10 @@ relating to `atexit()` processing when a shared library is unloaded and when
 the program terminates. This limitation applies to all OpenSSL shared library
 components.
 
-A resolution to this situation is under investigation.
+It is possible to configure the build with `no-atexit` to avoid the SIGSEGV.
+Preferably, you can explicitly call `OPENSSL_cleanup()` from your application.
+It is not mandatory as it just deallocates various global data structures
+OpenSSL allocated.
 
 About Prefix and OpenSSLDir
 ---------------------------
diff --git a/deps/openssl/openssl/README.md b/deps/openssl/openssl/README.md
index 9b057e1f2c51c1..702cc3979a1826 100644
--- a/deps/openssl/openssl/README.md
+++ b/deps/openssl/openssl/README.md
@@ -4,7 +4,7 @@ What This Is
 This is a fork of [OpenSSL](https://www.openssl.org) to enable QUIC. In
 addition to the website, the official source distribution is at
 . The OpenSSL `README` can be found at
-[README-OpenSSL.md](https://github.com/quictls/openssl/blob/openssl-3.0.13%2Bquic/README-OpenSSL.md)
+[README-OpenSSL.md](https://github.com/quictls/openssl/blob/openssl-3.0.15%2Bquic/README-OpenSSL.md)
 
 This fork adds APIs that can be used by QUIC implementations for connection
 handshakes. Quoting the IETF Working group
diff --git a/deps/openssl/openssl/VERSION.dat b/deps/openssl/openssl/VERSION.dat
index e11fb41ddac0fc..9f3b18e8899778 100644
--- a/deps/openssl/openssl/VERSION.dat
+++ b/deps/openssl/openssl/VERSION.dat
@@ -1,7 +1,7 @@
 MAJOR=3
 MINOR=0
-PATCH=13
+PATCH=15
 PRE_RELEASE_TAG=
 BUILD_METADATA=quic
-RELEASE_DATE="30 Jan 2024"
+RELEASE_DATE="3 Sep 2024"
 SHLIB_VERSION=81.3
diff --git a/deps/openssl/openssl/apps/cms.c b/deps/openssl/openssl/apps/cms.c
index 3994cb0fcd58cf..abb9f196a76090 100644
--- a/deps/openssl/openssl/apps/cms.c
+++ b/deps/openssl/openssl/apps/cms.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2008-2023 The OpenSSL Project Authors.
All Rights Reserved. + * Copyright 2008-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -132,7 +132,7 @@ const OPTIONS cms_options[] = { {"binary", OPT_BINARY, '-', "Treat input as binary: do not translate to canonical form"}, {"crlfeol", OPT_CRLFEOL, '-', - "Use CRLF as EOL termination instead of CR only" }, + "Use CRLF as EOL termination instead of LF only" }, {"asciicrlf", OPT_ASCIICRLF, '-', "Perform CRLF canonicalisation when signing"}, diff --git a/deps/openssl/openssl/apps/dgst.c b/deps/openssl/openssl/apps/dgst.c index 3f02af0d5738ab..51383bec26ca07 100644 --- a/deps/openssl/openssl/apps/dgst.c +++ b/deps/openssl/openssl/apps/dgst.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -478,7 +478,7 @@ int dgst_main(int argc, char **argv) static void show_digests(const OBJ_NAME *name, void *arg) { struct doall_dgst_digests *dec = (struct doall_dgst_digests *)arg; - const EVP_MD *md = NULL; + EVP_MD *md = NULL; /* Filter out signed digests (a.k.a signature algorithms) */ if (strstr(name->name, "rsa") != NULL || strstr(name->name, "RSA") != NULL) @@ -490,8 +490,7 @@ static void show_digests(const OBJ_NAME *name, void *arg) /* Filter out message digests that we cannot use */ md = EVP_MD_fetch(app_get0_libctx(), name->name, app_get0_propq()); if (md == NULL) { - md = EVP_get_digestbyname(name->name); - if (md == NULL) + if (EVP_get_digestbyname(name->name) == NULL) return; } @@ -502,6 +501,8 @@ static void show_digests(const OBJ_NAME *name, void *arg) } else { BIO_printf(dec->bio, " "); } + + EVP_MD_free(md); } /* diff --git a/deps/openssl/openssl/apps/lib/opt.c b/deps/openssl/openssl/apps/lib/opt.c index d56964dbe7ba9a..88db9ad6947b54 100644 --- a/deps/openssl/openssl/apps/lib/opt.c +++ b/deps/openssl/openssl/apps/lib/opt.c @@ -1,5 +1,5 @@ /* - * Copyright 2015-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -616,7 +616,7 @@ int opt_uintmax(const char *value, ossl_uintmax_t *result) opt_number_error(value); return 0; } - *result = (ossl_intmax_t)m; + *result = (ossl_uintmax_t)m; errno = oerrno; return 1; } diff --git a/deps/openssl/openssl/apps/lib/s_cb.c b/deps/openssl/openssl/apps/lib/s_cb.c index f2ddd94c3de4df..6440b496099e2d 100644 --- a/deps/openssl/openssl/apps/lib/s_cb.c +++ b/deps/openssl/openssl/apps/lib/s_cb.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -649,7 +649,7 @@ void msg_cb(int write_p, int version, int content_type, const void *buf, (void)BIO_flush(bio); } -static STRINT_PAIR tlsext_types[] = { +static const STRINT_PAIR tlsext_types[] = { {"server name", TLSEXT_TYPE_server_name}, {"max fragment length", TLSEXT_TYPE_max_fragment_length}, {"client certificate URL", TLSEXT_TYPE_client_certificate_url}, @@ -688,6 +688,7 @@ static STRINT_PAIR tlsext_types[] = { {"psk kex modes", TLSEXT_TYPE_psk_kex_modes}, {"certificate authorities", TLSEXT_TYPE_certificate_authorities}, {"post handshake auth", TLSEXT_TYPE_post_handshake_auth}, + {"early_data", TLSEXT_TYPE_early_data}, {NULL} }; @@ -1318,7 +1319,8 @@ int ssl_load_stores(SSL_CTX *ctx, if (vfyCAstore != NULL && !X509_STORE_load_store(vfy, vfyCAstore)) goto err; add_crls_store(vfy, crls); - SSL_CTX_set1_verify_cert_store(ctx, vfy); + if (SSL_CTX_set1_verify_cert_store(ctx, vfy) == 0) + goto err; if (crl_download) store_setup_crl_download(vfy); } @@ -1332,7 +1334,8 @@ int ssl_load_stores(SSL_CTX *ctx, goto err; if (chCAstore != NULL && !X509_STORE_load_store(ch, chCAstore)) goto err; - SSL_CTX_set1_chain_cert_store(ctx, ch); + if (SSL_CTX_set1_chain_cert_store(ctx, ch) == 0) + goto err; } rv = 1; err: diff --git a/deps/openssl/openssl/apps/list.c b/deps/openssl/openssl/apps/list.c index 0fcbcbb083cbbc..7d3136a8a161e1 100644 --- a/deps/openssl/openssl/apps/list.c +++ b/deps/openssl/openssl/apps/list.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1230,6 +1230,7 @@ static void list_provider_info(void) } if (OSSL_PROVIDER_do_all(NULL, &collect_providers, providers) != 1) { + sk_OSSL_PROVIDER_free(providers); BIO_printf(bio_err, "ERROR: Memory allocation\n"); return; } diff --git a/deps/openssl/openssl/apps/ocsp.c b/deps/openssl/openssl/apps/ocsp.c index 821e224c6ce45d..fb3105da552660 100644 --- a/deps/openssl/openssl/apps/ocsp.c +++ b/deps/openssl/openssl/apps/ocsp.c @@ -1,5 +1,5 @@ /* - * Copyright 2001-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -11,7 +11,7 @@ #ifdef OPENSSL_SYS_VMS /* So fd_set and friends get properly defined on OpenVMS */ -# define _XOPEN_SOURCE_EXTENDED +# define _XOPEN_SOURCE_EXTENDED 1 #endif #include diff --git a/deps/openssl/openssl/apps/pkcs12.c b/deps/openssl/openssl/apps/pkcs12.c index b442d358f8b703..ab78903ee9cdcf 100644 --- a/deps/openssl/openssl/apps/pkcs12.c +++ b/deps/openssl/openssl/apps/pkcs12.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -712,9 +712,6 @@ int pkcs12_main(int argc, char **argv) in = bio_open_default(infile, 'r', FORMAT_PKCS12); if (in == NULL) goto end; - out = bio_open_owner(outfile, FORMAT_PEM, private); - if (out == NULL) - goto end; p12 = PKCS12_init_ex(NID_pkcs7_data, app_get0_libctx(), app_get0_propq()); if (p12 == NULL) { @@ -814,6 +811,11 @@ int pkcs12_main(int argc, char **argv) dump: assert(private); + + out = bio_open_owner(outfile, FORMAT_PEM, private); + if (out == NULL) + goto end; + if (!dump_certs_keys_p12(out, p12, cpass, -1, options, passout, enc)) { BIO_printf(bio_err, "Error outputting keys and certificates\n"); ERR_print_errors(bio_err); @@ -855,7 +857,11 @@ int dump_certs_keys_p12(BIO *out, const PKCS12 *p12, const char *pass, } else if (bagnid == NID_pkcs7_encrypted) { if (options & INFO) { BIO_printf(bio_err, "PKCS7 Encrypted data: "); - alg_print(p7->d.encrypted->enc_data->algorithm); + if (p7->d.encrypted == NULL) { + BIO_printf(bio_err, "\n"); + } else { + alg_print(p7->d.encrypted->enc_data->algorithm); + } } bags = PKCS12_unpack_p7encdata(p7, pass, passlen); } else { diff --git a/deps/openssl/openssl/apps/req.c b/deps/openssl/openssl/apps/req.c index c7d4c7822cda91..2fc53d4bfcfa23 100644 --- a/deps/openssl/openssl/apps/req.c +++ b/deps/openssl/openssl/apps/req.c @@ -569,7 +569,7 @@ int req_main(int argc, char **argv) X509V3_CTX ctx; X509V3_set_ctx_test(&ctx); - X509V3_set_nconf(&ctx, addext_conf); + X509V3_set_nconf(&ctx, req_conf); if (!X509V3_EXT_add_nconf(addext_conf, &ctx, "default", NULL)) { BIO_printf(bio_err, "Error checking extensions defined using -addext\n"); goto end; diff --git a/deps/openssl/openssl/apps/smime.c b/deps/openssl/openssl/apps/smime.c index 52b4a01c232f9f..651294e46daa92 100644 --- a/deps/openssl/openssl/apps/smime.c +++ b/deps/openssl/openssl/apps/smime.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -118,7 +118,7 @@ const OPTIONS smime_options[] = { "Do not load certificates from the default certificates store"}, {"nochain", OPT_NOCHAIN, '-', "set PKCS7_NOCHAIN so certificates contained in the message are not used as untrusted CAs" }, - {"crlfeol", OPT_CRLFEOL, '-', "Use CRLF as EOL termination instead of CR only"}, + {"crlfeol", OPT_CRLFEOL, '-', "Use CRLF as EOL termination instead of LF only"}, OPT_R_OPTIONS, OPT_V_OPTIONS, diff --git a/deps/openssl/openssl/apps/speed.c b/deps/openssl/openssl/apps/speed.c index 1113d775b8ab98..d8e2c70e6128b5 100644 --- a/deps/openssl/openssl/apps/speed.c +++ b/deps/openssl/openssl/apps/speed.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * * Licensed under the Apache License 2.0 (the "License"). 
You may not use @@ -727,8 +727,12 @@ static int EVP_Update_loop(void *args) unsigned char *buf = tempargs->buf; EVP_CIPHER_CTX *ctx = tempargs->ctx; int outl, count, rc; + unsigned char faketag[16] = { 0xcc }; if (decrypt) { + if (EVP_CIPHER_get_flags(EVP_CIPHER_CTX_get0_cipher(ctx)) & EVP_CIPH_FLAG_AEAD_CIPHER) { + (void)EVP_CIPHER_CTX_ctrl(ctx, EVP_CTRL_AEAD_SET_TAG, sizeof(faketag), faketag); + } for (count = 0; COND(c[D_EVP][testnum]); count++) { rc = EVP_DecryptUpdate(ctx, buf, &outl, buf, lengths[testnum]); if (rc != 1) { diff --git a/deps/openssl/openssl/apps/ts.c b/deps/openssl/openssl/apps/ts.c index 57292e187cd223..01b73f380428e8 100644 --- a/deps/openssl/openssl/apps/ts.c +++ b/deps/openssl/openssl/apps/ts.c @@ -1,5 +1,5 @@ /* - * Copyright 2006-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2006-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -535,15 +535,18 @@ static int create_digest(BIO *input, const char *digest, const EVP_MD *md, *md_value = OPENSSL_hexstr2buf(digest, &digest_len); if (*md_value == NULL || md_value_len != digest_len) { - OPENSSL_free(*md_value); - *md_value = NULL; BIO_printf(bio_err, "bad digest, %d bytes " "must be specified\n", md_value_len); - return 0; + goto err; } } rv = md_value_len; err: + if (rv <= 0) { + OPENSSL_free(*md_value); + *md_value = NULL; + rv = 0; + } EVP_MD_CTX_free(md_ctx); return rv; } diff --git a/deps/openssl/openssl/crypto/aes/asm/aesp8-ppc.pl b/deps/openssl/openssl/crypto/aes/asm/aesp8-ppc.pl index 60cf86f52aed20..f7f78d04b0e13f 100755 --- a/deps/openssl/openssl/crypto/aes/asm/aesp8-ppc.pl +++ b/deps/openssl/openssl/crypto/aes/asm/aesp8-ppc.pl @@ -1,5 +1,5 @@ #! /usr/bin/env perl -# Copyright 2014-2020 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2014-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -99,11 +99,12 @@ .long 0x1b000000, 0x1b000000, 0x1b000000, 0x1b000000 ?rev .long 0x0d0e0f0c, 0x0d0e0f0c, 0x0d0e0f0c, 0x0d0e0f0c ?rev .long 0,0,0,0 ?asis +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe Lconsts: mflr r0 bcl 20,31,\$+4 mflr $ptr #vvvvv "distance between . and rcon - addi $ptr,$ptr,-0x48 + addi $ptr,$ptr,-0x58 mtlr r0 blr .long 0 @@ -2405,7 +2406,7 @@ () my $key_=$key2; my ($x00,$x10,$x20,$x30,$x40,$x50,$x60,$x70)=map("r$_",(0,3,26..31)); $x00=0 if ($flavour =~ /osx/); -my ($in0, $in1, $in2, $in3, $in4, $in5 )=map("v$_",(0..5)); +my ($in0, $in1, $in2, $in3, $in4, $in5)=map("v$_",(0..5)); my ($out0, $out1, $out2, $out3, $out4, $out5)=map("v$_",(7,12..16)); my ($twk0, $twk1, $twk2, $twk3, $twk4, $twk5)=map("v$_",(17..22)); my $rndkey0="v23"; # v24-v25 rotating buffer for first found keys @@ -2460,6 +2461,18 @@ () li $x70,0x70 mtspr 256,r0 + # Reverse eighty7 to 0x010101..87 + xxlor 2, 32+$eighty7, 32+$eighty7 + vsldoi $eighty7,$tmp,$eighty7,1 # 0x010101..87 + xxlor 1, 32+$eighty7, 32+$eighty7 + + # Load XOR contents. 
0xf102132435465768798a9bacbdcedfe + mr $x70, r6 + bl Lconsts + lxvw4x 0, $x40, r6 # load XOR contents + mr r6, $x70 + li $x70,0x70 + subi $rounds,$rounds,3 # -4 in total lvx $rndkey0,$x00,$key1 # load key schedule @@ -2502,69 +2515,77 @@ () ?vperm v31,v31,$twk5,$keyperm lvx v25,$x10,$key_ # pre-load round[2] + # Switch to use the following codes with 0x010101..87 to generate tweak. + # eighty7 = 0x010101..87 + # vsrab tmp, tweak, seven # next tweak value, right shift 7 bits + # vand tmp, tmp, eighty7 # last byte with carry + # vaddubm tweak, tweak, tweak # left shift 1 bit (x2) + # xxlor vsx, 0, 0 + # vpermxor tweak, tweak, tmp, vsx + vperm $in0,$inout,$inptail,$inpperm subi $inp,$inp,31 # undo "caller" vxor $twk0,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vand $tmp,$tmp,$eighty7 vxor $out0,$in0,$twk0 - vxor $tweak,$tweak,$tmp + xxlor 32+$in1, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in1 lvx_u $in1,$x10,$inp vxor $twk1,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in1,$in1,$in1,$leperm vand $tmp,$tmp,$eighty7 vxor $out1,$in1,$twk1 - vxor $tweak,$tweak,$tmp + xxlor 32+$in2, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in2 lvx_u $in2,$x20,$inp andi. $taillen,$len,15 vxor $twk2,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in2,$in2,$in2,$leperm vand $tmp,$tmp,$eighty7 vxor $out2,$in2,$twk2 - vxor $tweak,$tweak,$tmp + xxlor 32+$in3, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in3 lvx_u $in3,$x30,$inp sub $len,$len,$taillen vxor $twk3,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in3,$in3,$in3,$leperm vand $tmp,$tmp,$eighty7 vxor $out3,$in3,$twk3 - vxor $tweak,$tweak,$tmp + xxlor 32+$in4, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in4 lvx_u $in4,$x40,$inp subi $len,$len,0x60 vxor $twk4,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in4,$in4,$in4,$leperm vand $tmp,$tmp,$eighty7 vxor $out4,$in4,$twk4 - vxor $tweak,$tweak,$tmp + xxlor 32+$in5, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in5 lvx_u $in5,$x50,$inp addi $inp,$inp,0x60 vxor $twk5,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in5,$in5,$in5,$leperm vand $tmp,$tmp,$eighty7 vxor $out5,$in5,$twk5 - vxor $tweak,$tweak,$tmp + xxlor 32+$in0, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in0 vxor v31,v31,$rndkey0 mtctr $rounds @@ -2590,6 +2611,8 @@ () lvx v25,$x10,$key_ # round[4] bdnz Loop_xts_enc6x + xxlor 32+$eighty7, 1, 1 # 0x010101..87 + subic $len,$len,96 # $len-=96 vxor $in0,$twk0,v31 # xor with last round key vcipher $out0,$out0,v24 @@ -2599,7 +2622,6 @@ () vaddubm $tweak,$tweak,$tweak vcipher $out2,$out2,v24 vcipher $out3,$out3,v24 - vsldoi $tmp,$tmp,$tmp,15 vcipher $out4,$out4,v24 vcipher $out5,$out5,v24 @@ -2607,7 +2629,8 @@ () vand $tmp,$tmp,$eighty7 vcipher $out0,$out0,v25 vcipher $out1,$out1,v25 - vxor $tweak,$tweak,$tmp + xxlor 32+$in1, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in1 vcipher $out2,$out2,v25 vcipher $out3,$out3,v25 vxor $in1,$twk1,v31 @@ -2618,13 +2641,13 @@ () and r0,r0,$len vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vcipher $out0,$out0,v26 vcipher $out1,$out1,v26 vand $tmp,$tmp,$eighty7 vcipher $out2,$out2,v26 vcipher $out3,$out3,v26 - vxor $tweak,$tweak,$tmp + xxlor 
32+$in2, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in2 vcipher $out4,$out4,v26 vcipher $out5,$out5,v26 @@ -2638,7 +2661,6 @@ () vaddubm $tweak,$tweak,$tweak vcipher $out0,$out0,v27 vcipher $out1,$out1,v27 - vsldoi $tmp,$tmp,$tmp,15 vcipher $out2,$out2,v27 vcipher $out3,$out3,v27 vand $tmp,$tmp,$eighty7 @@ -2646,7 +2668,8 @@ () vcipher $out5,$out5,v27 addi $key_,$sp,$FRAME+15 # rewind $key_ - vxor $tweak,$tweak,$tmp + xxlor 32+$in3, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in3 vcipher $out0,$out0,v28 vcipher $out1,$out1,v28 vxor $in3,$twk3,v31 @@ -2655,7 +2678,6 @@ () vcipher $out2,$out2,v28 vcipher $out3,$out3,v28 vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vcipher $out4,$out4,v28 vcipher $out5,$out5,v28 lvx v24,$x00,$key_ # re-pre-load round[1] @@ -2663,7 +2685,8 @@ () vcipher $out0,$out0,v29 vcipher $out1,$out1,v29 - vxor $tweak,$tweak,$tmp + xxlor 32+$in4, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in4 vcipher $out2,$out2,v29 vcipher $out3,$out3,v29 vxor $in4,$twk4,v31 @@ -2673,14 +2696,14 @@ () vcipher $out5,$out5,v29 lvx v25,$x10,$key_ # re-pre-load round[2] vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vcipher $out0,$out0,v30 vcipher $out1,$out1,v30 vand $tmp,$tmp,$eighty7 vcipher $out2,$out2,v30 vcipher $out3,$out3,v30 - vxor $tweak,$tweak,$tmp + xxlor 32+$in5, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in5 vcipher $out4,$out4,v30 vcipher $out5,$out5,v30 vxor $in5,$twk5,v31 @@ -2690,7 +2713,6 @@ () vcipherlast $out0,$out0,$in0 lvx_u $in0,$x00,$inp # load next input block vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vcipherlast $out1,$out1,$in1 lvx_u $in1,$x10,$inp vcipherlast $out2,$out2,$in2 @@ -2703,7 +2725,10 @@ () vcipherlast $out4,$out4,$in4 le?vperm $in2,$in2,$in2,$leperm lvx_u $in4,$x40,$inp - vxor $tweak,$tweak,$tmp + xxlor 10, 32+$in0, 32+$in0 + xxlor 32+$in0, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in0 + xxlor 32+$in0, 10, 10 vcipherlast $tmp,$out5,$in5 # last block might be needed # in stealing mode le?vperm $in3,$in3,$in3,$leperm @@ -2736,6 +2761,8 @@ () mtctr $rounds beq Loop_xts_enc6x # did $len-=96 borrow? + xxlor 32+$eighty7, 2, 2 # 0x870101..01 + addic. $len,$len,0x60 beq Lxts_enc6x_zero cmpwi $len,0x20 @@ -3112,6 +3139,18 @@ () li $x70,0x70 mtspr 256,r0 + # Reverse eighty7 to 0x010101..87 + xxlor 2, 32+$eighty7, 32+$eighty7 + vsldoi $eighty7,$tmp,$eighty7,1 # 0x010101..87 + xxlor 1, 32+$eighty7, 32+$eighty7 + + # Load XOR contents. 0xf102132435465768798a9bacbdcedfe + mr $x70, r6 + bl Lconsts + lxvw4x 0, $x40, r6 # load XOR contents + mr r6, $x70 + li $x70,0x70 + subi $rounds,$rounds,3 # -4 in total lvx $rndkey0,$x00,$key1 # load key schedule @@ -3159,64 +3198,64 @@ () vxor $twk0,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vand $tmp,$tmp,$eighty7 vxor $out0,$in0,$twk0 - vxor $tweak,$tweak,$tmp + xxlor 32+$in1, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in1 lvx_u $in1,$x10,$inp vxor $twk1,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in1,$in1,$in1,$leperm vand $tmp,$tmp,$eighty7 vxor $out1,$in1,$twk1 - vxor $tweak,$tweak,$tmp + xxlor 32+$in2, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in2 lvx_u $in2,$x20,$inp andi. 
$taillen,$len,15 vxor $twk2,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in2,$in2,$in2,$leperm vand $tmp,$tmp,$eighty7 vxor $out2,$in2,$twk2 - vxor $tweak,$tweak,$tmp + xxlor 32+$in3, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in3 lvx_u $in3,$x30,$inp sub $len,$len,$taillen vxor $twk3,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in3,$in3,$in3,$leperm vand $tmp,$tmp,$eighty7 vxor $out3,$in3,$twk3 - vxor $tweak,$tweak,$tmp + xxlor 32+$in4, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in4 lvx_u $in4,$x40,$inp subi $len,$len,0x60 vxor $twk4,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in4,$in4,$in4,$leperm vand $tmp,$tmp,$eighty7 vxor $out4,$in4,$twk4 - vxor $tweak,$tweak,$tmp + xxlor 32+$in5, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in5 lvx_u $in5,$x50,$inp addi $inp,$inp,0x60 vxor $twk5,$tweak,$rndkey0 vsrab $tmp,$tweak,$seven # next tweak value vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 le?vperm $in5,$in5,$in5,$leperm vand $tmp,$tmp,$eighty7 vxor $out5,$in5,$twk5 - vxor $tweak,$tweak,$tmp + xxlor 32+$in0, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in0 vxor v31,v31,$rndkey0 mtctr $rounds @@ -3242,6 +3281,8 @@ () lvx v25,$x10,$key_ # round[4] bdnz Loop_xts_dec6x + xxlor 32+$eighty7, 1, 1 + subic $len,$len,96 # $len-=96 vxor $in0,$twk0,v31 # xor with last round key vncipher $out0,$out0,v24 @@ -3251,7 +3292,6 @@ () vaddubm $tweak,$tweak,$tweak vncipher $out2,$out2,v24 vncipher $out3,$out3,v24 - vsldoi $tmp,$tmp,$tmp,15 vncipher $out4,$out4,v24 vncipher $out5,$out5,v24 @@ -3259,7 +3299,8 @@ () vand $tmp,$tmp,$eighty7 vncipher $out0,$out0,v25 vncipher $out1,$out1,v25 - vxor $tweak,$tweak,$tmp + xxlor 32+$in1, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in1 vncipher $out2,$out2,v25 vncipher $out3,$out3,v25 vxor $in1,$twk1,v31 @@ -3270,13 +3311,13 @@ () and r0,r0,$len vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vncipher $out0,$out0,v26 vncipher $out1,$out1,v26 vand $tmp,$tmp,$eighty7 vncipher $out2,$out2,v26 vncipher $out3,$out3,v26 - vxor $tweak,$tweak,$tmp + xxlor 32+$in2, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in2 vncipher $out4,$out4,v26 vncipher $out5,$out5,v26 @@ -3290,7 +3331,6 @@ () vaddubm $tweak,$tweak,$tweak vncipher $out0,$out0,v27 vncipher $out1,$out1,v27 - vsldoi $tmp,$tmp,$tmp,15 vncipher $out2,$out2,v27 vncipher $out3,$out3,v27 vand $tmp,$tmp,$eighty7 @@ -3298,7 +3338,8 @@ () vncipher $out5,$out5,v27 addi $key_,$sp,$FRAME+15 # rewind $key_ - vxor $tweak,$tweak,$tmp + xxlor 32+$in3, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in3 vncipher $out0,$out0,v28 vncipher $out1,$out1,v28 vxor $in3,$twk3,v31 @@ -3307,7 +3348,6 @@ () vncipher $out2,$out2,v28 vncipher $out3,$out3,v28 vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vncipher $out4,$out4,v28 vncipher $out5,$out5,v28 lvx v24,$x00,$key_ # re-pre-load round[1] @@ -3315,7 +3355,8 @@ () vncipher $out0,$out0,v29 vncipher $out1,$out1,v29 - vxor $tweak,$tweak,$tmp + xxlor 32+$in4, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in4 vncipher $out2,$out2,v29 vncipher $out3,$out3,v29 vxor $in4,$twk4,v31 @@ -3325,14 +3366,14 @@ () vncipher $out5,$out5,v29 lvx v25,$x10,$key_ # re-pre-load round[2] vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vncipher $out0,$out0,v30 vncipher $out1,$out1,v30 vand $tmp,$tmp,$eighty7 vncipher $out2,$out2,v30 vncipher $out3,$out3,v30 - vxor $tweak,$tweak,$tmp + xxlor 
32+$in5, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in5 vncipher $out4,$out4,v30 vncipher $out5,$out5,v30 vxor $in5,$twk5,v31 @@ -3342,7 +3383,6 @@ () vncipherlast $out0,$out0,$in0 lvx_u $in0,$x00,$inp # load next input block vaddubm $tweak,$tweak,$tweak - vsldoi $tmp,$tmp,$tmp,15 vncipherlast $out1,$out1,$in1 lvx_u $in1,$x10,$inp vncipherlast $out2,$out2,$in2 @@ -3355,7 +3395,10 @@ () vncipherlast $out4,$out4,$in4 le?vperm $in2,$in2,$in2,$leperm lvx_u $in4,$x40,$inp - vxor $tweak,$tweak,$tmp + xxlor 10, 32+$in0, 32+$in0 + xxlor 32+$in0, 0, 0 + vpermxor $tweak, $tweak, $tmp, $in0 + xxlor 32+$in0, 10, 10 vncipherlast $out5,$out5,$in5 le?vperm $in3,$in3,$in3,$leperm lvx_u $in5,$x50,$inp @@ -3386,6 +3429,8 @@ () mtctr $rounds beq Loop_xts_dec6x # did $len-=96 borrow? + xxlor 32+$eighty7, 2, 2 + addic. $len,$len,0x60 beq Lxts_dec6x_zero cmpwi $len,0x20 diff --git a/deps/openssl/openssl/crypto/aes/build.info b/deps/openssl/openssl/crypto/aes/build.info index b250903fa6e26c..d6ad4ea3d0681e 100644 --- a/deps/openssl/openssl/crypto/aes/build.info +++ b/deps/openssl/openssl/crypto/aes/build.info @@ -38,7 +38,11 @@ IF[{- !$disabled{asm} -}] $AESASM_parisc20_64=$AESASM_parisc11 $AESDEF_parisc20_64=$AESDEF_parisc11 + IF[{- $target{sys_id} ne "MACOSX" -}] $AESASM_ppc32=aes_core.c aes_cbc.c aes-ppc.s vpaes-ppc.s aesp8-ppc.s + ELSE + $AESASM_ppc32=aes_core.c aes_cbc.c aes-ppc.s vpaes-ppc.s + ENDIF $AESDEF_ppc32=AES_ASM VPAES_ASM $AESASM_ppc64=$AESASM_ppc32 $AESDEF_ppc64=$AESDEF_ppc32 @@ -76,7 +80,7 @@ DEFINE[../../providers/libdefault.a]=$AESDEF # already gets everything that the static libcrypto.a has, and doesn't need it # added again. IF[{- !$disabled{module} && !$disabled{shared} -}] - DEFINE[../providers/liblegacy.a]=$AESDEF + DEFINE[../../providers/liblegacy.a]=$AESDEF ENDIF GENERATE[aes-ia64.s]=asm/aes-ia64.S diff --git a/deps/openssl/openssl/crypto/asn1/a_d2i_fp.c b/deps/openssl/openssl/crypto/asn1/a_d2i_fp.c index e8602053f974f9..bd549215b4008e 100644 --- a/deps/openssl/openssl/crypto/asn1/a_d2i_fp.c +++ b/deps/openssl/openssl/crypto/asn1/a_d2i_fp.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -148,6 +148,9 @@ int asn1_d2i_read_bio(BIO *in, BUF_MEM **pb) goto err; } len += i; + if ((size_t)i < want) + continue; + } } /* else data already loaded */ diff --git a/deps/openssl/openssl/crypto/asn1/a_mbstr.c b/deps/openssl/openssl/crypto/asn1/a_mbstr.c index 22dea873eeba56..bca1458ad6a175 100644 --- a/deps/openssl/openssl/crypto/asn1/a_mbstr.c +++ b/deps/openssl/openssl/crypto/asn1/a_mbstr.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -139,9 +139,7 @@ int ASN1_mbstring_ncopy(ASN1_STRING **out, const unsigned char *in, int len, if (*out) { free_out = 0; dest = *out; - OPENSSL_free(dest->data); - dest->data = NULL; - dest->length = 0; + ASN1_STRING_set0(dest, NULL, 0); dest->type = str_type; } else { free_out = 1; @@ -155,6 +153,10 @@ int ASN1_mbstring_ncopy(ASN1_STRING **out, const unsigned char *in, int len, /* If both the same type just copy across */ if (inform == outform) { if (!ASN1_STRING_set(dest, in, len)) { + if (free_out) { + ASN1_STRING_free(dest); + *out = NULL; + } ERR_raise(ERR_LIB_ASN1, ERR_R_MALLOC_FAILURE); return -1; } @@ -185,8 +187,10 @@ int ASN1_mbstring_ncopy(ASN1_STRING **out, const unsigned char *in, int len, break; } if ((p = OPENSSL_malloc(outlen + 1)) == NULL) { - if (free_out) + if (free_out) { ASN1_STRING_free(dest); + *out = NULL; + } ERR_raise(ERR_LIB_ASN1, ERR_R_MALLOC_FAILURE); return -1; } diff --git a/deps/openssl/openssl/crypto/asn1/a_strex.c b/deps/openssl/openssl/crypto/asn1/a_strex.c index b31761aae6f52c..a6049f7dd2ed95 100644 --- a/deps/openssl/openssl/crypto/asn1/a_strex.c +++ b/deps/openssl/openssl/crypto/asn1/a_strex.c @@ -1,5 +1,5 @@ /* - * Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -10,6 +10,7 @@ #include #include #include "internal/cryptlib.h" +#include "internal/sizes.h" #include "crypto/asn1.h" #include #include @@ -345,8 +346,10 @@ static int do_print_ex(char_io *io_ch, void *arg, unsigned long lflags, if (lflags & ASN1_STRFLGS_SHOW_TYPE) { const char *tagname; + tagname = ASN1_tag2str(type); - outlen += strlen(tagname); + /* We can directly cast here as tagname will never be too large. */ + outlen += (int)strlen(tagname); if (!io_ch(arg, tagname, outlen) || !io_ch(arg, ":", 1)) return -1; outlen++; @@ -372,7 +375,7 @@ static int do_print_ex(char_io *io_ch, void *arg, unsigned long lflags, if (type == -1) { len = do_dump(lflags, io_ch, arg, str); - if (len < 0) + if (len < 0 || len > INT_MAX - outlen) return -1; outlen += len; return outlen; @@ -391,7 +394,7 @@ static int do_print_ex(char_io *io_ch, void *arg, unsigned long lflags, } len = do_buf(str->data, str->length, type, flags, "es, io_ch, NULL); - if (len < 0) + if (len < 0 || len > INT_MAX - 2 - outlen) return -1; outlen += len; if (quotes) diff --git a/deps/openssl/openssl/crypto/asn1/a_verify.c b/deps/openssl/openssl/crypto/asn1/a_verify.c index 9bf9bdd14ecc56..66809bd6d2ff53 100644 --- a/deps/openssl/openssl/crypto/asn1/a_verify.c +++ b/deps/openssl/openssl/crypto/asn1/a_verify.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -203,10 +203,12 @@ int ASN1_item_verify_ctx(const ASN1_ITEM *it, const X509_ALGOR *alg, inl = ASN1_item_i2d(data, &buf_in, it); if (inl <= 0) { ERR_raise(ERR_LIB_ASN1, ERR_R_INTERNAL_ERROR); + ret = -1; goto err; } if (buf_in == NULL) { ERR_raise(ERR_LIB_ASN1, ERR_R_MALLOC_FAILURE); + ret = -1; goto err; } inll = inl; diff --git a/deps/openssl/openssl/crypto/asn1/tasn_fre.c b/deps/openssl/openssl/crypto/asn1/tasn_fre.c index 13aa6a728e2cce..f8068832ab674f 100644 --- a/deps/openssl/openssl/crypto/asn1/tasn_fre.c +++ b/deps/openssl/openssl/crypto/asn1/tasn_fre.c @@ -1,5 +1,5 @@ /* - * Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -85,8 +85,12 @@ void ossl_asn1_item_embed_free(ASN1_VALUE **pval, const ASN1_ITEM *it, int embed case ASN1_ITYPE_NDEF_SEQUENCE: case ASN1_ITYPE_SEQUENCE: - if (ossl_asn1_do_lock(pval, -1, it) != 0) /* if error or ref-counter > 0 */ + if (ossl_asn1_do_lock(pval, -1, it) != 0) { + /* if error or ref-counter > 0 */ + OPENSSL_assert(embed == 0); + *pval = NULL; return; + } if (asn1_cb) { i = asn1_cb(ASN1_OP_FREE_PRE, pval, it, NULL); if (i == 2) diff --git a/deps/openssl/openssl/crypto/bio/bf_readbuff.c b/deps/openssl/openssl/crypto/bio/bf_readbuff.c index 135ccef83bf3c4..2409c9db97cc6d 100644 --- a/deps/openssl/openssl/crypto/bio/bf_readbuff.c +++ b/deps/openssl/openssl/crypto/bio/bf_readbuff.c @@ -1,5 +1,5 @@ /* - * Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -222,10 +222,13 @@ static int readbuffer_gets(BIO *b, char *buf, int size) char *p; int i, j; - if (size == 0) + if (buf == NULL || size == 0) return 0; --size; /* the passed in size includes the terminator - so remove it here */ ctx = (BIO_F_BUFFER_CTX *)b->ptr; + + if (ctx == NULL || b->next_bio == NULL) + return 0; BIO_clear_retry_flags(b); /* If data is already buffered then use this first */ diff --git a/deps/openssl/openssl/crypto/bio/bio_addr.c b/deps/openssl/openssl/crypto/bio/bio_addr.c index a80774bbd7cac9..04d62f45b198ef 100644 --- a/deps/openssl/openssl/crypto/bio/bio_addr.c +++ b/deps/openssl/openssl/crypto/bio/bio_addr.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -778,14 +778,12 @@ int BIO_lookup_ex(const char *host, const char *service, int lookup_type, if (!RUN_ONCE(&bio_lookup_init, do_bio_lookup_init)) { ERR_raise(ERR_LIB_BIO, ERR_R_MALLOC_FAILURE); - ret = 0; - goto err; + return 0; } - if (!CRYPTO_THREAD_write_lock(bio_lookup_lock)) { - ret = 0; - goto err; - } + if (!CRYPTO_THREAD_write_lock(bio_lookup_lock)) + return 0; + he_fallback_address = INADDR_ANY; if (host == NULL) { he = &he_fallback; diff --git a/deps/openssl/openssl/crypto/bio/bio_lib.c b/deps/openssl/openssl/crypto/bio/bio_lib.c index c86b9ac198cab0..245a75afa1b820 100644 --- a/deps/openssl/openssl/crypto/bio/bio_lib.c +++ b/deps/openssl/openssl/crypto/bio/bio_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -869,8 +869,12 @@ static int bio_wait(BIO *bio, time_t max_time, unsigned int nap_milliseconds) return 1; #ifndef OPENSSL_NO_SOCK - if (BIO_get_fd(bio, &fd) > 0 && fd < FD_SETSIZE) - return BIO_socket_wait(fd, BIO_should_read(bio), max_time); + if (BIO_get_fd(bio, &fd) > 0) { + int ret = BIO_socket_wait(fd, BIO_should_read(bio), max_time); + + if (ret != -1) + return ret; + } #endif /* fall back to polling since no sockets are available */ diff --git a/deps/openssl/openssl/crypto/bio/bio_sock.c b/deps/openssl/openssl/crypto/bio/bio_sock.c index 476cbcc5cef161..12e6a68e3a25d8 100644 --- a/deps/openssl/openssl/crypto/bio/bio_sock.c +++ b/deps/openssl/openssl/crypto/bio/bio_sock.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -396,7 +396,11 @@ int BIO_socket_wait(int fd, int for_read, time_t max_time) struct timeval tv; time_t now; +#ifdef _WIN32 + if ((SOCKET)fd == INVALID_SOCKET) +#else if (fd < 0 || fd >= FD_SETSIZE) +#endif return -1; if (max_time == 0) return 1; diff --git a/deps/openssl/openssl/crypto/bn/bn_lib.c b/deps/openssl/openssl/crypto/bn/bn_lib.c index cf1bfe8ab08503..9677a603cb2dae 100644 --- a/deps/openssl/openssl/crypto/bn/bn_lib.c +++ b/deps/openssl/openssl/crypto/bn/bn_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -618,14 +618,29 @@ int BN_ucmp(const BIGNUM *a, const BIGNUM *b) int i; BN_ULONG t1, t2, *ap, *bp; + ap = a->d; + bp = b->d; + + if (BN_get_flags(a, BN_FLG_CONSTTIME) + && a->top == b->top) { + int res = 0; + + for (i = 0; i < b->top; i++) { + res = constant_time_select_int(constant_time_lt_bn(ap[i], bp[i]), + -1, res); + res = constant_time_select_int(constant_time_lt_bn(bp[i], ap[i]), + 1, res); + } + return res; + } + bn_check_top(a); bn_check_top(b); i = a->top - b->top; if (i != 0) return i; - ap = a->d; - bp = b->d; + for (i = a->top - 1; i >= 0; i--) { t1 = ap[i]; t2 = bp[i]; @@ -737,11 +752,10 @@ int BN_is_bit_set(const BIGNUM *a, int n) return (int)(((a->d[i]) >> j) & ((BN_ULONG)1)); } -int BN_mask_bits(BIGNUM *a, int n) +int ossl_bn_mask_bits_fixed_top(BIGNUM *a, int n) { int b, w; - bn_check_top(a); if (n < 0) return 0; @@ -755,10 +769,21 @@ int BN_mask_bits(BIGNUM *a, int n) a->top = w + 1; a->d[w] &= ~(BN_MASK2 << b); } - bn_correct_top(a); + a->flags |= BN_FLG_FIXED_TOP; return 1; } +int BN_mask_bits(BIGNUM *a, int n) +{ + int ret; + + bn_check_top(a); + ret = ossl_bn_mask_bits_fixed_top(a, n); + if (ret) + bn_correct_top(a); + return ret; +} + void BN_set_negative(BIGNUM *a, int b) { if (b && !BN_is_zero(a)) @@ -935,6 +960,22 @@ int BN_is_word(const BIGNUM *a, const BN_ULONG w) return BN_abs_is_word(a, w) && (!w || !a->neg); } +int ossl_bn_is_word_fixed_top(const BIGNUM *a, BN_ULONG w) +{ + int res, i; + const BN_ULONG *ap = a->d; + + if (a->neg || a->top == 0) + return 0; + + res = constant_time_select_int(constant_time_eq_bn(ap[0], w), 1, 0); + + for (i = 1; i < a->top; i++) + res = constant_time_select_int(constant_time_is_zero_bn(ap[i]), + res, 0); + return res; +} + int BN_is_odd(const BIGNUM *a) { return (a->top > 0) && (a->d[0] & 1); diff --git a/deps/openssl/openssl/crypto/bn/bn_rand.c b/deps/openssl/openssl/crypto/bn/bn_rand.c index 2ca426ff76ed98..ba0970b1f87dc6 100644 --- a/deps/openssl/openssl/crypto/bn/bn_rand.c +++ b/deps/openssl/openssl/crypto/bn/bn_rand.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -186,8 +186,8 @@ static int bnrand_range(BNRAND_FLAG flag, BIGNUM *r, const BIGNUM *range, } else { do { /* range = 11..._2 or range = 101..._2 */ - if (!bnrand(flag, r, n, BN_RAND_TOP_ANY, BN_RAND_BOTTOM_ANY, 0, - ctx)) + if (!bnrand(flag, r, n, BN_RAND_TOP_ANY, BN_RAND_BOTTOM_ANY, + strength, ctx)) return 0; if (!--count) { @@ -240,17 +240,63 @@ int BN_pseudo_rand_range(BIGNUM *r, const BIGNUM *range) # endif #endif +int ossl_bn_priv_rand_range_fixed_top(BIGNUM *r, const BIGNUM *range, + unsigned int strength, BN_CTX *ctx) +{ + int n; + int count = 100; + + if (r == NULL) { + ERR_raise(ERR_LIB_BN, ERR_R_PASSED_NULL_PARAMETER); + return 0; + } + + if (range->neg || BN_is_zero(range)) { + ERR_raise(ERR_LIB_BN, BN_R_INVALID_RANGE); + return 0; + } + + n = BN_num_bits(range); /* n > 0 */ + + /* BN_is_bit_set(range, n - 1) always holds */ + + if (n == 1) { + BN_zero(r); + } else { + BN_set_flags(r, BN_FLG_CONSTTIME); + do { + if (!bnrand(PRIVATE, r, n + 1, BN_RAND_TOP_ONE, BN_RAND_BOTTOM_ANY, + strength, ctx)) + return 0; + + if (!--count) { + ERR_raise(ERR_LIB_BN, BN_R_TOO_MANY_ITERATIONS); + return 0; + } + ossl_bn_mask_bits_fixed_top(r, n); + } + while (BN_ucmp(r, range) >= 0); +#ifdef BN_DEBUG + /* With BN_DEBUG on a fixed top number cannot be returned */ + bn_correct_top(r); +#endif + } + + return 1; +} + /* - * BN_generate_dsa_nonce generates a random number 0 <= out < range. Unlike - * BN_rand_range, it also includes the contents of |priv| and |message| in - * the generation so that an RNG failure isn't fatal as long as |priv| + * ossl_bn_gen_dsa_nonce_fixed_top generates a random number 0 <= out < range. + * Unlike BN_rand_range, it also includes the contents of |priv| and |message| + * in the generation so that an RNG failure isn't fatal as long as |priv| * remains secret. This is intended for use in DSA and ECDSA where an RNG * weakness leads directly to private key exposure unless this function is * used. */ -int BN_generate_dsa_nonce(BIGNUM *out, const BIGNUM *range, - const BIGNUM *priv, const unsigned char *message, - size_t message_len, BN_CTX *ctx) +int ossl_bn_gen_dsa_nonce_fixed_top(BIGNUM *out, const BIGNUM *range, + const BIGNUM *priv, + const unsigned char *message, + size_t message_len, BN_CTX *ctx) { EVP_MD_CTX *mdctx = EVP_MD_CTX_new(); /* @@ -260,20 +306,24 @@ int BN_generate_dsa_nonce(BIGNUM *out, const BIGNUM *range, unsigned char random_bytes[64]; unsigned char digest[SHA512_DIGEST_LENGTH]; unsigned done, todo; - /* We generate |range|+8 bytes of random output. */ - const unsigned num_k_bytes = BN_num_bytes(range) + 8; + /* We generate |range|+1 bytes of random output. */ + const unsigned num_k_bytes = BN_num_bytes(range) + 1; unsigned char private_bytes[96]; unsigned char *k_bytes = NULL; + const int max_n = 64; /* Pr(failure to generate) < 2^max_n */ + int n; int ret = 0; EVP_MD *md = NULL; OSSL_LIB_CTX *libctx = ossl_bn_get_libctx(ctx); if (mdctx == NULL) - goto err; + goto end; k_bytes = OPENSSL_malloc(num_k_bytes); if (k_bytes == NULL) - goto err; + goto end; + /* Ensure top byte is set to avoid non-constant time in bin2bn */ + k_bytes[0] = 0xff; /* We copy |priv| into a local buffer to avoid exposing its length. */ if (BN_bn2binpad(priv, private_bytes, sizeof(private_bytes)) < 0) { @@ -283,41 +333,60 @@ int BN_generate_dsa_nonce(BIGNUM *out, const BIGNUM *range, * length of the private key. 
*/ ERR_raise(ERR_LIB_BN, BN_R_PRIVATE_KEY_TOO_LARGE); - goto err; + goto end; } md = EVP_MD_fetch(libctx, "SHA512", NULL); if (md == NULL) { ERR_raise(ERR_LIB_BN, BN_R_NO_SUITABLE_DIGEST); - goto err; - } - for (done = 0; done < num_k_bytes;) { - if (RAND_priv_bytes_ex(libctx, random_bytes, sizeof(random_bytes), 0) <= 0) - goto err; - - if (!EVP_DigestInit_ex(mdctx, md, NULL) - || !EVP_DigestUpdate(mdctx, &done, sizeof(done)) - || !EVP_DigestUpdate(mdctx, private_bytes, - sizeof(private_bytes)) - || !EVP_DigestUpdate(mdctx, message, message_len) - || !EVP_DigestUpdate(mdctx, random_bytes, sizeof(random_bytes)) - || !EVP_DigestFinal_ex(mdctx, digest, NULL)) - goto err; - - todo = num_k_bytes - done; - if (todo > SHA512_DIGEST_LENGTH) - todo = SHA512_DIGEST_LENGTH; - memcpy(k_bytes + done, digest, todo); - done += todo; + goto end; } + for (n = 0; n < max_n; n++) { + unsigned char i = 0; + + for (done = 1; done < num_k_bytes;) { + if (RAND_priv_bytes_ex(libctx, random_bytes, sizeof(random_bytes), + 0) <= 0) + goto end; + + if (!EVP_DigestInit_ex(mdctx, md, NULL) + || !EVP_DigestUpdate(mdctx, &i, sizeof(i)) + || !EVP_DigestUpdate(mdctx, private_bytes, + sizeof(private_bytes)) + || !EVP_DigestUpdate(mdctx, message, message_len) + || !EVP_DigestUpdate(mdctx, random_bytes, + sizeof(random_bytes)) + || !EVP_DigestFinal_ex(mdctx, digest, NULL)) + goto end; + + todo = num_k_bytes - done; + if (todo > SHA512_DIGEST_LENGTH) + todo = SHA512_DIGEST_LENGTH; + memcpy(k_bytes + done, digest, todo); + done += todo; + ++i; + } - if (!BN_bin2bn(k_bytes, num_k_bytes, out)) - goto err; - if (BN_mod(out, out, range, ctx) != 1) - goto err; - ret = 1; + if (!BN_bin2bn(k_bytes, num_k_bytes, out)) + goto end; - err: + /* Clear out the top bits and rejection filter into range */ + BN_set_flags(out, BN_FLG_CONSTTIME); + ossl_bn_mask_bits_fixed_top(out, BN_num_bits(range)); + + if (BN_ucmp(out, range) < 0) { + ret = 1; +#ifdef BN_DEBUG + /* With BN_DEBUG on a fixed top number cannot be returned */ + bn_correct_top(out); +#endif + goto end; + } + } + /* Failed to generate anything */ + ERR_raise(ERR_LIB_BN, ERR_R_INTERNAL_ERROR); + + end: EVP_MD_CTX_free(mdctx); EVP_MD_free(md); OPENSSL_clear_free(k_bytes, num_k_bytes); @@ -326,3 +395,20 @@ int BN_generate_dsa_nonce(BIGNUM *out, const BIGNUM *range, OPENSSL_cleanse(private_bytes, sizeof(private_bytes)); return ret; } + +int BN_generate_dsa_nonce(BIGNUM *out, const BIGNUM *range, + const BIGNUM *priv, const unsigned char *message, + size_t message_len, BN_CTX *ctx) +{ + int ret; + + ret = ossl_bn_gen_dsa_nonce_fixed_top(out, range, priv, message, + message_len, ctx); + /* + * This call makes the BN_generate_dsa_nonce non-const-time, thus we + * do not use it internally. But fixed_top BNs currently cannot be returned + * from public API calls. + */ + bn_correct_top(out); + return ret; +} diff --git a/deps/openssl/openssl/crypto/bn/bn_shift.c b/deps/openssl/openssl/crypto/bn/bn_shift.c index 8fcb04324e6d59..d67331f1f634cd 100644 --- a/deps/openssl/openssl/crypto/bn/bn_shift.c +++ b/deps/openssl/openssl/crypto/bn/bn_shift.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2020 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -156,6 +156,9 @@ int BN_rshift(BIGNUM *r, const BIGNUM *a, int n) return 0; } + bn_check_top(r); + bn_check_top(a); + ret = bn_rshift_fixed_top(r, a, n); bn_correct_top(r); @@ -177,9 +180,6 @@ int bn_rshift_fixed_top(BIGNUM *r, const BIGNUM *a, int n) BN_ULONG *t, *f; BN_ULONG l, m, mask; - bn_check_top(r); - bn_check_top(a); - assert(n >= 0); nw = n / BN_BITS2; diff --git a/deps/openssl/openssl/crypto/cmp/cmp_vfy.c b/deps/openssl/openssl/crypto/cmp/cmp_vfy.c index 7ce91ec5d16792..b9951045c2e863 100644 --- a/deps/openssl/openssl/crypto/cmp/cmp_vfy.c +++ b/deps/openssl/openssl/crypto/cmp/cmp_vfy.c @@ -1,5 +1,5 @@ /* - * Copyright 2007-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2007-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright Nokia 2007-2020 * Copyright Siemens AG 2015-2020 * @@ -619,7 +619,7 @@ int OSSL_CMP_validate_msg(OSSL_CMP_CTX *ctx, const OSSL_CMP_MSG *msg) default: scrt = ctx->srvCert; if (scrt == NULL) { - if (ctx->trusted == NULL) { + if (ctx->trusted == NULL && ctx->secretValue != NULL) { ossl_cmp_info(ctx, "no trust store nor pinned server cert available for verifying signature-based CMP message protection"); ERR_raise(ERR_LIB_CMP, CMP_R_MISSING_TRUST_ANCHOR); return 0; diff --git a/deps/openssl/openssl/crypto/conf/conf_def.c b/deps/openssl/openssl/crypto/conf/conf_def.c index 5acc90b69e1c99..cda2f3e267924f 100644 --- a/deps/openssl/openssl/crypto/conf/conf_def.c +++ b/deps/openssl/openssl/crypto/conf/conf_def.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -332,7 +332,7 @@ static int def_load_bio(CONF *conf, BIO *in, long *line) v = NULL; /* check for line continuation */ - if (bufnum >= 1) { + if (!again && bufnum >= 1) { /* * If we have bytes and the last char '\\' and second last char * is not '\\' diff --git a/deps/openssl/openssl/crypto/conf/conf_lib.c b/deps/openssl/openssl/crypto/conf/conf_lib.c index a2360035257a97..719af7cb75c60c 100644 --- a/deps/openssl/openssl/crypto/conf/conf_lib.c +++ b/deps/openssl/openssl/crypto/conf/conf_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -464,6 +464,9 @@ int OPENSSL_INIT_set_config_appname(OPENSSL_INIT_SETTINGS *settings, void OPENSSL_INIT_free(OPENSSL_INIT_SETTINGS *settings) { + if (settings == NULL) + return; + free(settings->filename); free(settings->appname); free(settings); diff --git a/deps/openssl/openssl/crypto/conf/conf_sap.c b/deps/openssl/openssl/crypto/conf/conf_sap.c index 3019bcf31af81a..106434dcbf3d79 100644 --- a/deps/openssl/openssl/crypto/conf/conf_sap.c +++ b/deps/openssl/openssl/crypto/conf/conf_sap.c @@ -1,5 +1,5 @@ /* - * Copyright 2002-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2002-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
You can obtain a copy @@ -38,6 +38,8 @@ void OPENSSL_config(const char *appname) settings.appname = strdup(appname); settings.flags = DEFAULT_CONF_MFLAGS; OPENSSL_init_crypto(OPENSSL_INIT_LOAD_CONFIG, &settings); + + free(settings.appname); } #endif diff --git a/deps/openssl/openssl/crypto/context.c b/deps/openssl/openssl/crypto/context.c index 548665fba265f4..ac6938e619eb1d 100644 --- a/deps/openssl/openssl/crypto/context.c +++ b/deps/openssl/openssl/crypto/context.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -240,7 +240,7 @@ int OSSL_LIB_CTX_load_config(OSSL_LIB_CTX *ctx, const char *config_file) void OSSL_LIB_CTX_free(OSSL_LIB_CTX *ctx) { - if (ossl_lib_ctx_is_default(ctx)) + if (ctx == NULL || ossl_lib_ctx_is_default(ctx)) return; #ifndef FIPS_MODULE diff --git a/deps/openssl/openssl/crypto/dsa/dsa_check.c b/deps/openssl/openssl/crypto/dsa/dsa_check.c index fb0e9129a2956b..801b932d87244e 100644 --- a/deps/openssl/openssl/crypto/dsa/dsa_check.c +++ b/deps/openssl/openssl/crypto/dsa/dsa_check.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -19,8 +19,34 @@ #include "dsa_local.h" #include "crypto/dsa.h" +static int dsa_precheck_params(const DSA *dsa, int *ret) +{ + if (dsa->params.p == NULL || dsa->params.q == NULL) { + ERR_raise(ERR_LIB_DSA, DSA_R_BAD_FFC_PARAMETERS); + *ret = FFC_CHECK_INVALID_PQ; + return 0; + } + + if (BN_num_bits(dsa->params.p) > OPENSSL_DSA_MAX_MODULUS_BITS) { + ERR_raise(ERR_LIB_DSA, DSA_R_MODULUS_TOO_LARGE); + *ret = FFC_CHECK_INVALID_PQ; + return 0; + } + + if (BN_num_bits(dsa->params.q) >= BN_num_bits(dsa->params.p)) { + ERR_raise(ERR_LIB_DSA, DSA_R_BAD_Q_VALUE); + *ret = FFC_CHECK_INVALID_PQ; + return 0; + } + + return 1; +} + int ossl_dsa_check_params(const DSA *dsa, int checktype, int *ret) { + if (!dsa_precheck_params(dsa, ret)) + return 0; + if (checktype == OSSL_KEYMGMT_VALIDATE_QUICK_CHECK) return ossl_ffc_params_simple_validate(dsa->libctx, &dsa->params, FFC_PARAM_TYPE_DSA, ret); @@ -39,6 +65,9 @@ int ossl_dsa_check_params(const DSA *dsa, int checktype, int *ret) */ int ossl_dsa_check_pub_key(const DSA *dsa, const BIGNUM *pub_key, int *ret) { + if (!dsa_precheck_params(dsa, ret)) + return 0; + return ossl_ffc_validate_public_key(&dsa->params, pub_key, ret) && *ret == 0; } @@ -50,6 +79,9 @@ int ossl_dsa_check_pub_key(const DSA *dsa, const BIGNUM *pub_key, int *ret) */ int ossl_dsa_check_pub_key_partial(const DSA *dsa, const BIGNUM *pub_key, int *ret) { + if (!dsa_precheck_params(dsa, ret)) + return 0; + return ossl_ffc_validate_public_key_partial(&dsa->params, pub_key, ret) && *ret == 0; } @@ -58,8 +90,10 @@ int ossl_dsa_check_priv_key(const DSA *dsa, const BIGNUM *priv_key, int *ret) { *ret = 0; - return (dsa->params.q != NULL - && ossl_ffc_validate_private_key(dsa->params.q, priv_key, ret)); + if (!dsa_precheck_params(dsa, ret)) + return 0; + + return ossl_ffc_validate_private_key(dsa->params.q, priv_key, ret); } /* @@ -72,8 +106,10 @@ int ossl_dsa_check_pairwise(const DSA *dsa) BN_CTX *ctx = NULL; BIGNUM *pub_key = 
NULL; - if (dsa->params.p == NULL - || dsa->params.g == NULL + if (!dsa_precheck_params(dsa, &ret)) + return 0; + + if (dsa->params.g == NULL || dsa->priv_key == NULL || dsa->pub_key == NULL) return 0; diff --git a/deps/openssl/openssl/crypto/dsa/dsa_ossl.c b/deps/openssl/openssl/crypto/dsa/dsa_ossl.c index 8fd66a950e3739..0c18b78f76336a 100644 --- a/deps/openssl/openssl/crypto/dsa/dsa_ossl.c +++ b/deps/openssl/openssl/crypto/dsa/dsa_ossl.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -262,12 +262,13 @@ static int dsa_sign_setup(DSA *dsa, BN_CTX *ctx_in, * We calculate k from SHA512(private_key + H(message) + random). * This protects the private key from a weak PRNG. */ - if (!BN_generate_dsa_nonce(k, dsa->params.q, dsa->priv_key, dgst, - dlen, ctx)) + if (!ossl_bn_gen_dsa_nonce_fixed_top(k, dsa->params.q, + dsa->priv_key, dgst, + dlen, ctx)) goto err; - } else if (!BN_priv_rand_range_ex(k, dsa->params.q, 0, ctx)) + } else if (!ossl_bn_priv_rand_range_fixed_top(k, dsa->params.q, 0, ctx)) goto err; - } while (BN_is_zero(k)); + } while (ossl_bn_is_word_fixed_top(k, 0)); BN_set_flags(k, BN_FLG_CONSTTIME); BN_set_flags(l, BN_FLG_CONSTTIME); diff --git a/deps/openssl/openssl/crypto/dsa/dsa_sign.c b/deps/openssl/openssl/crypto/dsa/dsa_sign.c index ddfbfa18af157e..91d334ea533a50 100644 --- a/deps/openssl/openssl/crypto/dsa/dsa_sign.c +++ b/deps/openssl/openssl/crypto/dsa/dsa_sign.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -156,6 +156,11 @@ int ossl_dsa_sign_int(int type, const unsigned char *dgst, int dlen, { DSA_SIG *s; + if (sig == NULL) { + *siglen = DSA_size(dsa); + return 1; + } + /* legacy case uses the method table */ if (dsa->libctx == NULL || dsa->meth != DSA_get_default_method()) s = DSA_do_sign(dgst, dlen, dsa); @@ -165,7 +170,7 @@ int ossl_dsa_sign_int(int type, const unsigned char *dgst, int dlen, *siglen = 0; return 0; } - *siglen = i2d_DSA_SIG(s, sig != NULL ? &sig : NULL); + *siglen = i2d_DSA_SIG(s, &sig); DSA_SIG_free(s); return 1; } diff --git a/deps/openssl/openssl/crypto/ec/build.info b/deps/openssl/openssl/crypto/ec/build.info index a511e887a9ba19..6dd98e9f4f1724 100644 --- a/deps/openssl/openssl/crypto/ec/build.info +++ b/deps/openssl/openssl/crypto/ec/build.info @@ -77,7 +77,7 @@ DEFINE[../../providers/libdefault.a]=$ECDEF # Otherwise, it already gets everything that the static libcrypto.a # has, and doesn't need it added again. IF[{- !$disabled{module} && !$disabled{shared} -}] - DEFINE[../providers/liblegacy.a]=$ECDEF + DEFINE[../../providers/liblegacy.a]=$ECDEF ENDIF GENERATE[ecp_nistz256-x86.S]=asm/ecp_nistz256-x86.pl diff --git a/deps/openssl/openssl/crypto/ec/curve448/arch_64/f_impl64.c b/deps/openssl/openssl/crypto/ec/curve448/arch_64/f_impl64.c index 8f7a7dd391bd8d..dfe75b8fc5caed 100644 --- a/deps/openssl/openssl/crypto/ec/curve448/arch_64/f_impl64.c +++ b/deps/openssl/openssl/crypto/ec/curve448/arch_64/f_impl64.c @@ -1,5 +1,5 @@ /* - * Copyright 2017-2022 The OpenSSL Project Authors. All Rights Reserved. 
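The dsa_sign.c hunk above restores the usual size-query behaviour: calling with a NULL signature buffer reports DSA_size() instead of performing a signing operation. The same two-call idiom at the public EVP level, as a minimal sketch that assumes key is a ready-to-use private key:

    #include <openssl/crypto.h>
    #include <openssl/evp.h>

    /* Two-call length-query idiom: first call with a NULL output buffer
     * to learn the maximum signature size, then allocate and sign. */
    static unsigned char *sign_digest(EVP_PKEY *key, const unsigned char *dgst,
                                      size_t dlen, size_t *siglen)
    {
        EVP_PKEY_CTX *ctx = EVP_PKEY_CTX_new(key, NULL);
        unsigned char *sig = NULL;

        if (ctx == NULL || EVP_PKEY_sign_init(ctx) <= 0)
            goto end;
        if (EVP_PKEY_sign(ctx, NULL, siglen, dgst, dlen) <= 0)  /* size only */
            goto end;
        if ((sig = OPENSSL_malloc(*siglen)) == NULL)
            goto end;
        if (EVP_PKEY_sign(ctx, sig, siglen, dgst, dlen) <= 0) { /* real run */
            OPENSSL_free(sig);
            sig = NULL;
        }
     end:
        EVP_PKEY_CTX_free(ctx);
        return sig;
    }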
+ * Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright 2014 Cryptography Research, Inc. * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -45,9 +45,9 @@ void gf_mul(gf_s * RESTRICT cs, const gf as, const gf bs) accum0 += widemul(a[j + 4], b[i - j + 4]); } for (; j < 4; j++) { - accum2 += widemul(a[j], b[i - j + 8]); - accum1 += widemul(aa[j], bbb[i - j + 4]); - accum0 += widemul(a[j + 4], bb[i - j + 4]); + accum2 += widemul(a[j], b[i + 8 - j]); + accum1 += widemul(aa[j], bbb[i + 4 - j]); + accum0 += widemul(a[j + 4], bb[i + 4 - j]); } accum1 -= accum2; diff --git a/deps/openssl/openssl/crypto/ec/ecdsa_ossl.c b/deps/openssl/openssl/crypto/ec/ecdsa_ossl.c index 0bf4635e2f9723..775b7ec911bef1 100644 --- a/deps/openssl/openssl/crypto/ec/ecdsa_ossl.c +++ b/deps/openssl/openssl/crypto/ec/ecdsa_ossl.c @@ -1,5 +1,5 @@ /* - * Copyright 2002-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2002-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -70,6 +70,11 @@ int ossl_ecdsa_sign(int type, const unsigned char *dgst, int dlen, { ECDSA_SIG *s; + if (sig == NULL && (kinv == NULL || r == NULL)) { + *siglen = ECDSA_size(eckey); + return 1; + } + s = ECDSA_do_sign_ex(dgst, dlen, kinv, r, eckey); if (s == NULL) { *siglen = 0; @@ -125,7 +130,11 @@ static int ecdsa_sign_setup(EC_KEY *eckey, BN_CTX *ctx_in, ERR_raise(ERR_LIB_EC, ERR_R_EC_LIB); goto err; } - order = EC_GROUP_get0_order(group); + + if ((order = EC_GROUP_get0_order(group)) == NULL) { + ERR_raise(ERR_LIB_EC, ERR_R_EC_LIB); + goto err; + } /* Preallocate space */ order_bits = BN_num_bits(order); @@ -140,18 +149,18 @@ static int ecdsa_sign_setup(EC_KEY *eckey, BN_CTX *ctx_in, /* get random k */ do { if (dgst != NULL) { - if (!BN_generate_dsa_nonce(k, order, priv_key, - dgst, dlen, ctx)) { + if (!ossl_bn_gen_dsa_nonce_fixed_top(k, order, priv_key, + dgst, dlen, ctx)) { ERR_raise(ERR_LIB_EC, EC_R_RANDOM_NUMBER_GENERATION_FAILED); goto err; } } else { - if (!BN_priv_rand_range_ex(k, order, 0, ctx)) { + if (!ossl_bn_priv_rand_range_fixed_top(k, order, 0, ctx)) { ERR_raise(ERR_LIB_EC, EC_R_RANDOM_NUMBER_GENERATION_FAILED); goto err; } } - } while (BN_is_zero(k)); + } while (ossl_bn_is_word_fixed_top(k, 0)); /* compute r the x-coordinate of generator * k */ if (!EC_POINT_mul(group, tmp_point, k, NULL, NULL, ctx)) { @@ -250,7 +259,11 @@ ECDSA_SIG *ossl_ecdsa_simple_sign_sig(const unsigned char *dgst, int dgst_len, goto err; } - order = EC_GROUP_get0_order(group); + if ((order = EC_GROUP_get0_order(group)) == NULL) { + ERR_raise(ERR_LIB_EC, ERR_R_EC_LIB); + goto err; + } + i = BN_num_bits(order); /* * Need to truncate digest if it is too long: first truncate whole bytes. diff --git a/deps/openssl/openssl/crypto/encode_decode/encoder_lib.c b/deps/openssl/openssl/crypto/encode_decode/encoder_lib.c index 7a55c7ab9a2730..a88332b79d5295 100644 --- a/deps/openssl/openssl/crypto/encode_decode/encoder_lib.c +++ b/deps/openssl/openssl/crypto/encode_decode/encoder_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
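Both ecdsa_ossl.c hunks above stop assuming that EC_GROUP_get0_order() succeeds. A small sketch of the same defensive lookup, using only public API:

    #include <openssl/bn.h>
    #include <openssl/ec.h>

    /* A group taken from a key may be incompletely initialised, so
     * treat a NULL order as an error rather than dereferencing it. */
    static int group_order_bits(const EC_KEY *eckey)
    {
        const EC_GROUP *group = EC_KEY_get0_group(eckey);
        const BIGNUM *order;

        if (group == NULL || (order = EC_GROUP_get0_order(group)) == NULL)
            return -1;
        return BN_num_bits(order);
    }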
You can obtain a copy @@ -59,6 +59,11 @@ int OSSL_ENCODER_to_bio(OSSL_ENCODER_CTX *ctx, BIO *out) return 0; } + if (ctx->cleanup == NULL || ctx->construct == NULL) { + ERR_raise(ERR_LIB_OSSL_ENCODER, ERR_R_INIT_FAIL); + return 0; + } + return encoder_process(&data) > 0; } diff --git a/deps/openssl/openssl/crypto/engine/eng_pkey.c b/deps/openssl/openssl/crypto/engine/eng_pkey.c index f84fcde4601629..d18d837e625c03 100644 --- a/deps/openssl/openssl/crypto/engine/eng_pkey.c +++ b/deps/openssl/openssl/crypto/engine/eng_pkey.c @@ -1,5 +1,5 @@ /* - * Copyright 2001-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -79,48 +79,6 @@ EVP_PKEY *ENGINE_load_private_key(ENGINE *e, const char *key_id, ERR_raise(ERR_LIB_ENGINE, ENGINE_R_FAILED_LOADING_PRIVATE_KEY); return NULL; } - /* We enforce check for legacy key */ - switch (EVP_PKEY_get_id(pkey)) { - case EVP_PKEY_RSA: - { - RSA *rsa = EVP_PKEY_get1_RSA(pkey); - EVP_PKEY_set1_RSA(pkey, rsa); - RSA_free(rsa); - } - break; -# ifndef OPENSSL_NO_EC - case EVP_PKEY_SM2: - case EVP_PKEY_EC: - { - EC_KEY *ec = EVP_PKEY_get1_EC_KEY(pkey); - EVP_PKEY_set1_EC_KEY(pkey, ec); - EC_KEY_free(ec); - } - break; -# endif -# ifndef OPENSSL_NO_DSA - case EVP_PKEY_DSA: - { - DSA *dsa = EVP_PKEY_get1_DSA(pkey); - EVP_PKEY_set1_DSA(pkey, dsa); - DSA_free(dsa); - } - break; -#endif -# ifndef OPENSSL_NO_DH - case EVP_PKEY_DH: - { - DH *dh = EVP_PKEY_get1_DH(pkey); - EVP_PKEY_set1_DH(pkey, dh); - DH_free(dh); - } - break; -#endif - default: - /*Do nothing */ - break; - } - return pkey; } diff --git a/deps/openssl/openssl/crypto/engine/eng_table.c b/deps/openssl/openssl/crypto/engine/eng_table.c index 9dc3144bbfd7b6..6280965cc0265f 100644 --- a/deps/openssl/openssl/crypto/engine/eng_table.c +++ b/deps/openssl/openssl/crypto/engine/eng_table.c @@ -1,5 +1,5 @@ /* - * Copyright 2001-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -215,9 +215,11 @@ ENGINE *ossl_engine_table_select(ENGINE_TABLE **table, int nid, f, l, nid); return NULL; } - ERR_set_mark(); + if (!CRYPTO_THREAD_write_lock(global_engine_lock)) - goto end; + return NULL; + + ERR_set_mark(); /* * Check again inside the lock otherwise we could race against cleanup * operations. 
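The eng_table.c reorder above ensures that a failed CRYPTO_THREAD_write_lock() cannot leave an error mark set with no matching pop. A minimal sketch of the corrected ordering, not OpenSSL's own code:

    #include <openssl/crypto.h>
    #include <openssl/err.h>

    /* Acquire the lock before setting the error mark: if locking fails
     * we return before any mark exists, so there is never a dangling
     * mark left on the error stack to unwind. */
    static int do_work_locked(CRYPTO_RWLOCK *lock)
    {
        if (!CRYPTO_THREAD_write_lock(lock))
            return 0;
        ERR_set_mark();
        /* ... operations that may push recoverable errors ... */
        ERR_pop_to_mark();
        CRYPTO_THREAD_unlock(lock);
        return 1;
    }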
But don't worry about a debug printout diff --git a/deps/openssl/openssl/crypto/err/openssl.ec b/deps/openssl/openssl/crypto/err/openssl.ec index 3612c195f09f3e..f3802a05b5c325 100644 --- a/deps/openssl/openssl/crypto/err/openssl.ec +++ b/deps/openssl/openssl/crypto/err/openssl.ec @@ -76,6 +76,6 @@ R SSL_R_TLSV1_CERTIFICATE_UNOBTAINABLE 1111 R SSL_R_TLSV1_UNRECOGNIZED_NAME 1112 R SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE 1113 R SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE 1114 -R TLS1_AD_UNKNOWN_PSK_IDENTITY 1115 +R SSL_R_TLSV1_ALERT_UNKNOWN_PSK_IDENTITY 1115 R SSL_R_TLSV13_ALERT_CERTIFICATE_REQUIRED 1116 -R TLS1_AD_NO_APPLICATION_PROTOCOL 1120 +R SSL_R_TLSV1_ALERT_NO_APPLICATION_PROTOCOL 1120 diff --git a/deps/openssl/openssl/crypto/ess/ess_lib.c b/deps/openssl/openssl/crypto/ess/ess_lib.c index 65444d383ff4bd..cd42f951f7b815 100644 --- a/deps/openssl/openssl/crypto/ess/ess_lib.c +++ b/deps/openssl/openssl/crypto/ess/ess_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -293,7 +293,7 @@ int OSSL_ESS_check_signing_certs(const ESS_SIGNING_CERT *ss, int i, ret; if (require_signing_cert && ss == NULL && ssv2 == NULL) { - ERR_raise(ERR_LIB_CMS, ESS_R_MISSING_SIGNING_CERTIFICATE_ATTRIBUTE); + ERR_raise(ERR_LIB_ESS, ESS_R_MISSING_SIGNING_CERTIFICATE_ATTRIBUTE); return -1; } if (n_v1 == 0 || n_v2 == 0) { diff --git a/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c b/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c index dcd53b43f92b9c..de6c215e205a22 100644 --- a/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c +++ b/deps/openssl/openssl/crypto/evp/ctrl_params_translate.c @@ -1,5 +1,5 @@ /* - * Copyright 2021-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -2777,7 +2777,7 @@ static int evp_pkey_ctx_setget_params_to_ctrl(EVP_PKEY_CTX *pctx, fixup_args_fn *fixup = default_fixup_args; int ret; - tmpl.action_type = action_type; + ctx.action_type = tmpl.action_type = action_type; tmpl.keytype1 = tmpl.keytype2 = keytype; tmpl.optype = optype; tmpl.param_key = params->key; @@ -2786,7 +2786,6 @@ static int evp_pkey_ctx_setget_params_to_ctrl(EVP_PKEY_CTX *pctx, if (translation != NULL) { if (translation->fixup_args != NULL) fixup = translation->fixup_args; - ctx.action_type = translation->action_type; ctx.ctrl_cmd = translation->ctrl_num; } ctx.pctx = pctx; diff --git a/deps/openssl/openssl/crypto/evp/digest.c b/deps/openssl/openssl/crypto/evp/digest.c index eefed523ec1256..aca05186ec10ed 100644 --- a/deps/openssl/openssl/crypto/evp/digest.c +++ b/deps/openssl/openssl/crypto/evp/digest.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -409,7 +409,7 @@ int EVP_DigestUpdate(EVP_MD_CTX *ctx, const void *data, size_t count) /* Code below to be removed when legacy support is dropped. 
*/ legacy: - return ctx->update(ctx, data, count); + return ctx->update != NULL ? ctx->update(ctx, data, count) : 0; } /* The caller can assume that this removes any secret data from the context */ diff --git a/deps/openssl/openssl/crypto/evp/keymgmt_lib.c b/deps/openssl/openssl/crypto/evp/keymgmt_lib.c index 8369d9578cbd0e..9512cc9cf0f735 100644 --- a/deps/openssl/openssl/crypto/evp/keymgmt_lib.c +++ b/deps/openssl/openssl/crypto/evp/keymgmt_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -243,10 +243,15 @@ OP_CACHE_ELEM *evp_keymgmt_util_find_operation_cache(EVP_PKEY *pk, /* * A comparison and sk_P_CACHE_ELEM_find() are avoided to not cause * problems when we've only a read lock. + * A keymgmt is a match if the |keymgmt| pointers are identical or if the + * provider and the name ID match */ for (i = 0; i < end; i++) { p = sk_OP_CACHE_ELEM_value(pk->operation_cache, i); - if (keymgmt == p->keymgmt && (p->selection & selection) == selection) + if ((p->selection & selection) == selection + && (keymgmt == p->keymgmt + || (keymgmt->name_id == p->keymgmt->name_id + && keymgmt->prov == p->keymgmt->prov))) return p; } return NULL; diff --git a/deps/openssl/openssl/crypto/evp/names.c b/deps/openssl/openssl/crypto/evp/names.c index 19c03a3085e843..7ff850f9975304 100644 --- a/deps/openssl/openssl/crypto/evp/names.c +++ b/deps/openssl/openssl/crypto/evp/names.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -78,6 +78,7 @@ const EVP_CIPHER *evp_get_cipherbyname_ex(OSSL_LIB_CTX *libctx, const EVP_CIPHER *cp; OSSL_NAMEMAP *namemap; int id; + int do_retry = 1; if (!OPENSSL_init_crypto(OPENSSL_INIT_ADD_ALL_CIPHERS, NULL)) return NULL; @@ -94,9 +95,21 @@ const EVP_CIPHER *evp_get_cipherbyname_ex(OSSL_LIB_CTX *libctx, */ namemap = ossl_namemap_stored(libctx); + retry: id = ossl_namemap_name2num(namemap, name); - if (id == 0) - return NULL; + if (id == 0) { + EVP_CIPHER *fetched_cipher; + + /* Try to fetch it because the name might not be known yet. */ + if (!do_retry) + return NULL; + do_retry = 0; + ERR_set_mark(); + fetched_cipher = EVP_CIPHER_fetch(libctx, name, NULL); + EVP_CIPHER_free(fetched_cipher); + ERR_pop_to_mark(); + goto retry; + } if (!ossl_namemap_doall_names(namemap, id, cipher_from_name, &cp)) return NULL; @@ -124,6 +137,7 @@ const EVP_MD *evp_get_digestbyname_ex(OSSL_LIB_CTX *libctx, const char *name) const EVP_MD *dp; OSSL_NAMEMAP *namemap; int id; + int do_retry = 1; if (!OPENSSL_init_crypto(OPENSSL_INIT_ADD_ALL_DIGESTS, NULL)) return NULL; @@ -140,9 +154,21 @@ const EVP_MD *evp_get_digestbyname_ex(OSSL_LIB_CTX *libctx, const char *name) */ namemap = ossl_namemap_stored(libctx); + retry: id = ossl_namemap_name2num(namemap, name); - if (id == 0) - return NULL; + if (id == 0) { + EVP_MD *fetched_md; + + /* Try to fetch it because the name might not be known yet. 
*/ + if (!do_retry) + return NULL; + do_retry = 0; + ERR_set_mark(); + fetched_md = EVP_MD_fetch(libctx, name, NULL); + EVP_MD_free(fetched_md); + ERR_pop_to_mark(); + goto retry; + } if (!ossl_namemap_doall_names(namemap, id, digest_from_name, &dp)) return NULL; diff --git a/deps/openssl/openssl/crypto/evp/p_lib.c b/deps/openssl/openssl/crypto/evp/p_lib.c index 04b148a912187e..6ff7eb7e02cfd4 100644 --- a/deps/openssl/openssl/crypto/evp/p_lib.c +++ b/deps/openssl/openssl/crypto/evp/p_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1902,7 +1902,15 @@ void *evp_pkey_export_to_provider(EVP_PKEY *pk, OSSL_LIB_CTX *libctx, * If |tmp_keymgmt| is present in the operation cache, it means * that export doesn't need to be redone. In that case, we take * token copies of the cached pointers, to have token success - * values to return. + * values to return. It is possible (e.g. in a no-cached-fetch + * build), for op->keymgmt to be a different pointer to tmp_keymgmt + * even though the name/provider must be the same. In other words + * the keymgmt instance may be different but still equivalent, i.e. + * same algorithm/provider instance - but we make the simplifying + * assumption that the keydata can be used with either keymgmt + * instance. Not doing so introduces significant complexity and + * probably requires refactoring - since we would have to ripple + * the change in keymgmt instance up the call chain. */ if (op != NULL && op->keymgmt != NULL) { keydata = op->keydata; diff --git a/deps/openssl/openssl/crypto/evp/pmeth_lib.c b/deps/openssl/openssl/crypto/evp/pmeth_lib.c index ba1971ce461d57..5cd0c4b27f6db3 100644 --- a/deps/openssl/openssl/crypto/evp/pmeth_lib.c +++ b/deps/openssl/openssl/crypto/evp/pmeth_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 2006-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2006-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1028,6 +1028,78 @@ static int evp_pkey_ctx_set1_octet_string(EVP_PKEY_CTX *ctx, int fallback, return EVP_PKEY_CTX_set_params(ctx, octet_string_params); } +static int evp_pkey_ctx_add1_octet_string(EVP_PKEY_CTX *ctx, int fallback, + const char *param, int op, int ctrl, + const unsigned char *data, + int datalen) +{ + OSSL_PARAM os_params[2]; + const OSSL_PARAM *gettables; + unsigned char *info = NULL; + size_t info_len = 0; + size_t info_alloc = 0; + int ret = 0; + + if (ctx == NULL || (ctx->operation & op) == 0) { + ERR_raise(ERR_LIB_EVP, EVP_R_COMMAND_NOT_SUPPORTED); + /* Uses the same return values as EVP_PKEY_CTX_ctrl */ + return -2; + } + + /* Code below to be removed when legacy support is dropped. 
*/ + if (fallback) + return EVP_PKEY_CTX_ctrl(ctx, -1, op, ctrl, datalen, (void *)(data)); + /* end of legacy support */ + + if (datalen < 0) { + ERR_raise(ERR_LIB_EVP, EVP_R_INVALID_LENGTH); + return 0; + } else if (datalen == 0) { + return 1; + } + + /* Check for older provider that doesn't support getting this parameter */ + gettables = EVP_PKEY_CTX_gettable_params(ctx); + if (gettables == NULL || OSSL_PARAM_locate_const(gettables, param) == NULL) + return evp_pkey_ctx_set1_octet_string(ctx, fallback, param, op, ctrl, + data, datalen); + + /* Get the original value length */ + os_params[0] = OSSL_PARAM_construct_octet_string(param, NULL, 0); + os_params[1] = OSSL_PARAM_construct_end(); + + if (!EVP_PKEY_CTX_get_params(ctx, os_params)) + return 0; + + /* This should not happen but check to be sure. */ + if (os_params[0].return_size == OSSL_PARAM_UNMODIFIED) + return 0; + + info_alloc = os_params[0].return_size + datalen; + if (info_alloc == 0) + return 0; + info = OPENSSL_zalloc(info_alloc); + if (info == NULL) + return 0; + info_len = os_params[0].return_size; + + os_params[0] = OSSL_PARAM_construct_octet_string(param, info, info_alloc); + + /* if we have data, then go get it */ + if (info_len > 0) { + if (!EVP_PKEY_CTX_get_params(ctx, os_params)) + goto error; + } + + /* Copy the input data */ + memcpy(&info[info_len], data, datalen); + ret = EVP_PKEY_CTX_set_params(ctx, os_params); + + error: + OPENSSL_clear_free(info, info_alloc); + return ret; +} + int EVP_PKEY_CTX_set1_tls1_prf_secret(EVP_PKEY_CTX *ctx, const unsigned char *sec, int seclen) { @@ -1078,7 +1150,7 @@ int EVP_PKEY_CTX_set1_hkdf_key(EVP_PKEY_CTX *ctx, int EVP_PKEY_CTX_add1_hkdf_info(EVP_PKEY_CTX *ctx, const unsigned char *info, int infolen) { - return evp_pkey_ctx_set1_octet_string(ctx, ctx->op.kex.algctx == NULL, + return evp_pkey_ctx_add1_octet_string(ctx, ctx->op.kex.algctx == NULL, OSSL_KDF_PARAM_INFO, EVP_PKEY_OP_DERIVE, EVP_PKEY_CTRL_HKDF_INFO, diff --git a/deps/openssl/openssl/crypto/evp/signature.c b/deps/openssl/openssl/crypto/evp/signature.c index fb269b3bfd0717..8adf254d5eecbe 100644 --- a/deps/openssl/openssl/crypto/evp/signature.c +++ b/deps/openssl/openssl/crypto/evp/signature.c @@ -1,5 +1,5 @@ /* - * Copyright 2006-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2006-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -403,8 +403,8 @@ static int evp_pkey_signature_init(EVP_PKEY_CTX *ctx, int operation, int iter; if (ctx == NULL) { - ERR_raise(ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE); - return -2; + ERR_raise(ERR_LIB_EVP, ERR_R_PASSED_NULL_PARAMETER); + return -1; } evp_pkey_ctx_free_old_ops(ctx); @@ -634,8 +634,8 @@ int EVP_PKEY_sign(EVP_PKEY_CTX *ctx, int ret; if (ctx == NULL) { - ERR_raise(ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE); - return -2; + ERR_raise(ERR_LIB_EVP, ERR_R_PASSED_NULL_PARAMETER); + return -1; } if (ctx->operation != EVP_PKEY_OP_SIGN) { @@ -646,6 +646,11 @@ int EVP_PKEY_sign(EVP_PKEY_CTX *ctx, if (ctx->op.sig.algctx == NULL) goto legacy; + if (ctx->op.sig.signature->sign == NULL) { + ERR_raise(ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE); + return -2; + } + ret = ctx->op.sig.signature->sign(ctx->op.sig.algctx, sig, siglen, (sig == NULL) ? 
0 : *siglen, tbs, tbslen); @@ -678,8 +683,8 @@ int EVP_PKEY_verify(EVP_PKEY_CTX *ctx, int ret; if (ctx == NULL) { - ERR_raise(ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE); - return -2; + ERR_raise(ERR_LIB_EVP, ERR_R_PASSED_NULL_PARAMETER); + return -1; } if (ctx->operation != EVP_PKEY_OP_VERIFY) { @@ -690,6 +695,11 @@ int EVP_PKEY_verify(EVP_PKEY_CTX *ctx, if (ctx->op.sig.algctx == NULL) goto legacy; + if (ctx->op.sig.signature->verify == NULL) { + ERR_raise(ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE); + return -2; + } + ret = ctx->op.sig.signature->verify(ctx->op.sig.algctx, sig, siglen, tbs, tbslen); @@ -721,8 +731,8 @@ int EVP_PKEY_verify_recover(EVP_PKEY_CTX *ctx, int ret; if (ctx == NULL) { - ERR_raise(ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE); - return -2; + ERR_raise(ERR_LIB_EVP, ERR_R_PASSED_NULL_PARAMETER); + return -1; } if (ctx->operation != EVP_PKEY_OP_VERIFYRECOVER) { @@ -733,6 +743,11 @@ int EVP_PKEY_verify_recover(EVP_PKEY_CTX *ctx, if (ctx->op.sig.algctx == NULL) goto legacy; + if (ctx->op.sig.signature->verify_recover == NULL) { + ERR_raise(ERR_LIB_EVP, EVP_R_OPERATION_NOT_SUPPORTED_FOR_THIS_KEYTYPE); + return -2; + } + ret = ctx->op.sig.signature->verify_recover(ctx->op.sig.algctx, rout, routlen, (rout == NULL ? 0 : *routlen), diff --git a/deps/openssl/openssl/crypto/init.c b/deps/openssl/openssl/crypto/init.c index cacf637c89f8e8..659a660eeced68 100644 --- a/deps/openssl/openssl/crypto/init.c +++ b/deps/openssl/openssl/crypto/init.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -97,17 +97,19 @@ static int win32atexit(void) DEFINE_RUN_ONCE_STATIC(ossl_init_register_atexit) { -#ifdef OPENSSL_INIT_DEBUG +#ifndef OPENSSL_NO_ATEXIT +# ifdef OPENSSL_INIT_DEBUG fprintf(stderr, "OPENSSL_INIT: ossl_init_register_atexit()\n"); -#endif -#ifndef OPENSSL_SYS_UEFI -# if defined(_WIN32) && !defined(__BORLANDC__) +# endif +# ifndef OPENSSL_SYS_UEFI +# if defined(_WIN32) && !defined(__BORLANDC__) /* We use _onexit() in preference because it gets called on DLL unload */ if (_onexit(win32atexit) == NULL) return 0; -# else +# else if (atexit(OPENSSL_cleanup) != 0) return 0; +# endif # endif #endif diff --git a/deps/openssl/openssl/crypto/o_str.c b/deps/openssl/openssl/crypto/o_str.c index 7fa487dd5fcdec..a6598171535e18 100644 --- a/deps/openssl/openssl/crypto/o_str.c +++ b/deps/openssl/openssl/crypto/o_str.c @@ -1,5 +1,5 @@ /* - * Copyright 2003-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2003-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -229,12 +229,14 @@ static int buf2hexstr_sep(char *str, size_t str_n, size_t *strlength, int has_sep = (sep != CH_ZERO); size_t len = has_sep ? 
buflen * 3 : 1 + buflen * 2; + if (len == 0) + ++len; if (strlength != NULL) *strlength = len; if (str == NULL) return 1; - if (str_n < (unsigned long)len) { + if (str_n < len) { ERR_raise(ERR_LIB_CRYPTO, CRYPTO_R_TOO_SMALL_BUFFER); return 0; } @@ -246,12 +248,12 @@ static int buf2hexstr_sep(char *str, size_t str_n, size_t *strlength, if (has_sep) *q++ = sep; } - if (has_sep) + if (has_sep && buflen > 0) --q; *q = CH_ZERO; #ifdef CHARSET_EBCDIC - ebcdic2ascii(str, str, q - str - 1); + ebcdic2ascii(str, str, q - str); #endif return 1; } diff --git a/deps/openssl/openssl/crypto/pkcs12/p12_crt.c b/deps/openssl/openssl/crypto/pkcs12/p12_crt.c index 26a444f868b028..1a48e5c611da5c 100644 --- a/deps/openssl/openssl/crypto/pkcs12/p12_crt.c +++ b/deps/openssl/openssl/crypto/pkcs12/p12_crt.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -213,16 +213,19 @@ PKCS12_SAFEBAG *PKCS12_add_key_ex(STACK_OF(PKCS12_SAFEBAG) **pbags, if (key_usage && !PKCS8_add_keyusage(p8, key_usage)) goto err; if (nid_key != -1) { + /* This call does not take ownership of p8 */ bag = PKCS12_SAFEBAG_create_pkcs8_encrypt_ex(nid_key, pass, -1, NULL, 0, iter, p8, ctx, propq); - PKCS8_PRIV_KEY_INFO_free(p8); - } else + } else { bag = PKCS12_SAFEBAG_create0_p8inf(p8); + if (bag != NULL) + p8 = NULL; /* bag takes ownership of p8 */ + } + /* This does not need to be in the error path */ + if (p8 != NULL) + PKCS8_PRIV_KEY_INFO_free(p8); - if (!bag) - goto err; - - if (!pkcs12_add_bag(pbags, bag)) + if (bag == NULL || !pkcs12_add_bag(pbags, bag)) goto err; return bag; diff --git a/deps/openssl/openssl/crypto/pkcs7/pk7_doit.c b/deps/openssl/openssl/crypto/pkcs7/pk7_doit.c index 1cef67b211af77..d7791e5c4f4705 100644 --- a/deps/openssl/openssl/crypto/pkcs7/pk7_doit.c +++ b/deps/openssl/openssl/crypto/pkcs7/pk7_doit.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
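The p12_crt.c hunk above encodes an ownership rule: PKCS12_SAFEBAG_create0_p8inf() takes ownership of p8 only on success, while the encrypting variant never does, so the local pointer is NULLed at the hand-off and whatever is still owned is freed on every path. A generic sketch of that shape, with hypothetical types and a hypothetical bag constructor:

    #include <stdlib.h>

    struct keyinfo { int dummy; };
    struct safebag { struct keyinfo *ki; };

    /* A create0-style constructor owns its argument only on success. */
    static struct safebag *safebag_create0(struct keyinfo *ki)
    {
        struct safebag *bag = malloc(sizeof(*bag));

        if (bag != NULL)
            bag->ki = ki;        /* success: the bag now owns ki */
        return bag;
    }

    static struct safebag *add_key(struct keyinfo *ki)
    {
        struct safebag *bag = safebag_create0(ki);

        if (bag != NULL)
            ki = NULL;           /* ownership transferred to the bag */
        free(ki);                /* freed only if we still own it */
        return bag;
    }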
You can obtain a copy @@ -1239,36 +1239,29 @@ static int add_attribute(STACK_OF(X509_ATTRIBUTE) **sk, int nid, int atrtype, void *value) { X509_ATTRIBUTE *attr = NULL; + int i, n; if (*sk == NULL) { if ((*sk = sk_X509_ATTRIBUTE_new_null()) == NULL) return 0; - new_attrib: - if ((attr = X509_ATTRIBUTE_create(nid, atrtype, value)) == NULL) - return 0; - if (!sk_X509_ATTRIBUTE_push(*sk, attr)) { - X509_ATTRIBUTE_free(attr); - return 0; - } - } else { - int i; - - for (i = 0; i < sk_X509_ATTRIBUTE_num(*sk); i++) { - attr = sk_X509_ATTRIBUTE_value(*sk, i); - if (OBJ_obj2nid(X509_ATTRIBUTE_get0_object(attr)) == nid) { - X509_ATTRIBUTE_free(attr); - attr = X509_ATTRIBUTE_create(nid, atrtype, value); - if (attr == NULL) - return 0; - if (!sk_X509_ATTRIBUTE_set(*sk, i, attr)) { - X509_ATTRIBUTE_free(attr); - return 0; - } - goto end; - } - } - goto new_attrib; } + n = sk_X509_ATTRIBUTE_num(*sk); + for (i = 0; i < n; i++) { + attr = sk_X509_ATTRIBUTE_value(*sk, i); + if (OBJ_obj2nid(X509_ATTRIBUTE_get0_object(attr)) == nid) + goto end; + } + if (!sk_X509_ATTRIBUTE_push(*sk, NULL)) + return 0; + end: + attr = X509_ATTRIBUTE_create(nid, atrtype, value); + if (attr == NULL) { + if (i == n) + sk_X509_ATTRIBUTE_pop(*sk); + return 0; + } + X509_ATTRIBUTE_free(sk_X509_ATTRIBUTE_value(*sk, i)); + (void) sk_X509_ATTRIBUTE_set(*sk, i, attr); return 1; } diff --git a/deps/openssl/openssl/crypto/property/property.c b/deps/openssl/openssl/crypto/property/property.c index 602db0f3ff54e9..75615d39af3664 100644 --- a/deps/openssl/openssl/crypto/property/property.c +++ b/deps/openssl/openssl/crypto/property/property.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved. * * Licensed under the Apache License 2.0 (the "License"). 
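The evp_pkey_ctx_add1_octet_string() addition a few hunks above makes EVP_PKEY_CTX_add1_hkdf_info() append to any existing info rather than silently replace it. A usage sketch of the repaired semantics; the key bytes and labels below are illustrative values only:

    #include <openssl/evp.h>
    #include <openssl/kdf.h>

    /* Successive add1 calls append, so the effective HKDF info here
     * is "label-v1". */
    static int hkdf_two_part_info(unsigned char *out, size_t *outlen)
    {
        EVP_PKEY_CTX *ctx = EVP_PKEY_CTX_new_id(EVP_PKEY_HKDF, NULL);
        static const unsigned char secret[16] = { 0x0b };
        int ok = 0;

        if (ctx == NULL || EVP_PKEY_derive_init(ctx) <= 0)
            goto end;
        if (EVP_PKEY_CTX_set_hkdf_md(ctx, EVP_sha256()) <= 0
            || EVP_PKEY_CTX_set1_hkdf_key(ctx, secret, sizeof(secret)) <= 0
            || EVP_PKEY_CTX_add1_hkdf_info(ctx,
                                           (const unsigned char *)"label",
                                           5) <= 0
            || EVP_PKEY_CTX_add1_hkdf_info(ctx,
                                           (const unsigned char *)"-v1",
                                           3) <= 0)
            goto end;
        ok = EVP_PKEY_derive(ctx, out, outlen) > 0;
     end:
        EVP_PKEY_CTX_free(ctx);
        return ok;
    }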
You may not use @@ -95,6 +95,8 @@ typedef struct { DEFINE_SPARSE_ARRAY_OF(ALGORITHM); +DEFINE_STACK_OF(ALGORITHM) + typedef struct ossl_global_properties_st { OSSL_PROPERTY_LIST *list; #ifndef FIPS_MODULE @@ -469,33 +471,45 @@ static void alg_do_one(ALGORITHM *alg, IMPLEMENTATION *impl, fn(alg->nid, impl->method.method, fnarg); } -struct alg_do_each_data_st { - void (*fn)(int id, void *method, void *fnarg); - void *fnarg; -}; - -static void alg_do_each(ossl_uintmax_t idx, ALGORITHM *alg, void *arg) +static void alg_copy(ossl_uintmax_t idx, ALGORITHM *alg, void *arg) { - struct alg_do_each_data_st *data = arg; - int i, end = sk_IMPLEMENTATION_num(alg->impls); - - for (i = 0; i < end; i++) { - IMPLEMENTATION *impl = sk_IMPLEMENTATION_value(alg->impls, i); + STACK_OF(ALGORITHM) *newalg = arg; - alg_do_one(alg, impl, data->fn, data->fnarg); - } + (void)sk_ALGORITHM_push(newalg, alg); } void ossl_method_store_do_all(OSSL_METHOD_STORE *store, void (*fn)(int id, void *method, void *fnarg), void *fnarg) { - struct alg_do_each_data_st data; + int i, j; + int numalgs, numimps; + STACK_OF(ALGORITHM) *tmpalgs; + ALGORITHM *alg; - data.fn = fn; - data.fnarg = fnarg; - if (store != NULL) - ossl_sa_ALGORITHM_doall_arg(store->algs, alg_do_each, &data); + if (store != NULL) { + + if (!ossl_property_read_lock(store)) + return; + + tmpalgs = sk_ALGORITHM_new_reserve(NULL, + ossl_sa_ALGORITHM_num(store->algs)); + if (tmpalgs == NULL) { + ossl_property_unlock(store); + return; + } + + ossl_sa_ALGORITHM_doall_arg(store->algs, alg_copy, tmpalgs); + ossl_property_unlock(store); + numalgs = sk_ALGORITHM_num(tmpalgs); + for (i = 0; i < numalgs; i++) { + alg = sk_ALGORITHM_value(tmpalgs, i); + numimps = sk_IMPLEMENTATION_num(alg->impls); + for (j = 0; j < numimps; j++) + alg_do_one(alg, sk_IMPLEMENTATION_value(alg->impls, j), fn, fnarg); + } + sk_ALGORITHM_free(tmpalgs); + } } int ossl_method_store_fetch(OSSL_METHOD_STORE *store, @@ -651,10 +665,13 @@ static void impl_cache_flush_one_alg(ossl_uintmax_t idx, ALGORITHM *alg, void *v) { IMPL_CACHE_FLUSH *state = (IMPL_CACHE_FLUSH *)v; + unsigned long orig_down_load = lh_QUERY_get_down_load(alg->cache); state->cache = alg->cache; + lh_QUERY_set_down_load(alg->cache, 0); lh_QUERY_doall_IMPL_CACHE_FLUSH(state->cache, &impl_cache_flush_cache, state); + lh_QUERY_set_down_load(alg->cache, orig_down_load); } static void ossl_method_cache_flush_some(OSSL_METHOD_STORE *store) diff --git a/deps/openssl/openssl/crypto/property/property_parse.c b/deps/openssl/openssl/crypto/property/property_parse.c index 19ea39a786eb05..45c798f1b50b10 100644 --- a/deps/openssl/openssl/crypto/property/property_parse.c +++ b/deps/openssl/openssl/crypto/property/property_parse.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2019, Oracle and/or its affiliates. All rights reserved. * * Licensed under the Apache License 2.0 (the "License"). 
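The property.c rework above changes ossl_method_store_do_all() to snapshot the algorithm list into a temporary stack while the read lock is held, then run the user callbacks after unlocking, so a callback can never deadlock against the store lock. A plain-C sketch of that shape using pthreads rather than OpenSSL's internal lock type:

    #include <pthread.h>
    #include <stdlib.h>
    #include <string.h>

    struct store {
        pthread_rwlock_t lock;
        int *items;
        size_t n;
    };

    /* Snapshot shared state under the lock, release the lock, then
     * invoke callbacks on the snapshot only. */
    static void store_do_all(struct store *s,
                             void (*fn)(int item, void *arg), void *arg)
    {
        int *snap;
        size_t i, n;

        if (pthread_rwlock_rdlock(&s->lock) != 0)
            return;
        n = s->n;
        snap = malloc(n * sizeof(*snap));
        if (snap != NULL)
            memcpy(snap, s->items, n * sizeof(*snap));
        pthread_rwlock_unlock(&s->lock);

        if (snap == NULL)
            return;
        for (i = 0; i < n; i++)
            fn(snap[i], arg);    /* callbacks run without the lock held */
        free(snap);
    }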
You may not use @@ -14,6 +14,7 @@ #include #include "internal/propertyerr.h" #include "internal/property.h" +#include "internal/numbers.h" #include "crypto/ctype.h" #include "internal/nelem.h" #include "property_local.h" diff --git a/deps/openssl/openssl/crypto/provider_core.c b/deps/openssl/openssl/crypto/provider_core.c index 4cadb6a9f02e59..cb4233eb52fd8d 100644 --- a/deps/openssl/openssl/crypto/provider_core.c +++ b/deps/openssl/openssl/crypto/provider_core.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -567,8 +567,15 @@ OSSL_PROVIDER *ossl_provider_new(OSSL_LIB_CTX *libctx, const char *name, } /* provider_new() generates an error, so no need here */ - if ((prov = provider_new(name, template.init, template.parameters)) == NULL) + prov = provider_new(name, template.init, template.parameters); + + if (prov == NULL) + return NULL; + + if (!ossl_provider_set_module_path(prov, template.path)) { + ossl_provider_free(prov); return NULL; + } prov->libctx = libctx; #ifndef FIPS_MODULE diff --git a/deps/openssl/openssl/crypto/rand/randfile.c b/deps/openssl/openssl/crypto/rand/randfile.c index 82f41637387b21..b4854a4c4eabdf 100644 --- a/deps/openssl/openssl/crypto/rand/randfile.c +++ b/deps/openssl/openssl/crypto/rand/randfile.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -16,6 +16,7 @@ # include #endif +#include "e_os.h" #include "internal/cryptlib.h" #include @@ -208,8 +209,16 @@ int RAND_write_file(const char *file) * should be restrictive from the start */ int fd = open(file, O_WRONLY | O_CREAT | O_BINARY, 0600); - if (fd != -1) + + if (fd != -1) { out = fdopen(fd, "wb"); + if (out == NULL) { + close(fd); + ERR_raise_data(ERR_LIB_RAND, RAND_R_CANNOT_OPEN_FILE, + "Filename=%s", file); + return -1; + } + } } #endif diff --git a/deps/openssl/openssl/crypto/rsa/rsa_oaep.c b/deps/openssl/openssl/crypto/rsa/rsa_oaep.c index d9be1a4f98c7ba..ffe24edcb6eea3 100644 --- a/deps/openssl/openssl/crypto/rsa/rsa_oaep.c +++ b/deps/openssl/openssl/crypto/rsa/rsa_oaep.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -186,7 +186,7 @@ int RSA_padding_check_PKCS1_OAEP_mgf1(unsigned char *to, int tlen, mdlen = EVP_MD_get_size(md); - if (tlen <= 0 || flen <= 0) + if (tlen <= 0 || flen <= 0 || mdlen <= 0) return -1; /* * |num| is the length of the modulus; |flen| is the length of the diff --git a/deps/openssl/openssl/crypto/sha/build.info b/deps/openssl/openssl/crypto/sha/build.info index d61f7de9b6bde8..186ec13cc82a12 100644 --- a/deps/openssl/openssl/crypto/sha/build.info +++ b/deps/openssl/openssl/crypto/sha/build.info @@ -88,7 +88,7 @@ DEFINE[../../providers/libdefault.a]=$SHA1DEF $KECCAK1600DEF # linked with libcrypto. 
Otherwise, it already gets everything that # the static libcrypto.a has, and doesn't need it added again. IF[{- !$disabled{module} && !$disabled{shared} -}] - DEFINE[../providers/liblegacy.a]=$SHA1DEF $KECCAK1600DEF + DEFINE[../../providers/liblegacy.a]=$SHA1DEF $KECCAK1600DEF ENDIF GENERATE[sha1-586.S]=asm/sha1-586.pl diff --git a/deps/openssl/openssl/crypto/sm2/sm2_crypt.c b/deps/openssl/openssl/crypto/sm2/sm2_crypt.c index 5318c6199f6801..ff8171e39280dc 100644 --- a/deps/openssl/openssl/crypto/sm2/sm2_crypt.c +++ b/deps/openssl/openssl/crypto/sm2/sm2_crypt.c @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright 2017 Ribose Inc. All Rights Reserved. * Ported from Ribose contributions from Botan. * @@ -67,6 +67,18 @@ static size_t ec_field_size(const EC_GROUP *group) return field_size; } +static int is_all_zeros(const unsigned char *msg, size_t msglen) +{ + unsigned char re = 0; + size_t i; + + for (i = 0; i < msglen; i++) { + re |= msg[i]; + } + + return re == 0 ? 1 : 0; +} + int ossl_sm2_plaintext_size(const unsigned char *ct, size_t ct_size, size_t *pt_size) { @@ -179,6 +191,13 @@ int ossl_sm2_encrypt(const EC_KEY *key, memset(ciphertext_buf, 0, *ciphertext_len); + msg_mask = OPENSSL_zalloc(msg_len); + if (msg_mask == NULL) { + ERR_raise(ERR_LIB_SM2, ERR_R_MALLOC_FAILURE); + goto done; + } + +again: if (!BN_priv_rand_range_ex(k, order, 0, ctx)) { ERR_raise(ERR_LIB_SM2, ERR_R_INTERNAL_ERROR); goto done; @@ -198,12 +217,6 @@ int ossl_sm2_encrypt(const EC_KEY *key, goto done; } - msg_mask = OPENSSL_zalloc(msg_len); - if (msg_mask == NULL) { - ERR_raise(ERR_LIB_SM2, ERR_R_MALLOC_FAILURE); - goto done; - } - /* X9.63 with no salt happens to match the KDF used in SM2 */ if (!ossl_ecdh_kdf_X9_63(msg_mask, msg_len, x2y2, 2 * field_size, NULL, 0, digest, libctx, propq)) { @@ -211,6 +224,11 @@ int ossl_sm2_encrypt(const EC_KEY *key, goto done; } + if (is_all_zeros(msg_mask, msg_len)) { + memset(x2y2, 0, 2 * field_size); + goto again; + } + for (i = 0; i != msg_len; ++i) msg_mask[i] ^= msg[i]; @@ -364,6 +382,11 @@ int ossl_sm2_decrypt(const EC_KEY *key, goto done; } + if (is_all_zeros(msg_mask, msg_len)) { + ERR_raise(ERR_LIB_SM2, SM2_R_INVALID_ENCODING); + goto done; + } + for (i = 0; i != msg_len; ++i) ptext_buf[i] = C2[i] ^ msg_mask[i]; diff --git a/deps/openssl/openssl/crypto/sm2/sm2_sign.c b/deps/openssl/openssl/crypto/sm2/sm2_sign.c index ff5be9b73e9fb0..71ccfcfc4c3dee 100644 --- a/deps/openssl/openssl/crypto/sm2/sm2_sign.c +++ b/deps/openssl/openssl/crypto/sm2/sm2_sign.c @@ -1,5 +1,5 @@ /* - * Copyright 2017-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright 2017 Ribose Inc. All Rights Reserved. * Ported from Ribose contributions from Botan. 
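The sm2_crypt.c hunks below reject a KDF output of all zeros: encryption retries with a fresh k, and decryption fails, because XOR with an all-zero mask would pass the plaintext through unmasked. The helper scans without an early exit, so its run time does not depend on the data; a self-contained copy of that idea:

    #include <stddef.h>

    /* OR all bytes together instead of returning at the first nonzero
     * byte, mirroring the is_all_zeros() helper added in sm2_crypt.c. */
    static int is_all_zeros(const unsigned char *msg, size_t msglen)
    {
        unsigned char acc = 0;
        size_t i;

        for (i = 0; i < msglen; i++)
            acc |= msg[i];
        return acc == 0;
    }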
* @@ -29,6 +29,7 @@ int ossl_sm2_compute_z_digest(uint8_t *out, { int rc = 0; const EC_GROUP *group = EC_KEY_get0_group(key); + const EC_POINT *pubkey = EC_KEY_get0_public_key(key); BN_CTX *ctx = NULL; EVP_MD_CTX *hash = NULL; BIGNUM *p = NULL; @@ -43,6 +44,12 @@ int ossl_sm2_compute_z_digest(uint8_t *out, uint16_t entl = 0; uint8_t e_byte = 0; + /* SM2 Signatures require a public key, check for it */ + if (pubkey == NULL) { + ERR_raise(ERR_LIB_SM2, ERR_R_PASSED_NULL_PARAMETER); + goto done; + } + hash = EVP_MD_CTX_new(); ctx = BN_CTX_new_ex(ossl_ec_key_get_libctx(key)); if (hash == NULL || ctx == NULL) { @@ -118,7 +125,7 @@ int ossl_sm2_compute_z_digest(uint8_t *out, || BN_bn2binpad(yG, buf, p_bytes) < 0 || !EVP_DigestUpdate(hash, buf, p_bytes) || !EC_POINT_get_affine_coordinates(group, - EC_KEY_get0_public_key(key), + pubkey, xA, yA, ctx) || BN_bn2binpad(xA, buf, p_bytes) < 0 || !EVP_DigestUpdate(hash, buf, p_bytes) @@ -442,6 +449,11 @@ int ossl_sm2_internal_sign(const unsigned char *dgst, int dgstlen, int sigleni; int ret = -1; + if (sig == NULL) { + ERR_raise(ERR_LIB_SM2, ERR_R_PASSED_NULL_PARAMETER); + goto done; + } + e = BN_bin2bn(dgst, dgstlen, NULL); if (e == NULL) { ERR_raise(ERR_LIB_SM2, ERR_R_BN_LIB); @@ -454,7 +466,7 @@ int ossl_sm2_internal_sign(const unsigned char *dgst, int dgstlen, goto done; } - sigleni = i2d_ECDSA_SIG(s, sig != NULL ? &sig : NULL); + sigleni = i2d_ECDSA_SIG(s, &sig); if (sigleni < 0) { ERR_raise(ERR_LIB_SM2, ERR_R_INTERNAL_ERROR); goto done; diff --git a/deps/openssl/openssl/crypto/x509/v3_addr.c b/deps/openssl/openssl/crypto/x509/v3_addr.c index 4930f33124222f..20f3d2ba70deaa 100644 --- a/deps/openssl/openssl/crypto/x509/v3_addr.c +++ b/deps/openssl/openssl/crypto/x509/v3_addr.c @@ -397,11 +397,11 @@ static int make_addressPrefix(IPAddressOrRange **result, unsigned char *addr, const int prefixlen, const int afilen) { int bytelen = (prefixlen + 7) / 8, bitlen = prefixlen % 8; - IPAddressOrRange *aor = IPAddressOrRange_new(); + IPAddressOrRange *aor; if (prefixlen < 0 || prefixlen > (afilen * 8)) return 0; - if (aor == NULL) + if ((aor = IPAddressOrRange_new()) == NULL) return 0; aor->type = IPAddressOrRange_addressPrefix; if (aor->u.addressPrefix == NULL && diff --git a/deps/openssl/openssl/crypto/x509/v3_utl.c b/deps/openssl/openssl/crypto/x509/v3_utl.c index 6e4ef26ed6082d..56ee36d4521ebc 100644 --- a/deps/openssl/openssl/crypto/x509/v3_utl.c +++ b/deps/openssl/openssl/crypto/x509/v3_utl.c @@ -1,5 +1,5 @@ /* - * Copyright 1999-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1999-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -916,36 +916,64 @@ static int do_x509_check(X509 *x, const char *chk, size_t chklen, ASN1_STRING *cstr; gen = sk_GENERAL_NAME_value(gens, i); - if ((gen->type == GEN_OTHERNAME) && (check_type == GEN_EMAIL)) { - if (OBJ_obj2nid(gen->d.otherName->type_id) == - NID_id_on_SmtpUTF8Mailbox) { - san_present = 1; - - /* - * If it is not a UTF8String then that is unexpected and we - * treat it as no match - */ - if (gen->d.otherName->value->type == V_ASN1_UTF8STRING) { - cstr = gen->d.otherName->value->value.utf8string; - - /* Positive on success, negative on error! 
*/ - if ((rv = do_check_string(cstr, 0, equal, flags, - chk, chklen, peername)) != 0) - break; - } - } else + switch (gen->type) { + default: + continue; + case GEN_OTHERNAME: + switch (OBJ_obj2nid(gen->d.otherName->type_id)) { + default: continue; - } else { - if ((gen->type != check_type) && (gen->type != GEN_OTHERNAME)) + case NID_id_on_SmtpUTF8Mailbox: + /*- + * https://datatracker.ietf.org/doc/html/rfc8398#section-3 + * + * Due to name constraint compatibility reasons described + * in Section 6, SmtpUTF8Mailbox subjectAltName MUST NOT + * be used unless the local-part of the email address + * contains non-ASCII characters. When the local-part is + * ASCII, rfc822Name subjectAltName MUST be used instead + * of SmtpUTF8Mailbox. This is compatible with legacy + * software that supports only rfc822Name (and not + * SmtpUTF8Mailbox). [...] + * + * SmtpUTF8Mailbox is encoded as UTF8String. + * + * If it is not a UTF8String then that is unexpected, and + * we ignore the invalid SAN (neither set san_present nor + * consider it a candidate for equality). This does mean + * that the subject CN may be considered, as would be the + * case when the malformed SmtpUtf8Mailbox SAN is instead + * simply absent. + * + * When CN-ID matching is not desirable, applications can + * choose to turn it off, doing so is at this time a best + * practice. + */ + if (check_type != GEN_EMAIL + || gen->d.otherName->value->type != V_ASN1_UTF8STRING) + continue; + alt_type = 0; + cstr = gen->d.otherName->value->value.utf8string; + break; + } + break; + case GEN_EMAIL: + if (check_type != GEN_EMAIL) continue; - } - san_present = 1; - if (check_type == GEN_EMAIL) cstr = gen->d.rfc822Name; - else if (check_type == GEN_DNS) + break; + case GEN_DNS: + if (check_type != GEN_DNS) + continue; cstr = gen->d.dNSName; - else + break; + case GEN_IPADD: + if (check_type != GEN_IPADD) + continue; cstr = gen->d.iPAddress; + break; + } + san_present = 1; /* Positive on success, negative on error! */ if ((rv = do_check_string(cstr, alt_type, equal, flags, chk, chklen, peername)) != 0) diff --git a/deps/openssl/openssl/crypto/x509/x_name.c b/deps/openssl/openssl/crypto/x509/x_name.c index 944eb9992486d3..eded80246df91b 100644 --- a/deps/openssl/openssl/crypto/x509/x_name.c +++ b/deps/openssl/openssl/crypto/x509/x_name.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -476,8 +476,8 @@ static int i2d_name_canon(const STACK_OF(STACK_OF_X509_NAME_ENTRY) * _intname, v = sk_ASN1_VALUE_value(intname, i); ltmp = ASN1_item_ex_i2d(&v, in, ASN1_ITEM_rptr(X509_NAME_ENTRIES), -1, -1); - if (ltmp < 0) - return ltmp; + if (ltmp < 0 || len > INT_MAX - ltmp) + return -1; len += ltmp; } return len; diff --git a/deps/openssl/openssl/demos/digest/EVP_MD_demo.c b/deps/openssl/openssl/demos/digest/EVP_MD_demo.c index 99589bd3446b61..79e585d0ba4f60 100644 --- a/deps/openssl/openssl/demos/digest/EVP_MD_demo.c +++ b/deps/openssl/openssl/demos/digest/EVP_MD_demo.c @@ -1,5 +1,5 @@ /*- - * Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
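The x_name.c hunk above guards the running DER length against signed overflow before adding, since overflowing a signed int is undefined behaviour in C. The guard in isolation:

    #include <limits.h>

    /* Reject the addition before it can wrap past INT_MAX, as in the
     * i2d_name_canon() hunk. */
    static int add_length(int len, int ltmp)
    {
        if (ltmp < 0 || len > INT_MAX - ltmp)
            return -1;
        return len + ltmp;
    }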
You can obtain a copy @@ -83,7 +83,7 @@ int demonstrate_digest(void) const char *option_properties = NULL; EVP_MD *message_digest = NULL; EVP_MD_CTX *digest_context = NULL; - unsigned int digest_length; + int digest_length; unsigned char *digest_value = NULL; int j; diff --git a/deps/openssl/openssl/demos/digest/EVP_MD_stdin.c b/deps/openssl/openssl/demos/digest/EVP_MD_stdin.c index 71a3d325a364e0..47e6b523857c7d 100644 --- a/deps/openssl/openssl/demos/digest/EVP_MD_stdin.c +++ b/deps/openssl/openssl/demos/digest/EVP_MD_stdin.c @@ -1,5 +1,5 @@ /*- - * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -38,7 +38,7 @@ int demonstrate_digest(BIO *input) const char * option_properties = NULL; EVP_MD *message_digest = NULL; EVP_MD_CTX *digest_context = NULL; - unsigned int digest_length; + int digest_length; unsigned char *digest_value = NULL; unsigned char buffer[512]; int ii; diff --git a/deps/openssl/openssl/doc/HOWTO/certificates.txt b/deps/openssl/openssl/doc/HOWTO/certificates.txt index 78ab97b4192800..82ce502a1d3de4 100644 --- a/deps/openssl/openssl/doc/HOWTO/certificates.txt +++ b/deps/openssl/openssl/doc/HOWTO/certificates.txt @@ -89,7 +89,7 @@ was kind enough, your certificate is a raw DER thing in PEM format. Your key most definitely is if you have followed the examples above. However, some (most?) certificate authorities will encode them with things like PKCS7 or PKCS12, or something else. Depending on your -applications, this may be perfectly OK, it all depends on what they +applications, this may be perfectly OK. It all depends on what they know how to decode. If not, there are a number of OpenSSL tools to convert between some (most?) formats. diff --git a/deps/openssl/openssl/doc/fingerprints.txt b/deps/openssl/openssl/doc/fingerprints.txt index 9a26f7c66722c8..bdcad1472309ee 100644 --- a/deps/openssl/openssl/doc/fingerprints.txt +++ b/deps/openssl/openssl/doc/fingerprints.txt @@ -12,8 +12,8 @@ in the file named openssl-1.0.1h.tar.gz.asc. The following is the list of fingerprints for the keys that are currently in use to sign OpenSSL distributions: -OpenSSL OMC: -EFC0 A467 D613 CB83 C7ED 6D30 D894 E2CE 8B3D 79F5 +OpenSSL: +BA54 73A2 B058 7B07 FB27 CF2D 2160 94DF D0CB 81EF Richard Levitte: 7953 AC1F BC3D C8B3 B292 393E D5E9 E43F 7DF9 EE8C diff --git a/deps/openssl/openssl/doc/internal/man3/OPTIONS.pod b/deps/openssl/openssl/doc/internal/man3/OPTIONS.pod index 90593ca46f6fd3..dbdd39a2ee645a 100644 --- a/deps/openssl/openssl/doc/internal/man3/OPTIONS.pod +++ b/deps/openssl/openssl/doc/internal/man3/OPTIONS.pod @@ -155,7 +155,7 @@ on multiple lines; each entry should use B, like this: {OPT_MORE_STR, 0, 0, "This flag is not really needed on Unix systems"}, {OPT_MORE_STR, 0, 0, - "(Unix and descendents for ths win!)"} + "(Unix and descendents for the win!)"} Each subsequent line will be indented the correct amount. @@ -333,7 +333,7 @@ things very differently. =head1 COPYRIGHT -Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
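The demo hunks above switch digest_length from unsigned int to int because EVP_MD_get_size() returns a signed int that can report failure, and that failure must be visible before the value is used as a length. A minimal sketch of the check:

    #include <openssl/evp.h>

    /* Treat a zero or negative size as an error rather than letting it
     * be converted to a huge unsigned allocation size. */
    static int checked_digest_size(const EVP_MD *md)
    {
        int digest_length = EVP_MD_get_size(md);

        return digest_length <= 0 ? -1 : digest_length;
    }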
You can obtain a copy in the file diff --git a/deps/openssl/openssl/doc/internal/man3/ossl_method_construct.pod b/deps/openssl/openssl/doc/internal/man3/ossl_method_construct.pod index 3683798b06b49b..422d7a5b6850a2 100644 --- a/deps/openssl/openssl/doc/internal/man3/ossl_method_construct.pod +++ b/deps/openssl/openssl/doc/internal/man3/ossl_method_construct.pod @@ -93,7 +93,7 @@ This default store should be stored in the library context I. The method to be looked up should be identified with data found in I (which is the I that was passed to ossl_construct_method()). In other words, the ossl_method_construct() caller is entirely responsible -for ensuring the necesssary data is made available. +for ensuring the necessary data is made available. Optionally, I may be given as a search criterion, to narrow down the search of a method belonging to just one provider. @@ -148,7 +148,7 @@ This functionality was added to OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy in the file diff --git a/deps/openssl/openssl/doc/internal/man3/ossl_provider_new.pod b/deps/openssl/openssl/doc/internal/man3/ossl_provider_new.pod index 8bd5594c484c47..193472462b38a6 100644 --- a/deps/openssl/openssl/doc/internal/man3/ossl_provider_new.pod +++ b/deps/openssl/openssl/doc/internal/man3/ossl_provider_new.pod @@ -297,7 +297,7 @@ in a bitstring that's internal to I. ossl_provider_test_operation_bit() checks if the bit operation I is set (1) or not (0) in the internal I bitstring, and sets -I<*result> to 1 or 0 accorddingly. +I<*result> to 1 or 0 accordingly. ossl_provider_init_as_child() stores in the library context I references to the necessary upcalls for managing child providers. The I and I @@ -390,7 +390,7 @@ The functions described here were all added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/internal/man3/ossl_random_add_conf_module.pod b/deps/openssl/openssl/doc/internal/man3/ossl_random_add_conf_module.pod index 6d4f5810dcddb7..a3c1285fe01bb7 100644 --- a/deps/openssl/openssl/doc/internal/man3/ossl_random_add_conf_module.pod +++ b/deps/openssl/openssl/doc/internal/man3/ossl_random_add_conf_module.pod @@ -15,7 +15,7 @@ ossl_random_add_conf_module - internal random configuration module ossl_random_add_conf_module() adds the random configuration module for providers. -This allows the type and parameters of the stardard setup of random number +This allows the type and parameters of the standard setup of random number generators to be configured with an OpenSSL L file. =head1 RETURN VALUES @@ -32,7 +32,7 @@ The functions described here were all added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
You can obtain a copy diff --git a/deps/openssl/openssl/doc/internal/man7/EVP_PKEY.pod b/deps/openssl/openssl/doc/internal/man7/EVP_PKEY.pod index cc738b9c28ebcc..3dc10fa4104cf1 100644 --- a/deps/openssl/openssl/doc/internal/man7/EVP_PKEY.pod +++ b/deps/openssl/openssl/doc/internal/man7/EVP_PKEY.pod @@ -19,7 +19,7 @@ private/public key pairs, but has had other uses as well. =for comment "uses" could as well be "abuses"... -The private/public key pair that an B contains is refered to +The private/public key pair that an B contains is referred to as its "internal key" or "origin" (the reason for "origin" is explained further down, in L), and it can take one of the following forms: @@ -202,7 +202,7 @@ L =head1 COPYRIGHT -Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-crl.pod.in b/deps/openssl/openssl/doc/man1/openssl-crl.pod.in index 7e15f6445a6f25..5ace18f5807f31 100644 --- a/deps/openssl/openssl/doc/man1/openssl-crl.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-crl.pod.in @@ -95,6 +95,9 @@ Print out the CRL in text form. Verify the signature in the CRL. +This option is implicitly enabled if any of B<-CApath>, B<-CAfile> +or B<-CAstore> is specified. + =item B<-noout> Don't output the encoded version of the CRL. @@ -162,7 +165,7 @@ L =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-enc.pod.in b/deps/openssl/openssl/doc/man1/openssl-enc.pod.in index e6d5103bd91a25..a47e783e2d6306 100644 --- a/deps/openssl/openssl/doc/man1/openssl-enc.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-enc.pod.in @@ -97,13 +97,19 @@ Base64 process the data. This means that if encryption is taking place the data is base64 encoded after encryption. If decryption is set then the input data is base64 decoded before being decrypted. +When the B<-A> option not given, +on encoding a newline is inserted after each 64 characters, and +on decoding a newline is expected among the first 1024 bytes of input. + =item B<-base64> Same as B<-a> =item B<-A> -If the B<-a> option is set then base64 process the data on one line. +If the B<-a> option is set then base64 encoding produces output without any +newline character, and base64 decoding does not require any newlines. +Therefore it can be helpful to use the B<-A> option when decoding unknown input. =item B<-k> I @@ -434,6 +440,9 @@ Base64 decode a file then decrypt it using a password supplied in a file: =head1 BUGS The B<-A> option when used with large files doesn't work properly. +On the other hand, when base64 decoding without the B<-A> option, +if the first 1024 bytes of input do not include a newline character +the first two lines of input are ignored. The B command only supports a fixed number of algorithms with certain parameters. So if, for example, you want to use RC2 with a @@ -449,7 +458,7 @@ The B<-ciphers> and B<-engine> options were deprecated in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2023 The OpenSSL Project Authors. All Rights Reserved. 
+Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-mac.pod.in b/deps/openssl/openssl/doc/man1/openssl-mac.pod.in index 56397479910993..5ed97969738182 100644 --- a/deps/openssl/openssl/doc/man1/openssl-mac.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-mac.pod.in @@ -123,26 +123,31 @@ To see the list of supported MAC's use the command C =head1 COPYRIGHT -Copyright 2018-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2018-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-passphrase-options.pod b/deps/openssl/openssl/doc/man1/openssl-passphrase-options.pod index abc43fb41e6fe6..2260dce8a65026 100644 --- a/deps/openssl/openssl/doc/man1/openssl-passphrase-options.pod +++ b/deps/openssl/openssl/doc/man1/openssl-passphrase-options.pod @@ -46,26 +46,32 @@ the environment of other processes is visible on certain platforms =item BI -The first line of I is the password. If the same I -argument is supplied to B<-passin> and B<-passout> arguments then the first -line will be used for the input password and the next line for the output -password. I need not refer to a regular file: it could for example -refer to a device or named pipe. +Reads the password from the specified file I, which can be a regular +file, device, or named pipe. Only the first line, up to the newline character, +is read from the stream. + +If the same I argument is supplied to both B<-passin> and B<-passout> +arguments, the first line will be used for the input password, and the next +line will be used for the output password. =item BI -Read the password from the file descriptor I. This can be used to -send the data via a pipe for example. +Reads the password from the file descriptor I. This can be useful for +sending data via a pipe, for example. The same line handling as described for +B applies to passwords read from file descriptors. + +B is not supported on Windows. =item B -Read the password from standard input. +Reads the password from standard input. The same line handling as described for +B applies to passwords read from standard input. =back =head1 COPYRIGHT -Copyright 2000-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-req.pod.in b/deps/openssl/openssl/doc/man1/openssl-req.pod.in index 31fd71418773cf..a56f548de8ee08 100644 --- a/deps/openssl/openssl/doc/man1/openssl-req.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-req.pod.in @@ -472,16 +472,29 @@ any digest that has been set. =item B This option masks out the use of certain string types in certain -fields. Most users will not need to change this option. +fields. Most users will not need to change this option. It can be set to +several values: -It can be set to several values B which is also the default -option uses PrintableStrings, T61Strings and BMPStrings if the -B value is used then only PrintableStrings and BMPStrings will -be used. 
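As an aside on the revised B<-passin>/B<-passout> text above: the "first line up to the newline" rule is easy to mirror in caller code. The sketch below is an illustration of the documented behaviour only, not code taken from OpenSSL:

    #include <stdio.h>
    #include <string.h>

    /* Read a password the way the amended text describes: take only the
     * first line of the stream, excluding the trailing newline. */
    static int read_pass_line(FILE *fp, char *buf, size_t bufsz)
    {
        if (fgets(buf, (int)bufsz, fp) == NULL)
            return 0;
        buf[strcspn(buf, "\n")] = '\0';   /* drop the newline, if any */
        return 1;
    }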
This follows the PKIX recommendation in RFC2459. If the -B option is used then only UTF8Strings will be used: this -is the PKIX recommendation in RFC2459 after 2003. Finally the B -option just uses PrintableStrings and T61Strings: certain software has -problems with BMPStrings and UTF8Strings: in particular Netscape. +=over 4 + +=item B +- only UTF8Strings are used (this is the default value) + +=item B +- any string type except T61Strings + +=item B +- any string type except BMPStrings and UTF8Strings + +=item B +- any kind of string type + +=back + +Note that B is the PKIX recommendation in RFC2459 after 2003, and the +default B; B is not the default option. The B +value is a workaround for some software that has problems with variable-sized +BMPStrings and UTF8Strings. =item B @@ -765,7 +778,7 @@ The <-nodes> option was deprecated in OpenSSL 3.0, too; use B<-noenc> instead. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-s_client.pod.in b/deps/openssl/openssl/doc/man1/openssl-s_client.pod.in index 4b7b58b72d5537..bd6171aa265c69 100644 --- a/deps/openssl/openssl/doc/man1/openssl-s_client.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-s_client.pod.in @@ -616,7 +616,11 @@ For example strings, see L =item B<-curves> I Specifies the list of supported curves to be sent by the client. The curve is -ultimately selected by the server. For a list of all curves, use: +ultimately selected by the server. + +The list of all supported groups includes named EC parameters as well as X25519 +and X448 or FFDHE groups, and may also include groups implemented in 3rd-party +providers. For a list of named EC parameters, use: $ openssl ecparam -list_curves @@ -910,7 +914,7 @@ The B<-engine> option was deprecated in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-s_server.pod.in b/deps/openssl/openssl/doc/man1/openssl-s_server.pod.in index 319f1e342b4855..99a252a8225453 100644 --- a/deps/openssl/openssl/doc/man1/openssl-s_server.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-s_server.pod.in @@ -641,7 +641,10 @@ Signature algorithms to support for client certificate authentication =item B<-named_curve> I Specifies the elliptic curve to use. NOTE: this is single curve, not a list. -For a list of all possible curves, use: + +The list of all supported groups includes named EC parameters as well as X25519 +and X448 or FFDHE groups, and may also include groups implemented in 3rd-party +providers. For a list of named EC parameters, use: $ openssl ecparam -list_curves @@ -930,7 +933,7 @@ option were deprecated in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
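For the group-selection text amended above, the programmatic counterpart of the B<-curves>/B<-named_curve> options is the groups-list API. A minimal sketch, assuming an already-created TLS context and with error reporting elided; the particular group string is illustrative:

    #include <openssl/ssl.h>

    /* Offer one named EC parameter plus X25519, matching the updated
     * description of what the supported-groups list may contain. */
    static int restrict_groups(SSL_CTX *ctx)
    {
        return SSL_CTX_set1_groups_list(ctx, "X25519:P-256");
    }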
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-smime.pod.in b/deps/openssl/openssl/doc/man1/openssl-smime.pod.in index e438c866c38307..0b5dbb5df8f7fc 100644 --- a/deps/openssl/openssl/doc/man1/openssl-smime.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-smime.pod.in @@ -195,14 +195,14 @@ Don't try to verify the signatures on the message. =item B<-nocerts> -When signing a message the signer's certificate is normally included -with this option it is excluded. This will reduce the size of the -signed message but the verifier must have a copy of the signers certificate +When signing a message, the signer's certificate is normally included. +With this option it is excluded. This will reduce the size of the +signed message, but the verifier must have a copy of the signers certificate available locally (passed using the B<-certfile> option for example). =item B<-noattr> -Normally when a message is signed a set of attributes are included which +Normally, when a message is signed, a set of attributes are included which include the signing time and supported symmetric algorithms. With this option they are not included. @@ -243,14 +243,6 @@ used multiple times if more than one signer is required. If a message is being verified then the signers certificates will be written to this file if the verification was successful. -=item B<-nocerts> - -Don't include signers certificate when signing. - -=item B<-noattr> - -Don't include any signed attributes when signing. - =item B<-recip> I The recipients certificate when decrypting a message. This certificate @@ -482,7 +474,7 @@ The B<-engine> option was deprecated in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-storeutl.pod.in b/deps/openssl/openssl/doc/man1/openssl-storeutl.pod.in index 26d5ee28e647d2..2b619d7c356e81 100644 --- a/deps/openssl/openssl/doc/man1/openssl-storeutl.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-storeutl.pod.in @@ -79,6 +79,9 @@ returned. Note that all options must be given before the I argument. Otherwise they are ignored. +Note I<-keys> selects exclusively private keys, there is no selector for public +keys only. + =item B<-subject> I Search for an object having the subject name I. @@ -137,7 +140,7 @@ The B<-engine> option was deprecated in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-ts.pod.in b/deps/openssl/openssl/doc/man1/openssl-ts.pod.in index 3e7f7c4be94b2d..5f4895b34d6c4c 100644 --- a/deps/openssl/openssl/doc/man1/openssl-ts.pod.in +++ b/deps/openssl/openssl/doc/man1/openssl-ts.pod.in @@ -163,9 +163,9 @@ use its own default policy. (Optional) =item B<-no_nonce> No nonce is specified in the request if this option is -given. Otherwise a 64 bit long pseudo-random none is -included in the request. It is recommended to use nonce to -protect against replay-attacks. (Optional) +given. Otherwise, a 64-bit long pseudo-random nonce is +included in the request. 
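The I<-keys> note added above (private keys are selected exclusively; there is no public-key-only selector) corresponds to OSSL_STORE_expect() at the API level. A minimal sketch under that assumption, with error reporting elided:

    #include <openssl/store.h>

    static EVP_PKEY *load_private_key(const char *uri)
    {
        OSSL_STORE_CTX *sctx = OSSL_STORE_open(uri, NULL, NULL, NULL, NULL);
        EVP_PKEY *pkey = NULL;

        if (sctx == NULL)
            return NULL;
        /* Like -keys: ask the store to return private keys exclusively. */
        OSSL_STORE_expect(sctx, OSSL_STORE_INFO_PKEY);
        while (pkey == NULL && !OSSL_STORE_eof(sctx)) {
            OSSL_STORE_INFO *info = OSSL_STORE_load(sctx);

            if (info != NULL
                && OSSL_STORE_INFO_get_type(info) == OSSL_STORE_INFO_PKEY)
                pkey = OSSL_STORE_INFO_get1_PKEY(info);
            OSSL_STORE_INFO_free(info);
        }
        OSSL_STORE_close(sctx);
        return pkey;
    }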
It is recommended to use a nonce to +protect against replay attacks. (Optional) =item B<-cert> @@ -652,7 +652,7 @@ L =head1 COPYRIGHT -Copyright 2006-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2006-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man1/openssl-verification-options.pod b/deps/openssl/openssl/doc/man1/openssl-verification-options.pod index 4998e452b54957..bf9ed9c1a62e1c 100644 --- a/deps/openssl/openssl/doc/man1/openssl-verification-options.pod +++ b/deps/openssl/openssl/doc/man1/openssl-verification-options.pod @@ -430,7 +430,7 @@ This option may be used multiple times. =item B<-policy> I Enable policy processing and add I to the user-initial-policy-set (see -RFC5280). The policy I can be an object name an OID in numeric form. +RFC5280). The policy I can be an object name or an OID in numeric form. This argument can appear more than once. =item B<-explicit_policy> @@ -686,7 +686,7 @@ The checks enabled by B<-x509_strict> have been extended in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/ASN1_INTEGER_new.pod b/deps/openssl/openssl/doc/man3/ASN1_INTEGER_new.pod index 4722f880c0b28d..869ac754f4b736 100644 --- a/deps/openssl/openssl/doc/man3/ASN1_INTEGER_new.pod +++ b/deps/openssl/openssl/doc/man3/ASN1_INTEGER_new.pod @@ -18,6 +18,7 @@ ASN1_INTEGER_new, ASN1_INTEGER_free - ASN1_INTEGER allocation functions ASN1_INTEGER_new() returns an allocated B structure. ASN1_INTEGER_free() frees up a single B object. +If the argument is NULL, nothing is done. B structure representing the ASN.1 INTEGER type @@ -34,7 +35,7 @@ L =head1 COPYRIGHT -Copyright 2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/ASYNC_WAIT_CTX_new.pod b/deps/openssl/openssl/doc/man3/ASYNC_WAIT_CTX_new.pod index 7621a8b3a166b1..d85c51e5556d8f 100644 --- a/deps/openssl/openssl/doc/man3/ASYNC_WAIT_CTX_new.pod +++ b/deps/openssl/openssl/doc/man3/ASYNC_WAIT_CTX_new.pod @@ -178,6 +178,9 @@ operation, normally it is detected by a polling function or an interrupt, as the user code set a callback by calling ASYNC_WAIT_CTX_set_callback() previously, then the registered callback will be called. +ASYNC_WAIT_CTX_free() frees up a single B object. +If the argument is NULL, nothing is done. + =head1 RETURN VALUES ASYNC_WAIT_CTX_new() returns a pointer to the newly allocated B @@ -216,7 +219,7 @@ were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
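Many hunks in this patch add the same sentence: passing NULL to a *_free() routine is a documented no-op. For callers this means no NULL guard is needed before freeing, for example:

    #include <openssl/asn1.h>

    ASN1_INTEGER *serial = ASN1_INTEGER_new();

    /* ... use serial, which may legitimately be NULL on allocation failure ... */

    ASN1_INTEGER_free(serial);   /* fine even when serial == NULL: a no-op */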
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BIO_ADDR.pod b/deps/openssl/openssl/doc/man3/BIO_ADDR.pod index 5ab88622cee0ba..12ec84d15ca041 100644 --- a/deps/openssl/openssl/doc/man3/BIO_ADDR.pod +++ b/deps/openssl/openssl/doc/man3/BIO_ADDR.pod @@ -38,6 +38,7 @@ with routines that will fill it with information, such as BIO_accept_ex(). BIO_ADDR_free() frees a B created with BIO_ADDR_new(). +If the argument is NULL, nothing is done. BIO_ADDR_clear() clears any data held within the provided B and sets it back to an uninitialised state. @@ -115,7 +116,7 @@ L, L =head1 COPYRIGHT -Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BIO_ADDRINFO.pod b/deps/openssl/openssl/doc/man3/BIO_ADDRINFO.pod index 626052e7f8b907..71a14ff4f0d706 100644 --- a/deps/openssl/openssl/doc/man3/BIO_ADDRINFO.pod +++ b/deps/openssl/openssl/doc/man3/BIO_ADDRINFO.pod @@ -78,7 +78,7 @@ BIO_ADDRINFO_next() returns the next B in the chain from the given one. BIO_ADDRINFO_free() frees the chain of B starting -with the given one. +with the given one. If the argument is NULL, nothing is done. =head1 RETURN VALUES @@ -103,7 +103,7 @@ The BIO_lookup_ex() function was added in OpenSSL 1.1.1. =head1 COPYRIGHT -Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BIO_f_base64.pod b/deps/openssl/openssl/doc/man3/BIO_f_base64.pod index c865f0a17ac169..c951d492619087 100644 --- a/deps/openssl/openssl/doc/man3/BIO_f_base64.pod +++ b/deps/openssl/openssl/doc/man3/BIO_f_base64.pod @@ -21,25 +21,23 @@ any data read through it. Base64 BIOs do not support BIO_gets() or BIO_puts(). -For writing, output is by default divided to lines of length 64 -characters and there is always a newline at the end of output. +For writing, by default output is divided to lines of length 64 +characters and there is a newline at the end of output. +This behavior can be changed with B flag. -For reading, first line should be at most 1024 -characters long. If it is longer then it is ignored completely. -Other input lines can be of any length. There must be a newline -at the end of input. - -This behavior can be changed with BIO_FLAGS_BASE64_NO_NL flag. +For reading, first line should be at most 1024 bytes long including newline +unless the flag B is set. +Further input lines can be of any length (i.e., newlines may appear anywhere +in the input) and a newline at the end of input is not needed. BIO_flush() on a base64 BIO that is being written through is used to signal that no more data is to be encoded: this is used to flush the final block through the BIO. -The flag BIO_FLAGS_BASE64_NO_NL can be set with BIO_set_flags(). +The flag B can be set with BIO_set_flags(). For writing, it causes all data to be written on one line without newline at the end. -For reading, it expects the data to be all on one line (with or -without a trailing newline). +For reading, it removes all expectations on newlines in the input data. 
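A short sketch of the B<BIO_FLAGS_BASE64_NO_NL> behaviour described above: decoding a single-line base64 string with the newline expectations lifted. The buffer handling is illustrative only:

    #include <openssl/bio.h>
    #include <openssl/evp.h>

    static int decode_one_line(const char *b64in, unsigned char *out, int outsz)
    {
        BIO *b64 = BIO_new(BIO_f_base64());
        BIO *mem = BIO_new_mem_buf(b64in, -1);   /* -1: use strlen(b64in) */
        int n;

        /* The input carries no newlines, so remove the line expectations. */
        BIO_set_flags(b64, BIO_FLAGS_BASE64_NO_NL);
        mem = BIO_push(b64, mem);
        n = BIO_read(mem, out, outsz);           /* decoded byte count or <= 0 */
        BIO_free_all(mem);
        return n;
    }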
=head1 NOTES @@ -85,6 +83,10 @@ data to standard output: =head1 BUGS +On decoding, if the flag B is not set and +the first 1024 bytes of input do not include a newline character +the first two lines of input are ignored. + The ambiguity of EOF in base64 encoded data can cause additional data following the base64 encoded block to be misinterpreted. @@ -93,7 +95,7 @@ to reliably determine EOF (for example a MIME boundary). =head1 COPYRIGHT -Copyright 2000-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BIO_meth_new.pod b/deps/openssl/openssl/doc/man3/BIO_meth_new.pod index 86301b97189f71..d626277c03b1ed 100644 --- a/deps/openssl/openssl/doc/man3/BIO_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/BIO_meth_new.pod @@ -76,7 +76,7 @@ additionally have the "descriptor" bit set (B). See the L page for more information. BIO_meth_free() destroys a B structure and frees up any memory -associated with it. +associated with it. If the argument is NULL, nothing is done. BIO_meth_get_write_ex() and BIO_meth_set_write_ex() get and set the function used for writing arbitrary length data to the BIO respectively. This function @@ -157,7 +157,7 @@ The functions described here were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BN_add.pod b/deps/openssl/openssl/doc/man3/BN_add.pod index 35cfdd1495fd12..46966d99637925 100644 --- a/deps/openssl/openssl/doc/man3/BN_add.pod +++ b/deps/openssl/openssl/doc/man3/BN_add.pod @@ -14,9 +14,9 @@ arithmetic operations on BIGNUMs int BN_sub(BIGNUM *r, const BIGNUM *a, const BIGNUM *b); - int BN_mul(BIGNUM *r, BIGNUM *a, BIGNUM *b, BN_CTX *ctx); + int BN_mul(BIGNUM *r, const BIGNUM *a, const BIGNUM *b, BN_CTX *ctx); - int BN_sqr(BIGNUM *r, BIGNUM *a, BN_CTX *ctx); + int BN_sqr(BIGNUM *r, const BIGNUM *a, BN_CTX *ctx); int BN_div(BIGNUM *dv, BIGNUM *rem, const BIGNUM *a, const BIGNUM *d, BN_CTX *ctx); @@ -25,25 +25,25 @@ arithmetic operations on BIGNUMs int BN_nnmod(BIGNUM *r, const BIGNUM *a, const BIGNUM *m, BN_CTX *ctx); - int BN_mod_add(BIGNUM *r, BIGNUM *a, BIGNUM *b, const BIGNUM *m, + int BN_mod_add(BIGNUM *r, const BIGNUM *a, const BIGNUM *b, const BIGNUM *m, BN_CTX *ctx); - int BN_mod_sub(BIGNUM *r, BIGNUM *a, BIGNUM *b, const BIGNUM *m, + int BN_mod_sub(BIGNUM *r, const BIGNUM *a, const BIGNUM *b, const BIGNUM *m, BN_CTX *ctx); - int BN_mod_mul(BIGNUM *r, BIGNUM *a, BIGNUM *b, const BIGNUM *m, + int BN_mod_mul(BIGNUM *r, const BIGNUM *a, const BIGNUM *b, const BIGNUM *m, BN_CTX *ctx); - int BN_mod_sqr(BIGNUM *r, BIGNUM *a, const BIGNUM *m, BN_CTX *ctx); + int BN_mod_sqr(BIGNUM *r, const BIGNUM *a, const BIGNUM *m, BN_CTX *ctx); - BIGNUM *BN_mod_sqrt(BIGNUM *in, BIGNUM *a, const BIGNUM *p, BN_CTX *ctx); + BIGNUM *BN_mod_sqrt(BIGNUM *in, const BIGNUM *a, const BIGNUM *p, BN_CTX *ctx); - int BN_exp(BIGNUM *r, BIGNUM *a, BIGNUM *p, BN_CTX *ctx); + int BN_exp(BIGNUM *r, const BIGNUM *a, const BIGNUM *p, BN_CTX *ctx); - int BN_mod_exp(BIGNUM *r, BIGNUM *a, const BIGNUM *p, + int BN_mod_exp(BIGNUM 
*r, const BIGNUM *a, const BIGNUM *p, const BIGNUM *m, BN_CTX *ctx); - int BN_gcd(BIGNUM *r, BIGNUM *a, BIGNUM *b, BN_CTX *ctx); + int BN_gcd(BIGNUM *r, const BIGNUM *a, const BIGNUM *b, BN_CTX *ctx); =head1 DESCRIPTION @@ -135,7 +135,7 @@ L, L =head1 COPYRIGHT -Copyright 2000-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BN_generate_prime.pod b/deps/openssl/openssl/doc/man3/BN_generate_prime.pod index b536bcb3b781ca..accc8a749f0c33 100644 --- a/deps/openssl/openssl/doc/man3/BN_generate_prime.pod +++ b/deps/openssl/openssl/doc/man3/BN_generate_prime.pod @@ -167,7 +167,8 @@ programs should prefer the "new" style, whilst the "old" style is provided for backwards compatibility purposes. A B structure should be created through a call to BN_GENCB_new(), -and freed through a call to BN_GENCB_free(). +and freed through a call to BN_GENCB_free(). If the argument is NULL, +nothing is done. For "new" style callbacks a BN_GENCB structure should be initialised with a call to BN_GENCB_set(), where B is a B, B is of @@ -245,7 +246,7 @@ BN_check_prime() was added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BN_set_bit.pod b/deps/openssl/openssl/doc/man3/BN_set_bit.pod index 349ef9e0562cc3..ddc27d0c407a4f 100644 --- a/deps/openssl/openssl/doc/man3/BN_set_bit.pod +++ b/deps/openssl/openssl/doc/man3/BN_set_bit.pod @@ -33,8 +33,11 @@ error occurs if B is shorter than B bits. BN_is_bit_set() tests if bit B in B is set. BN_mask_bits() truncates B to an B bit number -(CEn)>). An error occurs if B already is -shorter than B bits. +(CEn)>). An error occurs if B is negative. An error is +also returned if the internal representation of B is already shorter than +B bits. The internal representation depends on the platform's word size, and +this error can be safely ignored. Use L to determine the exact +number of bits if needed. BN_lshift() shifts B left by B bits and places the result in B (C). Note that B must be nonnegative. BN_lshift1() shifts @@ -59,7 +62,7 @@ L, L =head1 COPYRIGHT -Copyright 2000-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/BUF_MEM_new.pod b/deps/openssl/openssl/doc/man3/BUF_MEM_new.pod index 262e18f31bfe92..79de43a1df9a0a 100644 --- a/deps/openssl/openssl/doc/man3/BUF_MEM_new.pod +++ b/deps/openssl/openssl/doc/man3/BUF_MEM_new.pod @@ -34,6 +34,7 @@ should be allocated on the secure heap; see L. BUF_MEM_free() frees up an already existing buffer. The data is zeroed before freeing up in case the buffer contains sensitive data. +If the argument is NULL, nothing is done. BUF_MEM_grow() changes the size of an already existing buffer to B. 
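Two usage notes follow from the BN changes above: the constified prototypes mean read-only operands can be passed directly, and the clarified BN_mask_bits() text means its error return may be tolerated when the value is merely shorter than the mask. A sketch of both:

    #include <openssl/bn.h>

    /* Constified operands: a and m are read-only, per the new prototypes. */
    static int square_mod(BIGNUM *r, const BIGNUM *a, const BIGNUM *m)
    {
        BN_CTX *ctx = BN_CTX_new();
        int ok = ctx != NULL && BN_mod_sqr(r, a, m, ctx);

        BN_CTX_free(ctx);
        return ok;
    }

    /* Truncate a to 64 bits, treating "already shorter" as success. */
    static int truncate64(BIGNUM *a)
    {
        return BN_mask_bits(a, 64) || BN_num_bits(a) <= 64;
    }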
Any data already in the buffer is preserved if it increases in @@ -65,7 +66,7 @@ The BUF_MEM_new_ex() function was added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2000-2018 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/CRYPTO_THREAD_run_once.pod b/deps/openssl/openssl/doc/man3/CRYPTO_THREAD_run_once.pod index a51679b97edde4..420ff7278b26c2 100644 --- a/deps/openssl/openssl/doc/man3/CRYPTO_THREAD_run_once.pod +++ b/deps/openssl/openssl/doc/man3/CRYPTO_THREAD_run_once.pod @@ -69,6 +69,7 @@ CRYPTO_THREAD_unlock() unlocks the previously locked I. =item * CRYPTO_THREAD_lock_free() frees the provided I. +If the argument is NULL, nothing is done. =item * @@ -163,10 +164,13 @@ This example safely initializes and uses a lock. { int ret = 0; - if (mylock()) { - /* Your code here, do not return without releasing the lock! */ - ret = ... ; + if (!mylock()) { + /* Do not unlock unless the lock was successfully acquired. */ + return 0; } + + /* Your code here, do not return without releasing the lock! */ + ret = ... ; myunlock(); return ret; } @@ -183,7 +187,7 @@ L, L. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/CTLOG_STORE_new.pod b/deps/openssl/openssl/doc/man3/CTLOG_STORE_new.pod index 801b1447e15599..361eda57b1821f 100644 --- a/deps/openssl/openssl/doc/man3/CTLOG_STORE_new.pod +++ b/deps/openssl/openssl/doc/man3/CTLOG_STORE_new.pod @@ -52,7 +52,7 @@ The expected format of the file is: Once a CTLOG_STORE is no longer required, it should be passed to CTLOG_STORE_free(). This will delete all of the CTLOGs stored within, along -with the CTLOG_STORE itself. +with the CTLOG_STORE itself. If the argument is NULL, nothing is done. =head1 NOTES @@ -78,7 +78,7 @@ added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/CTLOG_new.pod b/deps/openssl/openssl/doc/man3/CTLOG_new.pod index 30b8068249667a..7a78a6c58dd356 100644 --- a/deps/openssl/openssl/doc/man3/CTLOG_new.pod +++ b/deps/openssl/openssl/doc/man3/CTLOG_new.pod @@ -50,7 +50,7 @@ property query string are used. Regardless of whether CTLOG_new() or CTLOG_new_from_base64() is used, it is the caller's responsibility to pass the CTLOG to CTLOG_free() once it is no longer needed. This will delete it and, if created by CTLOG_new(), the EVP_PKEY that -was passed to it. +was passed to it. If the argument to CTLOG_free() is NULL, nothing is done. CTLOG_get0_name() returns the name of the log, as provided when the CTLOG was created. Ownership of the string remains with the CTLOG. @@ -80,7 +80,7 @@ were added in OpenSSL 3.0. All other functions were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. 
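The corrected locking example above returns early only when the lock cannot be taken. The surrounding lock lifecycle, including the NULL-tolerant free this patch documents, looks roughly like this:

    #include <openssl/crypto.h>

    static int demo(void)
    {
        CRYPTO_RWLOCK *lock = CRYPTO_THREAD_lock_new();

        if (lock != NULL && CRYPTO_THREAD_write_lock(lock)) {
            /* critical section */
            CRYPTO_THREAD_unlock(lock);
        }
        CRYPTO_THREAD_lock_free(lock);   /* documented no-op when lock is NULL */
        return lock != NULL;
    }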
+Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/CT_POLICY_EVAL_CTX_new.pod b/deps/openssl/openssl/doc/man3/CT_POLICY_EVAL_CTX_new.pod index bba6778d2debcd..3b79980c43c264 100644 --- a/deps/openssl/openssl/doc/man3/CT_POLICY_EVAL_CTX_new.pod +++ b/deps/openssl/openssl/doc/man3/CT_POLICY_EVAL_CTX_new.pod @@ -105,7 +105,8 @@ The time should be in milliseconds since the Unix Epoch. Each setter has a matching getter for accessing the current value. When no longer required, the B should be passed to -CT_POLICY_EVAL_CTX_free() to delete it. +CT_POLICY_EVAL_CTX_free() to delete it. If the argument to +CT_POLICY_EVAL_CTX_free() is NULL, nothing is done. =head1 NOTES @@ -130,7 +131,7 @@ functions were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/DEFINE_STACK_OF.pod b/deps/openssl/openssl/doc/man3/DEFINE_STACK_OF.pod index 0775214fb5d7e6..3ebd473afcb250 100644 --- a/deps/openssl/openssl/doc/man3/DEFINE_STACK_OF.pod +++ b/deps/openssl/openssl/doc/man3/DEFINE_STACK_OF.pod @@ -41,8 +41,8 @@ OPENSSL_sk_unshift, OPENSSL_sk_value, OPENSSL_sk_zero STACK_OF(TYPE) *sk_TYPE_new(sk_TYPE_compfunc compare); STACK_OF(TYPE) *sk_TYPE_new_null(void); int sk_TYPE_reserve(STACK_OF(TYPE) *sk, int n); - void sk_TYPE_free(const STACK_OF(TYPE) *sk); - void sk_TYPE_zero(const STACK_OF(TYPE) *sk); + void sk_TYPE_free(STACK_OF(TYPE) *sk); + void sk_TYPE_zero(STACK_OF(TYPE) *sk); TYPE *sk_TYPE_delete(STACK_OF(TYPE) *sk, int i); TYPE *sk_TYPE_delete_ptr(STACK_OF(TYPE) *sk, TYPE *ptr); int sk_TYPE_push(STACK_OF(TYPE) *sk, const TYPE *ptr); @@ -297,7 +297,7 @@ B_reserve>() and B_new_reserve>() were added in OpenSSL =head1 COPYRIGHT -Copyright 2000-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/DH_meth_new.pod b/deps/openssl/openssl/doc/man3/DH_meth_new.pod index 43827f55ef8c37..d5ba5eac565d2d 100644 --- a/deps/openssl/openssl/doc/man3/DH_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/DH_meth_new.pod @@ -81,7 +81,7 @@ parameter. This might be useful for creating a new B based on an existing one, but with some differences. DH_meth_free() destroys a B structure and frees up any memory -associated with it. +associated with it. If the argument is NULL, nothing is done. DH_meth_get0_name() will return a pointer to the name of this DH_METHOD. This is a pointer to the internal name string and so should not be freed by the @@ -166,7 +166,7 @@ The functions described here were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
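The DEFINE_STACK_OF hunk above drops the erroneous B<const> from sk_TYPE_free() and sk_TYPE_zero(), since both mutate the stack. A usage sketch; note that freeing the stack does not free its elements:

    #include <openssl/x509.h>

    static void collect(X509 *cert)
    {
        STACK_OF(X509) *certs = sk_X509_new_null();

        if (certs == NULL || !sk_X509_push(certs, cert)) {
            sk_X509_free(certs);   /* tolerates NULL, like the other frees */
            return;
        }
        /* ... use certs ... */
        sk_X509_free(certs);       /* frees the stack only, not the X509s;
                                    * sk_X509_pop_free(certs, X509_free)
                                    * would free the entries as well */
    }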
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/DSA_SIG_new.pod b/deps/openssl/openssl/doc/man3/DSA_SIG_new.pod index 1f532d300065e1..158da2d7dd0658 100644 --- a/deps/openssl/openssl/doc/man3/DSA_SIG_new.pod +++ b/deps/openssl/openssl/doc/man3/DSA_SIG_new.pod @@ -20,6 +20,7 @@ DSA_SIG_new() allocates an empty B structure. DSA_SIG_free() frees the B structure and its components. The values are erased before the memory is returned to the system. +If the argument is NULL, nothing is done. DSA_SIG_get0() returns internal pointers to the B and B values contained in B. @@ -48,7 +49,7 @@ L =head1 COPYRIGHT -Copyright 2000-2016 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/DSA_meth_new.pod b/deps/openssl/openssl/doc/man3/DSA_meth_new.pod index c00747cfc44865..534561c610467f 100644 --- a/deps/openssl/openssl/doc/man3/DSA_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/DSA_meth_new.pod @@ -110,7 +110,7 @@ parameter. This might be useful for creating a new B based on an existing one, but with some differences. DSA_meth_free() destroys a B structure and frees up any memory -associated with it. +associated with it. If the argument is NULL, nothing is done. DSA_meth_get0_name() will return a pointer to the name of this DSA_METHOD. This is a pointer to the internal name string and so should not be freed by the @@ -214,7 +214,7 @@ The functions described here were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/ECDSA_SIG_new.pod b/deps/openssl/openssl/doc/man3/ECDSA_SIG_new.pod index 3266c43b550c43..e56ec959edff07 100644 --- a/deps/openssl/openssl/doc/man3/ECDSA_SIG_new.pod +++ b/deps/openssl/openssl/doc/man3/ECDSA_SIG_new.pod @@ -31,6 +31,7 @@ ECDSA_SIG_new() allocates an empty B structure. Note: before OpenSSL 1.1.0, the I and I components were initialised. ECDSA_SIG_free() frees the B structure I. +If the argument is NULL, nothing is done. ECDSA_SIG_get0() returns internal pointers the I and I values contained in I and stores them in I<*pr> and I<*ps>, respectively. @@ -136,7 +137,7 @@ L =head1 COPYRIGHT -Copyright 2004-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2004-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/ENGINE_add.pod b/deps/openssl/openssl/doc/man3/ENGINE_add.pod index 55e5d76fcdb8c0..24c83c57647601 100644 --- a/deps/openssl/openssl/doc/man3/ENGINE_add.pod +++ b/deps/openssl/openssl/doc/man3/ENGINE_add.pod @@ -227,7 +227,8 @@ references such as; ENGINE_by_id(), ENGINE_get_first(), ENGINE_get_last(), ENGINE_get_next(), ENGINE_get_prev(). All structural references should be released by a corresponding to call to the ENGINE_free() function - the ENGINE object itself will only actually be cleaned up and deallocated when -the last structural reference is released. 
+the last structural reference is released. If the argument to ENGINE_free() +is NULL, nothing is done. It should also be noted that many ENGINE API function calls that accept a structural reference will internally obtain another reference - typically @@ -665,7 +666,7 @@ and should not be used. =head1 COPYRIGHT -Copyright 2002-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2002-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_ASYM_CIPHER_free.pod b/deps/openssl/openssl/doc/man3/EVP_ASYM_CIPHER_free.pod index c158ec1ae74a7e..162ad7ed7687dd 100644 --- a/deps/openssl/openssl/doc/man3/EVP_ASYM_CIPHER_free.pod +++ b/deps/openssl/openssl/doc/man3/EVP_ASYM_CIPHER_free.pod @@ -45,7 +45,7 @@ The returned value must eventually be freed with EVP_ASYM_CIPHER_free(). EVP_ASYM_CIPHER_free() decrements the reference count for the B structure. Typically this structure will have been obtained from an earlier call to EVP_ASYM_CIPHER_fetch(). If the reference count drops to 0 then the -structure is freed. +structure is freed. If the argument is NULL, nothing is done. EVP_ASYM_CIPHER_up_ref() increments the reference count for an B structure. @@ -102,7 +102,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_CIPHER_meth_new.pod b/deps/openssl/openssl/doc/man3/EVP_CIPHER_meth_new.pod index 8b862d9d99c815..8638cd3009c7ea 100644 --- a/deps/openssl/openssl/doc/man3/EVP_CIPHER_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/EVP_CIPHER_meth_new.pod @@ -80,6 +80,7 @@ EVP_CIPHER_meth_new() creates a new B structure. EVP_CIPHER_meth_dup() creates a copy of B. EVP_CIPHER_meth_free() destroys a B structure. +If the argument is NULL, nothing is done. EVP_CIPHER_meth_set_iv_length() sets the length of the IV. This is only needed when the implemented cipher mode requires it. @@ -249,7 +250,7 @@ counted in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_DigestInit.pod b/deps/openssl/openssl/doc/man3/EVP_DigestInit.pod index 1953df3c5e8d45..de30191b61ac05 100644 --- a/deps/openssl/openssl/doc/man3/EVP_DigestInit.pod +++ b/deps/openssl/openssl/doc/man3/EVP_DigestInit.pod @@ -157,6 +157,7 @@ Increments the reference count for an B structure. Decrements the reference count for the fetched B structure. If the reference count drops to 0 then the structure is freed. +If the argument is NULL, nothing is done. =item EVP_MD_CTX_new() @@ -170,6 +171,7 @@ existing context. =item EVP_MD_CTX_free() Cleans up digest context I and frees up the space allocated to it. +If the argument is NULL, nothing is done. =item EVP_MD_CTX_ctrl() @@ -483,7 +485,7 @@ EVP_MD_CTX_get_params() can be used with the following OSSL_PARAM keys: =over 4 -=item "micalg" (B) . 
+=item "micalg" (B) . Gets the digest Message Integrity Check algorithm string. This is used when creating S/MIME multipart/signed messages, as specified in RFC 3851. @@ -529,9 +531,13 @@ can be used the manipulate and test these B flags: This flag instructs the digest to optimize for one update only, if possible. -=for comment EVP_MD_CTX_FLAG_CLEANED is internal, don't mention it +=item EVP_MD_CTX_FLAG_CLEANED -=for comment EVP_MD_CTX_FLAG_REUSE is internal, don't mention it +This flag is for internal use only and I be used in user code. + +=item EVP_MD_CTX_FLAG_REUSE + +This flag is for internal use only and I be used in user code. =for comment We currently avoid documenting flags that are only bit holder: EVP_MD_CTX_FLAG_NON_FIPS_ALLOW, EVP_MD_CTX_FLAGS_PAD_* @@ -784,7 +790,7 @@ in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_EncodeInit.pod b/deps/openssl/openssl/doc/man3/EVP_EncodeInit.pod index 2b9e02e02d79f6..03c6f4e60579e7 100644 --- a/deps/openssl/openssl/doc/man3/EVP_EncodeInit.pod +++ b/deps/openssl/openssl/doc/man3/EVP_EncodeInit.pod @@ -41,7 +41,7 @@ EVP_ENCODE_CTX_new() allocates, initializes and returns a context to be used for the encode/decode functions. EVP_ENCODE_CTX_free() cleans up an encode/decode context B and frees up the -space allocated to it. +space allocated to it. If the argument is NULL, nothing is done. Encoding of binary data is performed in blocks of 48 input bytes (or less for the final block). For each 48 byte input block encoded 64 bytes of base 64 data @@ -151,7 +151,7 @@ L =head1 COPYRIGHT -Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_EncryptInit.pod b/deps/openssl/openssl/doc/man3/EVP_EncryptInit.pod index 12d7153d0fd406..f037d135c9da06 100644 --- a/deps/openssl/openssl/doc/man3/EVP_EncryptInit.pod +++ b/deps/openssl/openssl/doc/man3/EVP_EncryptInit.pod @@ -268,6 +268,7 @@ Increments the reference count for an B structure. Decrements the reference count for the fetched B structure. If the reference count drops to 0 then the structure is freed. +If the argument is NULL, nothing is done. =item EVP_CIPHER_CTX_new() @@ -276,9 +277,9 @@ Allocates and returns a cipher context. =item EVP_CIPHER_CTX_free() Clears all information from a cipher context and frees any allocated memory -associated with it, including I itself. This function should be called after -all operations using a cipher are complete so sensitive information does not -remain in memory. +associated with it, including I itself. This function should be called +after all operations using a cipher are complete so sensitive information does +not remain in memory. If the argument is NULL, nothing is done. =item EVP_CIPHER_CTX_ctrl() @@ -360,9 +361,13 @@ exists. Encrypts I bytes from the buffer I and writes the encrypted version to I. The pointers I and I may point to the same location, in which -case the encryption will be done in-place. 
If I and I point to different -locations, the two buffers must be disjoint, otherwise the operation might fail -or the outcome might be undefined. +case the encryption will be done in-place. However, in-place encryption is +guaranteed to work only if the encryption context (I) has processed data in +multiples of the block size. If the context contains an incomplete data block +from previous operations, in-place encryption will fail. + +If I and I point to different locations, the two buffers must be +disjoint, otherwise the operation might fail or the outcome might be undefined. This function can be called multiple times to encrypt successive blocks of data. The amount of data written depends on the block alignment of the @@ -1733,7 +1738,7 @@ The EVP_CIPHER_CTX_flags() macro was deprecated in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2000-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_KDF.pod b/deps/openssl/openssl/doc/man3/EVP_KDF.pod index 31d61b2a3df0a7..9447651a340e44 100644 --- a/deps/openssl/openssl/doc/man3/EVP_KDF.pod +++ b/deps/openssl/openssl/doc/man3/EVP_KDF.pod @@ -20,7 +20,7 @@ EVP_KDF_CTX_gettable_params, EVP_KDF_CTX_settable_params - EVP KDF routines typedef struct evp_kdf_st EVP_KDF; typedef struct evp_kdf_ctx_st EVP_KDF_CTX; - EVP_KDF_CTX *EVP_KDF_CTX_new(const EVP_KDF *kdf); + EVP_KDF_CTX *EVP_KDF_CTX_new(EVP_KDF *kdf); const EVP_KDF *EVP_KDF_CTX_kdf(EVP_KDF_CTX *ctx); void EVP_KDF_CTX_free(EVP_KDF_CTX *ctx); EVP_KDF_CTX *EVP_KDF_CTX_dup(const EVP_KDF_CTX *src); @@ -304,7 +304,7 @@ This functionality was added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_KEM_free.pod b/deps/openssl/openssl/doc/man3/EVP_KEM_free.pod index 575abc5f5798c3..b0ef604757d757 100644 --- a/deps/openssl/openssl/doc/man3/EVP_KEM_free.pod +++ b/deps/openssl/openssl/doc/man3/EVP_KEM_free.pod @@ -41,6 +41,7 @@ The returned value must eventually be freed with EVP_KEM_free(). EVP_KEM_free() decrements the reference count for the B structure. Typically this structure will have been obtained from an earlier call to EVP_KEM_fetch(). If the reference count drops to 0 then the structure is freed. +If the argument is NULL, nothing is done. EVP_KEM_up_ref() increments the reference count for an B structure. @@ -95,7 +96,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_KEYEXCH_free.pod b/deps/openssl/openssl/doc/man3/EVP_KEYEXCH_free.pod index 272855ccb3dd81..e08f44e60c732b 100644 --- a/deps/openssl/openssl/doc/man3/EVP_KEYEXCH_free.pod +++ b/deps/openssl/openssl/doc/man3/EVP_KEYEXCH_free.pod @@ -41,7 +41,7 @@ The returned value must eventually be freed with EVP_KEYEXCH_free(). 
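The expanded EVP_EncryptUpdate() text above makes the in-place rule explicit: in-place operation is only guaranteed while the context holds no partial block. A minimal sketch of block-aligned in-place encryption, assuming AES-128-CBC with padding disabled; key/IV setup and error reporting are elided:

    #include <openssl/evp.h>

    /* len must be a multiple of the 16-byte AES block size. */
    static int encrypt_in_place(unsigned char *buf, int len,
                                const unsigned char *key,
                                const unsigned char *iv)
    {
        EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
        int outl = 0, tmpl = 0, ok;

        ok = ctx != NULL
             && EVP_EncryptInit_ex(ctx, EVP_aes_128_cbc(), NULL, key, iv)
             && EVP_CIPHER_CTX_set_padding(ctx, 0)
             && EVP_EncryptUpdate(ctx, buf, &outl, buf, len)  /* in == out */
             && EVP_EncryptFinal_ex(ctx, buf + outl, &tmpl);
        EVP_CIPHER_CTX_free(ctx);   /* NULL-tolerant, as documented above */
        return ok;
    }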
EVP_KEYEXCH_free() decrements the reference count for the B structure. Typically this structure will have been obtained from an earlier call to EVP_KEYEXCH_fetch(). If the reference count drops to 0 then the -structure is freed. +structure is freed. If the argument is NULL, nothing is done. EVP_KEYEXCH_up_ref() increments the reference count for an B structure. @@ -101,7 +101,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_KEYMGMT.pod b/deps/openssl/openssl/doc/man3/EVP_KEYMGMT.pod index 455ffadce5ec64..4c0c3b776c6409 100644 --- a/deps/openssl/openssl/doc/man3/EVP_KEYMGMT.pod +++ b/deps/openssl/openssl/doc/man3/EVP_KEYMGMT.pod @@ -62,6 +62,7 @@ B I. EVP_KEYMGMT_free() decrements the reference count for the given B I, and when the count reaches zero, frees it. +If the argument is NULL, nothing is done. EVP_KEYMGMT_get0_provider() returns the provider that has this particular implementation. @@ -140,7 +141,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_MD_meth_new.pod b/deps/openssl/openssl/doc/man3/EVP_MD_meth_new.pod index a553c378f3d7d8..3497973323768b 100644 --- a/deps/openssl/openssl/doc/man3/EVP_MD_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/EVP_MD_meth_new.pod @@ -74,6 +74,7 @@ EVP_MD_meth_dup() creates a copy of B. EVP_MD_meth_free() decrements the reference count for the B structure. If the reference count drops to 0 then the structure is freed. +If the argument is NULL, nothing is done. EVP_MD_meth_set_input_blocksize() sets the internal input block size for the method B to B bytes. @@ -194,7 +195,7 @@ counted in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2015-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_PKEY_ASN1_METHOD.pod b/deps/openssl/openssl/doc/man3/EVP_PKEY_ASN1_METHOD.pod index cc50d363daf1e2..41f058fe54ee41 100644 --- a/deps/openssl/openssl/doc/man3/EVP_PKEY_ASN1_METHOD.pod +++ b/deps/openssl/openssl/doc/man3/EVP_PKEY_ASN1_METHOD.pod @@ -393,7 +393,7 @@ This function is not thread safe, it's recommended to only use this when initializing the application. EVP_PKEY_asn1_free() frees an existing B pointed -by B. +by B. If the argument is NULL, nothing is done. EVP_PKEY_asn1_add0() adds B to the user defined stack of methods unless another B with the same NID is @@ -439,7 +439,7 @@ parameter is now constified. =head1 COPYRIGHT -Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_PKEY_CTX_set_params.pod b/deps/openssl/openssl/doc/man3/EVP_PKEY_CTX_set_params.pod index c02151654c3a62..8947648ccbe699 100644 --- a/deps/openssl/openssl/doc/man3/EVP_PKEY_CTX_set_params.pod +++ b/deps/openssl/openssl/doc/man3/EVP_PKEY_CTX_set_params.pod @@ -23,7 +23,9 @@ The EVP_PKEY_CTX_get_params() and EVP_PKEY_CTX_set_params() functions allow transfer of arbitrary key parameters to and from providers. Not all parameters may be supported by all providers. See L for more information on providers. -See L for more information on parameters. +The I field is a pointer to a list of B structures, +terminated with a L struct. +See L for information about passing parameters. These functions must only be called after the EVP_PKEY_CTX has been initialised for use in an operation. These methods replace the EVP_PKEY_CTX_ctrl() mechanism. (EVP_PKEY_CTX_ctrl now @@ -84,7 +86,7 @@ All functions were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_PKEY_check.pod b/deps/openssl/openssl/doc/man3/EVP_PKEY_check.pod index a16fdbbd508f04..04751f0bd5c784 100644 --- a/deps/openssl/openssl/doc/man3/EVP_PKEY_check.pod +++ b/deps/openssl/openssl/doc/man3/EVP_PKEY_check.pod @@ -61,6 +61,11 @@ It is not necessary to call these functions after locally calling an approved ke generation method, but may be required for assurance purposes when receiving keys from a third party. +The EVP_PKEY_pairwise_check() and EVP_PKEY_private_check() might not be bounded +by any key size limits as private keys are not expected to be supplied by +attackers. For that reason they might take an unbounded time if run on +arbitrarily large keys. + =head1 RETURN VALUES All functions return 1 for success or others for failure. @@ -86,7 +91,7 @@ EVP_PKEY_private_check() and EVP_PKEY_pairwise_check() were added in OpenSSL 3.0 =head1 COPYRIGHT -Copyright 2006-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2006-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_PKEY_meth_new.pod b/deps/openssl/openssl/doc/man3/EVP_PKEY_meth_new.pod index db0b09f855fc4d..1b0adb2913f82a 100644 --- a/deps/openssl/openssl/doc/man3/EVP_PKEY_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/EVP_PKEY_meth_new.pod @@ -407,7 +407,7 @@ of an B is always called by the EVP framework while doing a digest signing operation by calling L. EVP_PKEY_meth_free() frees an existing B pointed by -B. +B. If the argument is NULL, nothing is done. EVP_PKEY_meth_copy() copies an B object from B to B. @@ -456,7 +456,7 @@ has changed in OpenSSL 3.0 so its I parameter is now constified. =head1 COPYRIGHT -Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
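The EVP_PKEY_CTX_set_params() hunk above now spells out that the array must end with a terminating entry. A sketch, assuming an RSA key-generation context; B<OSSL_PKEY_PARAM_RSA_BITS> is one standard key name used here for illustration:

    #include <openssl/evp.h>
    #include <openssl/params.h>
    #include <openssl/core_names.h>

    static int set_keygen_bits(EVP_PKEY_CTX *pctx)
    {
        unsigned int bits = 3072;
        OSSL_PARAM params[] = {
            OSSL_PARAM_uint(OSSL_PKEY_PARAM_RSA_BITS, &bits),
            OSSL_PARAM_END          /* the required terminating entry */
        };

        return EVP_PKEY_CTX_set_params(pctx, params);
    }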
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_RAND.pod b/deps/openssl/openssl/doc/man3/EVP_RAND.pod index 11ea807cc33050..e5f75010499cac 100644 --- a/deps/openssl/openssl/doc/man3/EVP_RAND.pod +++ b/deps/openssl/openssl/doc/man3/EVP_RAND.pod @@ -284,7 +284,7 @@ associated RAND ctx. Reads or set the number of elapsed seconds before reseeding the associated RAND ctx. -=item "max_request" (B) +=item "max_request" (B) Specifies the maximum number of bytes that can be generated in a single call to OSSL_FUNC_rand_generate. @@ -406,7 +406,7 @@ This functionality was added to OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/EVP_SIGNATURE.pod b/deps/openssl/openssl/doc/man3/EVP_SIGNATURE.pod index 1f534ef33810eb..cf476d1453152b 100644 --- a/deps/openssl/openssl/doc/man3/EVP_SIGNATURE.pod +++ b/deps/openssl/openssl/doc/man3/EVP_SIGNATURE.pod @@ -49,7 +49,7 @@ The returned value must eventually be freed with EVP_SIGNATURE_free(). EVP_SIGNATURE_free() decrements the reference count for the B structure. Typically this structure will have been obtained from an earlier call to EVP_SIGNATURE_fetch(). If the reference count drops to 0 then the -structure is freed. +structure is freed. If the argument is NULL, nothing is done. EVP_SIGNATURE_up_ref() increments the reference count for an B structure. @@ -106,7 +106,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/HMAC.pod b/deps/openssl/openssl/doc/man3/HMAC.pod index 87a567242f60fa..ebe69d2db9d3d9 100644 --- a/deps/openssl/openssl/doc/man3/HMAC.pod +++ b/deps/openssl/openssl/doc/man3/HMAC.pod @@ -87,7 +87,7 @@ created with HMAC_CTX_new(). HMAC_CTX_free() erases the key and other data from the B, releases any associated resources and finally frees the B -itself. +itself. If the argument is NULL, nothing is done. The following functions may be used if the message is not completely stored in memory: @@ -163,7 +163,7 @@ OpenSSL before version 1.0.0. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
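Alongside the HMAC_CTX_free() clarification above, note that the incremental HMAC_CTX API can often be avoided entirely; the one-shot HMAC() call needs no context management at all:

    #include <openssl/hmac.h>
    #include <openssl/evp.h>

    static int mac_sha256(const void *key, int keylen,
                          const unsigned char *data, size_t datalen,
                          unsigned char md[EVP_MAX_MD_SIZE],
                          unsigned int *mdlen)
    {
        /* One-shot HMAC: no HMAC_CTX to allocate or free. */
        return HMAC(EVP_sha256(), key, keylen, data, datalen, md, mdlen) != NULL;
    }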
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/MD5.pod b/deps/openssl/openssl/doc/man3/MD5.pod index 2e01fe8193dd96..99bf8211601573 100644 --- a/deps/openssl/openssl/doc/man3/MD5.pod +++ b/deps/openssl/openssl/doc/man3/MD5.pod @@ -7,12 +7,12 @@ MD4_Final, MD5_Init, MD5_Update, MD5_Final - MD2, MD4, and MD5 hash functions =head1 SYNOPSIS - #include - The following functions have been deprecated since OpenSSL 3.0, and can be hidden entirely by defining B with a suitable version value, see L: + #include + unsigned char *MD2(const unsigned char *d, unsigned long n, unsigned char *md); int MD2_Init(MD2_CTX *c); @@ -20,25 +20,24 @@ see L: int MD2_Final(unsigned char *md, MD2_CTX *c); - #include - The following functions have been deprecated since OpenSSL 3.0, and can be hidden entirely by defining B with a suitable version value, see L: + #include + unsigned char *MD4(const unsigned char *d, unsigned long n, unsigned char *md); int MD4_Init(MD4_CTX *c); int MD4_Update(MD4_CTX *c, const void *data, unsigned long len); int MD4_Final(unsigned char *md, MD4_CTX *c); - - #include - The following functions have been deprecated since OpenSSL 3.0, and can be hidden entirely by defining B with a suitable version value, see L: + #include + unsigned char *MD5(const unsigned char *d, unsigned long n, unsigned char *md); int MD5_Init(MD5_CTX *c); @@ -105,7 +104,7 @@ All of these functions were deprecated in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/NCONF_new_ex.pod b/deps/openssl/openssl/doc/man3/NCONF_new_ex.pod index 6861fb198c103f..d088ab2fed29dc 100644 --- a/deps/openssl/openssl/doc/man3/NCONF_new_ex.pod +++ b/deps/openssl/openssl/doc/man3/NCONF_new_ex.pod @@ -35,7 +35,7 @@ I is set to NULL then the default value of NCONF_default() is used. NCONF_new() is similar to NCONF_new_ex() but sets the I to NULL. NCONF_free() frees the data associated with I and then frees the I -object. +object. If the argument is NULL, nothing is done. NCONF_load() parses the file named I and adds the values found to I. If an error occurs I and I list the file and line that @@ -74,7 +74,7 @@ in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OCSP_REQUEST_new.pod b/deps/openssl/openssl/doc/man3/OCSP_REQUEST_new.pod index e34e591fe01bec..3f171e822971c9 100644 --- a/deps/openssl/openssl/doc/man3/OCSP_REQUEST_new.pod +++ b/deps/openssl/openssl/doc/man3/OCSP_REQUEST_new.pod @@ -29,6 +29,7 @@ OCSP_request_onereq_get0 - OCSP request functions OCSP_REQUEST_new() allocates and returns an empty B structure. OCSP_REQUEST_free() frees up the request structure B. +If the argument is NULL, nothing is done. OCSP_request_add0_id() adds certificate ID B to B. It returns the B structure added so an application can add additional @@ -108,7 +109,7 @@ L =head1 COPYRIGHT -Copyright 2015-2016 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. 
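Since the MD2/MD4/MD5 synopsis above now leads with the deprecation notice, new code should prefer the EVP one-shot interface. A minimal sketch of the replacement:

    #include <openssl/evp.h>

    static int md5_oneshot(const void *data, size_t len,
                           unsigned char md[EVP_MAX_MD_SIZE],
                           unsigned int *mdlen)
    {
        /* Non-deprecated replacement for the legacy MD5() call. */
        return EVP_Digest(data, len, md, mdlen, EVP_md5(), NULL);
    }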
Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OCSP_cert_to_id.pod b/deps/openssl/openssl/doc/man3/OCSP_cert_to_id.pod index 298527f6bb25b7..e0fbdfa9eacbe1 100644 --- a/deps/openssl/openssl/doc/man3/OCSP_cert_to_id.pod +++ b/deps/openssl/openssl/doc/man3/OCSP_cert_to_id.pod @@ -38,6 +38,7 @@ issuer name B, issuer key hash B and serial number B. OCSP_CERTID_free() frees up B. +If the argument is NULL, nothing is done. OCSP_id_cmp() compares B B and B. @@ -79,7 +80,7 @@ L =head1 COPYRIGHT -Copyright 2015-2016 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OCSP_response_status.pod b/deps/openssl/openssl/doc/man3/OCSP_response_status.pod index 7ff74923a53f32..0902ae8a31bc1d 100644 --- a/deps/openssl/openssl/doc/man3/OCSP_response_status.pod +++ b/deps/openssl/openssl/doc/man3/OCSP_response_status.pod @@ -46,6 +46,7 @@ OCSP_response_create() creates and returns an I structure for I and optionally including basic response I. OCSP_RESPONSE_free() frees up OCSP response I. +If the argument is NULL, nothing is done. OCSP_RESPID_set_by_name() sets the name of the OCSP_RESPID to be the same as the subject name in the supplied X509 certificate I for the OCSP responder. @@ -123,7 +124,7 @@ The OCSP_basic_sign_ctx() function was added in OpenSSL 1.1.1. =head1 COPYRIGHT -Copyright 2015-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OPENSSL_LH_COMPFUNC.pod b/deps/openssl/openssl/doc/man3/OPENSSL_LH_COMPFUNC.pod index 688ef0edcb91ae..f62c57172cb747 100644 --- a/deps/openssl/openssl/doc/man3/OPENSSL_LH_COMPFUNC.pod +++ b/deps/openssl/openssl/doc/man3/OPENSSL_LH_COMPFUNC.pod @@ -123,7 +123,7 @@ Then a hash table of B> objects can be created using this: B_free>() frees the B(B>) structure I. Allocated hash table entries will not be freed; consider using B_doall>() to deallocate any remaining entries in the -hash table (see below). +hash table (see below). If the argument is NULL, nothing is done. B_flush>() empties the B(B>) structure I
    . New entries can be added to the flushed table. Allocated hash table entries @@ -299,7 +299,7 @@ type checking. =head1 COPYRIGHT -Copyright 2000-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OPENSSL_init_crypto.pod b/deps/openssl/openssl/doc/man3/OPENSSL_init_crypto.pod index c3e72d27442309..1363693c779b78 100644 --- a/deps/openssl/openssl/doc/man3/OPENSSL_init_crypto.pod +++ b/deps/openssl/openssl/doc/man3/OPENSSL_init_crypto.pod @@ -249,6 +249,7 @@ If the B flag is not included, any errors in the configuration file will cause an error return from B or indirectly L. The object can be released with OPENSSL_INIT_free() when done. +If the argument to OPENSSL_INIT_free() is NULL, nothing is done. =head1 NOTES @@ -289,7 +290,7 @@ and OPENSSL_INIT_free() functions were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OPENSSL_malloc.pod b/deps/openssl/openssl/doc/man3/OPENSSL_malloc.pod index 76751012bde5ff..20441e76ac6512 100644 --- a/deps/openssl/openssl/doc/man3/OPENSSL_malloc.pod +++ b/deps/openssl/openssl/doc/man3/OPENSSL_malloc.pod @@ -99,7 +99,8 @@ OPENSSL_zalloc() calls memset() to zero the memory before returning. OPENSSL_clear_realloc() and OPENSSL_clear_free() should be used when the buffer at B holds sensitive information. The old buffer is filled with zero's by calling OPENSSL_cleanse() -before ultimately calling OPENSSL_free(). +before ultimately calling OPENSSL_free(). If the argument to OPENSSL_free() is +NULL, nothing is done. OPENSSL_cleanse() fills B of size B with a string of 0's. Use OPENSSL_cleanse() with care if the memory is a mapping of a file. @@ -198,7 +199,7 @@ clang's memory and leak sanitizer. =head1 COPYRIGHT -Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OPENSSL_secure_malloc.pod b/deps/openssl/openssl/doc/man3/OPENSSL_secure_malloc.pod index c5d4bb2dbbb8c2..1bddd7737069b5 100644 --- a/deps/openssl/openssl/doc/man3/OPENSSL_secure_malloc.pod +++ b/deps/openssl/openssl/doc/man3/OPENSSL_secure_malloc.pod @@ -82,13 +82,15 @@ If CRYPTO_secure_malloc_init() is not called, this is equivalent to calling OPENSSL_free(). It exists for consistency with OPENSSL_secure_malloc() , and is a macro that expands to CRYPTO_secure_free() and adds the C<__FILE__> -and C<__LINE__> parameters.. +and C<__LINE__> parameters.. If the argument to OPENSSL_secure_free() +is NULL, nothing is done. OPENSSL_secure_clear_free() is similar to OPENSSL_secure_free() except that it has an additional C parameter which is used to clear the memory if it was not allocated from the secure heap. If CRYPTO_secure_malloc_init() is not called, this is equivalent to -calling OPENSSL_clear_free(). +calling OPENSSL_clear_free(). 
If the argument to OPENSSL_secure_clear_free() +is NULL, nothing is done. OPENSSL_secure_actual_size() tells the actual size allocated to the pointer; implementations may allocate more space than initially @@ -133,7 +135,7 @@ a B in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2015-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_CMP_CTX_new.pod b/deps/openssl/openssl/doc/man3/OSSL_CMP_CTX_new.pod index ce7db8f2f08628..cab88ae88c9102 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_CMP_CTX_new.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_CMP_CTX_new.pod @@ -176,6 +176,7 @@ the message timeout is set to 120 seconds, and the proof-of-possession method is set to OSSL_CRMF_POPO_SIGNATURE. OSSL_CMP_CTX_free() deallocates an OSSL_CMP_CTX structure. +If the argument is NULL, nothing is done. OSSL_CMP_CTX_reinit() prepares the given I for a further transaction by clearing the internal CMP transaction (aka session) status, PKIStatusInfo, @@ -312,6 +313,11 @@ RFC 4210. Allow retrieving a trust anchor from extraCerts and using that to validate the certificate chain of an IP message. + This is a quirk option added to support 3GPP TS 33.310. + + Note that using this option is dangerous as the certificate obtained + this way has not been authenticated (at least not at CMP level). + Taking it over as a trust anchor implements trust-on-first-use (TOFU). =back @@ -796,7 +802,7 @@ OSSL_CMP_CTX_reset_geninfo_ITAVs() was added in OpenSSL 3.0.8. =head1 COPYRIGHT -Copyright 2007-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2007-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_CMP_SRV_CTX_new.pod b/deps/openssl/openssl/doc/man3/OSSL_CMP_SRV_CTX_new.pod index d7f1a2e4dba7ba..12b1bfa88a1dbd 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_CMP_SRV_CTX_new.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_CMP_SRV_CTX_new.pod @@ -104,6 +104,7 @@ associated with the library context I and property query string I, both of which may be NULL to select the defaults. OSSL_CMP_SRV_CTX_free() deletes the given I. +If the argument is NULL, nothing is done. OSSL_CMP_SRV_CTX_init() sets in the given I a custom server context pointer as well as callback functions performing the specific processing of CMP @@ -158,7 +159,7 @@ The OpenSSL CMP support was added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2007-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2007-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_CMP_validate_msg.pod b/deps/openssl/openssl/doc/man3/OSSL_CMP_validate_msg.pod index 44c901210feb94..555624a4035836 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_CMP_validate_msg.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_CMP_validate_msg.pod @@ -40,11 +40,14 @@ using any trust store set via L. 
If the option OSSL_CMP_OPT_PERMIT_TA_IN_EXTRACERTS_FOR_IR was set by calling L, for an Initialization Response (IP) message -any self-issued certificate from the I extraCerts field may also be used -as trust anchor for the path verification of an acceptable cert if it can be +any self-issued certificate from the I extraCerts field may be used +as a trust anchor for the path verification of an 'acceptable' cert if it can be used also to validate the issued certificate returned in the IP message. This is according to TS 33.310 [Network Domain Security (NDS); Authentication Framework (AF)] document specified by the The 3rd Generation Partnership Project (3GPP). +Note that using this option is dangerous as the certificate obtained this way +has not been authenticated (at least not at CMP level). +Taking it over as a trust anchor implements trust-on-first-use (TOFU). Any cert that has been found as described above is cached and tried first when validating the signatures of subsequent messages in the same transaction. @@ -74,7 +77,7 @@ The OpenSSL CMP support was added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2007-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2007-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_DECODER.pod b/deps/openssl/openssl/doc/man3/OSSL_DECODER.pod index dcfd72bf973847..633aa07f8ffff0 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_DECODER.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_DECODER.pod @@ -61,6 +61,7 @@ I. OSSL_DECODER_free() decrements the reference count for the given I, and when the count reaches zero, frees it. +If the argument is NULL, nothing is done. OSSL_DECODER_get0_provider() returns the provider of the given I. @@ -180,7 +181,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX.pod b/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX.pod index 3ffd794cf0fb38..bace3ee0cfed3f 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX.pod @@ -126,6 +126,7 @@ decoders that have been added to the I so far. Parameters that an implementation doesn't recognise should be ignored by it. OSSL_DECODER_CTX_free() frees the given context I. +If the argument is NULL, nothing is done. OSSL_DECODER_CTX_add_decoder() populates the B I with a decoder, to be used to attempt to decode some encoded input. @@ -249,7 +250,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
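Editor's note: OSSL_DECODER_free() and OSSL_DECODER_CTX_free() gain the same NULL-tolerance note in the hunks above. A sketch of how the two objects interact when decoding a key, assuming OpenSSL 3.0 (selection 0 means "anything the decoders can produce"; the helper is illustrative):

    #include <openssl/bio.h>
    #include <openssl/decoder.h>
    #include <openssl/evp.h>

    /* Decode a PEM private key; every _free() here tolerates NULL, so the
     * error handling needs no special cases. */
    EVP_PKEY *load_pem_key(BIO *in)
    {
        EVP_PKEY *pkey = NULL;
        OSSL_DECODER_CTX *dctx =
            OSSL_DECODER_CTX_new_for_pkey(&pkey, "PEM", NULL, NULL,
                                          0, NULL, NULL);

        if (dctx == NULL || !OSSL_DECODER_from_bio(dctx, in)) {
            EVP_PKEY_free(pkey);    /* no-op when decoding never started */
            pkey = NULL;
        }
        OSSL_DECODER_CTX_free(dctx);
        return pkey;
    }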
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX_new_for_pkey.pod b/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX_new_for_pkey.pod index acb04bc3762379..e55212ad554b9e 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX_new_for_pkey.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_DECODER_CTX_new_for_pkey.pod @@ -82,7 +82,7 @@ choice of preferred pass phrase callback form. These are called indirectly, through an internal L function. The internal L function caches the pass phrase, to -be re-used in all decodings that are performed in the same decoding run (for +be reused in all decodings that are performed in the same decoding run (for example, within one L call). =head2 Input Types @@ -135,7 +135,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_ENCODER.pod b/deps/openssl/openssl/doc/man3/OSSL_ENCODER.pod index 06d8f80f881225..bbf64b0b47bf67 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_ENCODER.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_ENCODER.pod @@ -61,6 +61,7 @@ I. OSSL_ENCODER_free() decrements the reference count for the given I, and when the count reaches zero, frees it. +If the argument is NULL, nothing is done. OSSL_ENCODER_get0_provider() returns the provider of the given I. @@ -134,7 +135,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_ENCODER_CTX.pod b/deps/openssl/openssl/doc/man3/OSSL_ENCODER_CTX.pod index 7f3915fda88236..e9248c356a0551 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_ENCODER_CTX.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_ENCODER_CTX.pod @@ -102,6 +102,7 @@ with an L array I. Parameters that the implementation doesn't recognise should be ignored. OSSL_ENCODER_CTX_free() frees the given context I. +If the argument is NULL, nothing is done. OSSL_ENCODER_CTX_add_encoder() populates the B I with a encoder, to be used to encode an input object. @@ -211,7 +212,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_HTTP_REQ_CTX.pod b/deps/openssl/openssl/doc/man3/OSSL_HTTP_REQ_CTX.pod index 6216420e4ffe92..d6c4cdfa0ca7c2 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_HTTP_REQ_CTX.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_HTTP_REQ_CTX.pod @@ -71,6 +71,7 @@ which collects the HTTP request header lines. OSSL_HTTP_REQ_CTX_free() frees up the HTTP request context I. The I is not free'd, I will be free'd if I is set. +If the argument is NULL, nothing is done. OSSL_HTTP_REQ_CTX_set_request_line() adds the 1st HTTP request line to I. 
The HTTP method is determined by I, @@ -260,7 +261,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2015-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_LIB_CTX.pod b/deps/openssl/openssl/doc/man3/OSSL_LIB_CTX.pod index 45fdd8f39a6afa..ad203299e986ff 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_LIB_CTX.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_LIB_CTX.pod @@ -88,7 +88,7 @@ This can be used to associate a library context with providers that are loaded from a configuration. OSSL_LIB_CTX_free() frees the given I, unless it happens to be the -default OpenSSL library context. +default OpenSSL library context. If the argument is NULL, nothing is done. OSSL_LIB_CTX_get0_global_default() returns a concrete (non NULL) reference to the global default library context. @@ -126,7 +126,7 @@ All of the functions described on this page were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_PARAM_BLD.pod b/deps/openssl/openssl/doc/man3/OSSL_PARAM_BLD.pod index 455761c20d32b9..8598ed0626e0eb 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_PARAM_BLD.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_PARAM_BLD.pod @@ -53,6 +53,7 @@ so that values can be added. Any existing values are cleared. OSSL_PARAM_BLD_free() deallocates the memory allocates by OSSL_PARAM_BLD_new(). +If the argument is NULL, nothing is done. OSSL_PARAM_BLD_to_param() converts a built up OSSL_PARAM_BLD structure I into an allocated OSSL_PARAM array. @@ -193,7 +194,7 @@ The functions described here were all added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_PARAM_dup.pod b/deps/openssl/openssl/doc/man3/OSSL_PARAM_dup.pod index 4ae33faf1e4e8f..c8d109a2278239 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_PARAM_dup.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_PARAM_dup.pod @@ -32,6 +32,7 @@ array that have the same key. OSSL_PARAM_free() frees the parameter array I that was created using OSSL_PARAM_dup(), OSSL_PARAM_merge() or OSSL_PARAM_BLD_to_param(). +If the argument to OSSL_PARAM_free() is NULL, nothing is done. =head1 RETURN VALUES @@ -49,7 +50,7 @@ The functions were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
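Editor's note: OSSL_PARAM_BLD_free() and OSSL_PARAM_free() are covered by the hunks above. A short sketch of the builder round trip they describe (the key name "group" is just an example):

    #include <openssl/param_build.h>
    #include <openssl/params.h>

    /* Build an OSSL_PARAM array; the builder and the result are freed
     * independently, and both free functions accept NULL. */
    OSSL_PARAM *group_params(const char *group)
    {
        OSSL_PARAM_BLD *bld = OSSL_PARAM_BLD_new();
        OSSL_PARAM *params = NULL;

        if (bld != NULL
                && OSSL_PARAM_BLD_push_utf8_string(bld, "group", group, 0))
            params = OSSL_PARAM_BLD_to_param(bld);
        OSSL_PARAM_BLD_free(bld);   /* no-op when bld is NULL */
        return params;              /* caller frees with OSSL_PARAM_free() */
    }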
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_SELF_TEST_new.pod b/deps/openssl/openssl/doc/man3/OSSL_SELF_TEST_new.pod index 4c4b10fca96ad0..c46becd1ad9b75 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_SELF_TEST_new.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_SELF_TEST_new.pod @@ -32,6 +32,7 @@ The callback I may be triggered multiple times by a self test to indicate different phases. OSSL_SELF_TEST_free() frees the space allocated by OSSL_SELF_TEST_new(). +If the argument is NULL, nothing is done. OSSL_SELF_TEST_onbegin() may be inserted at the start of a block of self test code. It can be used for diagnostic purposes. @@ -165,7 +166,7 @@ The functions described here were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_STORE_INFO.pod b/deps/openssl/openssl/doc/man3/OSSL_STORE_INFO.pod index 39bb93fbf5f21b..b8332855d4d046 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_STORE_INFO.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_STORE_INFO.pod @@ -101,6 +101,7 @@ holds if the B type (as returned by OSSL_STORE_INFO_get_type()) matches the function, otherwise NULL. OSSL_STORE_INFO_free() frees a B and its contained type. +If the argument is NULL, nothing is done. OSSL_STORE_INFO_new_NAME() , OSSL_STORE_INFO_new_PARAMS(), , OSSL_STORE_INFO_new_PUBKEY(), OSSL_STORE_INFO_new_PKEY(), @@ -221,7 +222,7 @@ The OSSL_STORE_INFO_PUBKEY object type was added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_STORE_LOADER.pod b/deps/openssl/openssl/doc/man3/OSSL_STORE_LOADER.pod index 9cd016be158a9a..b4fcc7efe9351a 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_STORE_LOADER.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_STORE_LOADER.pod @@ -105,7 +105,6 @@ see L: typedef int (*OSSL_STORE_close_fn)(OSSL_STORE_LOADER_CTX *ctx); int OSSL_STORE_LOADER_set_close(OSSL_STORE_LOADER *store_loader, OSSL_STORE_close_fn store_close_function); - void OSSL_STORE_LOADER_free(OSSL_STORE_LOADER *store_loader); int OSSL_STORE_register_loader(OSSL_STORE_LOADER *loader); OSSL_STORE_LOADER *OSSL_STORE_unregister_loader(const char *scheme); @@ -126,6 +125,7 @@ I. OSSL_STORE_LOADER_free() decrements the reference count for the given I, and when the count reaches zero, frees it. +If the argument is NULL, nothing is done. OSSL_STORE_LOADER_get0_provider() returns the provider of the given I. @@ -297,6 +297,7 @@ OSSL_STORE_LOADER_set_close() sets the closing function for the I. OSSL_STORE_LOADER_free() frees the given I. +If the argument is NULL, nothing is done. 
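Editor's note: OSSL_STORE_INFO_free() joins the NULL-tolerant list above. A sketch of consuming one store object, using the OSSL_STORE_INFO accessors named in the page (the helper itself is illustrative):

    #include <openssl/pem.h>
    #include <openssl/store.h>

    /* Print a loaded object if it is a certificate, then release it;
     * OSSL_STORE_INFO_free(NULL) is a no-op. */
    void print_if_cert(OSSL_STORE_INFO *info, BIO *out)
    {
        if (info != NULL
                && OSSL_STORE_INFO_get_type(info) == OSSL_STORE_INFO_CERT)
            PEM_write_bio_X509(out, OSSL_STORE_INFO_get0_CERT(info));
        OSSL_STORE_INFO_free(info);
    }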
OSSL_STORE_register_loader() register the given I and thereby makes it available for use with OSSL_STORE_open(), @@ -358,21 +359,25 @@ L =head1 HISTORY OSSL_STORE_LOADER_fetch(), OSSL_STORE_LOADER_up_ref(), -OSSL_STORE_LOADER_free(), OSSL_STORE_LOADER_get0_provider(), -OSSL_STORE_LOADER_get0_properties(), OSSL_STORE_LOADER_is_a(), -OSSL_STORE_LOADER_do_all_provided() and -OSSL_STORE_LOADER_names_do_all() were added in OpenSSL 3.0. +OSSL_STORE_LOADER_get0_provider(), OSSL_STORE_LOADER_get0_properties(), +OSSL_STORE_LOADER_get0_description(), OSSL_STORE_LOADER_is_a(), +OSSL_STORE_LOADER_do_all_provided() and OSSL_STORE_LOADER_names_do_all() +were added in OpenSSL 3.0. -OSSL_STORE_open_ex_fn() was added in OpenSSL 3.0. +B and OSSL_STORE_LOADER_free() were added in OpenSSL +1.1.1. -B, B, OSSL_STORE_LOADER_new(), +OSSL_STORE_LOADER_set_open_ex() and OSSL_STORE_open_ex_fn() were added in +OpenSSL 3.0, and are deprecated. + +B, OSSL_STORE_LOADER_new(), OSSL_STORE_LOADER_set0_scheme(), OSSL_STORE_LOADER_get0_scheme(), OSSL_STORE_LOADER_get0_engine(), OSSL_STORE_LOADER_set_expect(), OSSL_STORE_LOADER_set_find(), OSSL_STORE_LOADER_set_attach(), OSSL_STORE_LOADER_set_open_ex(), OSSL_STORE_LOADER_set_open(), OSSL_STORE_LOADER_set_ctrl(), OSSL_STORE_LOADER_set_load(), OSSL_STORE_LOADER_set_eof(), -OSSL_STORE_LOADER_set_close(), OSSL_STORE_LOADER_free(), +OSSL_STORE_LOADER_set_close(), OSSL_STORE_register_loader(), OSSL_STORE_LOADER_set_error(), OSSL_STORE_unregister_loader(), OSSL_STORE_open_fn(), OSSL_STORE_ctrl_fn(), OSSL_STORE_load_fn(), OSSL_STORE_eof_fn() and OSSL_STORE_close_fn() @@ -380,7 +385,7 @@ were added in OpenSSL 1.1.1, and became deprecated in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/OSSL_STORE_SEARCH.pod b/deps/openssl/openssl/doc/man3/OSSL_STORE_SEARCH.pod index 79186b08997e01..bd512890c607c0 100644 --- a/deps/openssl/openssl/doc/man3/OSSL_STORE_SEARCH.pod +++ b/deps/openssl/openssl/doc/man3/OSSL_STORE_SEARCH.pod @@ -75,6 +75,7 @@ criterion, so they must have at least the same life time as the created B. OSSL_STORE_SEARCH_free() is used to free the B. +If the argument is NULL, nothing is done. =head2 Loader Functions @@ -183,7 +184,7 @@ were added in OpenSSL 1.1.1. =head1 COPYRIGHT -Copyright 2018-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2018-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/PEM_read_bio_PrivateKey.pod b/deps/openssl/openssl/doc/man3/PEM_read_bio_PrivateKey.pod index ac93920addec3c..f1635b89808a60 100644 --- a/deps/openssl/openssl/doc/man3/PEM_read_bio_PrivateKey.pod +++ b/deps/openssl/openssl/doc/man3/PEM_read_bio_PrivateKey.pod @@ -320,7 +320,9 @@ NULL but I<*x> is NULL then the structure returned will be written to I<*x>. If neither I nor I<*x> is NULL then an attempt is made to reuse the structure at I<*x> (but see BUGS and EXAMPLES sections). Irrespective of the value of I a pointer to the structure is always -returned (or NULL if an error occurred). +returned (or NULL if an error occurred). 
The caller retains ownership of the +returned object and needs to free it when it is no longer needed, e.g. +using X509_free() for X509 objects or EVP_PKEY_free() for EVP_PKEY objects. The PEM functions which write private keys take an I parameter which specifies the encryption algorithm to use, encryption is done @@ -574,7 +576,7 @@ PEM_write_bio_DHparams() and PEM_write_DHparams() were deprecated in 3.0. =head1 COPYRIGHT -Copyright 2001-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/RAND_set_DRBG_type.pod b/deps/openssl/openssl/doc/man3/RAND_set_DRBG_type.pod index f78c15ff45f3f8..423ebfad656f40 100644 --- a/deps/openssl/openssl/doc/man3/RAND_set_DRBG_type.pod +++ b/deps/openssl/openssl/doc/man3/RAND_set_DRBG_type.pod @@ -27,7 +27,7 @@ private random instances. RAND_set_seed_source_type() specifies the seed source that will be used within the library context I. The seed source of name I with properties I will be fetched and used to seed the primary -random big generator. +random bit generator. =head1 RETURN VALUES @@ -54,7 +54,7 @@ These functions were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/RSA_meth_new.pod b/deps/openssl/openssl/doc/man3/RSA_meth_new.pod index 29ea4161b0b535..40f9bc4e82810f 100644 --- a/deps/openssl/openssl/doc/man3/RSA_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/RSA_meth_new.pod @@ -147,7 +147,7 @@ passed as a parameter. This might be useful for creating a new B based on an existing one, but with some differences. RSA_meth_free() destroys an B structure and frees up any -memory associated with it. +memory associated with it. If the argument is NULL, nothing is done. RSA_meth_get0_name() will return a pointer to the name of this RSA_METHOD. This is a pointer to the internal name string and so @@ -260,7 +260,7 @@ Other functions described here were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SCT_new.pod b/deps/openssl/openssl/doc/man3/SCT_new.pod index 235762721992b1..a20affd38a8412 100644 --- a/deps/openssl/openssl/doc/man3/SCT_new.pod +++ b/deps/openssl/openssl/doc/man3/SCT_new.pod @@ -166,6 +166,12 @@ SCT_set_source() can be used to record where the SCT was found (TLS extension, X.509 certificate extension or OCSP response). This is not required for verifying the SCT. +SCT_free() frees the specified SCT. +If the argument is NULL, nothing is done. + +SCT_LIST_free() frees the specified stack of SCTs. +If the argument is NULL, nothing is done. + =head1 NOTES Some of the setters return int, instead of void. These will all return 1 on @@ -210,7 +216,7 @@ These functions were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2017 The OpenSSL Project Authors. 
All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_CIPHER_get_name.pod b/deps/openssl/openssl/doc/man3/SSL_CIPHER_get_name.pod index 7f00f09d67f8ff..a55ad4d980f9b9 100644 --- a/deps/openssl/openssl/doc/man3/SSL_CIPHER_get_name.pod +++ b/deps/openssl/openssl/doc/man3/SSL_CIPHER_get_name.pod @@ -120,7 +120,7 @@ cipher B. SSL_CIPHER_description() returns a textual description of the cipher used into the buffer B of length B provided. If B is provided, it -must be at least 128 bytes, otherwise a buffer will be allocated using +must be at least 128 bytes. If B is NULL it will be allocated using OPENSSL_malloc(). If the provided buffer is too small, or the allocation fails, B is returned. @@ -216,7 +216,7 @@ The SSL_CIPHER_get_prf_nid() function was added in OpenSSL 3.0.0. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_CTX_set_alpn_select_cb.pod b/deps/openssl/openssl/doc/man3/SSL_CTX_set_alpn_select_cb.pod index 102e6578512c1b..2997a995575677 100644 --- a/deps/openssl/openssl/doc/man3/SSL_CTX_set_alpn_select_cb.pod +++ b/deps/openssl/openssl/doc/man3/SSL_CTX_set_alpn_select_cb.pod @@ -52,7 +52,8 @@ SSL_select_next_proto, SSL_get0_alpn_selected, SSL_get0_next_proto_negotiated SSL_CTX_set_alpn_protos() and SSL_set_alpn_protos() are used by the client to set the list of protocols available to be negotiated. The B must be in protocol-list format, described below. The length of B is specified in -B. +B. Setting B to 0 clears any existing list of ALPN +protocols and no ALPN extension will be sent to the server. SSL_CTX_set_alpn_select_cb() sets the application callback B used by a server to select which protocol to use for the incoming connection. When B @@ -73,9 +74,16 @@ B and B, B must be in the protocol-list format described below. The first item in the B, B list that matches an item in the B, B list is selected, and returned in B, B. The B value will point into either B or -B, so it should be copied immediately. If no match is found, the first -item in B, B is returned in B, B. This -function can also be used in the NPN callback. +B, so it should be copied immediately. The client list must include at +least one valid (nonempty) protocol entry in the list. + +The SSL_select_next_proto() helper function can be useful from either the ALPN +callback or the NPN callback (described below). If no match is found, the first +item in B, B is returned in B, B and +B is returned. This can be useful when implementing +the NPN callback. In the ALPN case, the value returned in B and B +must be ignored if B has been returned from +SSL_select_next_proto(). SSL_CTX_set_next_proto_select_cb() sets a callback B that is called when a client needs to select a protocol from the server's provided list, and a @@ -85,9 +93,10 @@ must be set to point to the selected protocol (which may be within B). The length of the protocol name must be written into B. The server's advertised protocols are provided in B and B. The callback can assume that B is syntactically valid. The client must -select a protocol. 
It is fatal to the connection if this callback returns -a value other than B. The B parameter is the pointer -set via SSL_CTX_set_next_proto_select_cb(). +select a protocol (although it may be an empty, zero length protocol). It is +fatal to the connection if this callback returns a value other than +B or if the zero length protocol is selected. The B +parameter is the pointer set via SSL_CTX_set_next_proto_select_cb(). SSL_CTX_set_next_protos_advertised_cb() sets a callback B that is called when a TLS server needs a list of supported protocols for Next Protocol @@ -149,7 +158,8 @@ A match was found and is returned in B, B. =item OPENSSL_NPN_NO_OVERLAP No match was found. The first item in B, B is returned in -B, B. +B, B (or B and 0 in the case where the first entry in +B is invalid). =back @@ -187,7 +197,7 @@ L =head1 COPYRIGHT -Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_CTX_set_cert_store.pod b/deps/openssl/openssl/doc/man3/SSL_CTX_set_cert_store.pod index f1fef9e649cd1b..246f413136b64a 100644 --- a/deps/openssl/openssl/doc/man3/SSL_CTX_set_cert_store.pod +++ b/deps/openssl/openssl/doc/man3/SSL_CTX_set_cert_store.pod @@ -16,7 +16,9 @@ SSL_CTX_set_cert_store, SSL_CTX_set1_cert_store, SSL_CTX_get_cert_store - manipu SSL_CTX_set_cert_store() sets/replaces the certificate verification storage of B to/with B. If another X509_STORE object is currently -set in B, it will be X509_STORE_free()ed. +set in B, it will be X509_STORE_free()ed. SSL_CTX_set_cert_store() will +take ownership of the B, i.e., the call C is no +longer needed. SSL_CTX_set1_cert_store() sets/replaces the certificate verification storage of B to/with B. The B's reference count is incremented. @@ -79,7 +81,7 @@ L =head1 COPYRIGHT -Copyright 2001-2016 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_CTX_set_cipher_list.pod b/deps/openssl/openssl/doc/man3/SSL_CTX_set_cipher_list.pod index 71f399400c2a12..08d7693f420edf 100644 --- a/deps/openssl/openssl/doc/man3/SSL_CTX_set_cipher_list.pod +++ b/deps/openssl/openssl/doc/man3/SSL_CTX_set_cipher_list.pod @@ -52,7 +52,7 @@ ciphersuite names in order of preference. Valid TLSv1.3 ciphersuite names are: =back -An empty list is permissible. The default value for the this setting is: +An empty list is permissible. The default value for this setting is: "TLS_AES_256_GCM_SHA384:TLS_CHACHA20_POLY1305_SHA256:TLS_AES_128_GCM_SHA256" @@ -119,7 +119,7 @@ OSSL_default_cipher_list() and OSSL_default_ciphersites() are new in 3.0. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. 
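Editor's note: the SSL_CTX_set_cert_store() hunk above spells out the ownership rule that distinguishes it from SSL_CTX_set1_cert_store(). A sketch of the two call patterns (the wrapper functions are illustrative):

    #include <openssl/ssl.h>

    /* set_cert_store() transfers ownership of the store to the SSL_CTX. */
    void install_store(SSL_CTX *ctx, X509_STORE *store)
    {
        SSL_CTX_set_cert_store(ctx, store);  /* do NOT X509_STORE_free(store) */
    }

    /* set1_cert_store() bumps the reference count; the caller keeps its ref. */
    void share_store(SSL_CTX *ctx, X509_STORE *store)
    {
        SSL_CTX_set1_cert_store(ctx, store);
        X509_STORE_free(store);              /* drop the caller's reference */
    }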
You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_CTX_set_tlsext_ticket_key_cb.pod b/deps/openssl/openssl/doc/man3/SSL_CTX_set_tlsext_ticket_key_cb.pod index f289383c781521..e4871590f719fe 100644 --- a/deps/openssl/openssl/doc/man3/SSL_CTX_set_tlsext_ticket_key_cb.pod +++ b/deps/openssl/openssl/doc/man3/SSL_CTX_set_tlsext_ticket_key_cb.pod @@ -126,9 +126,9 @@ The I key material can be set using L. =head1 NOTES -Session resumption shortcuts the TLS so that the client certificate -negotiation don't occur. It makes up for this by storing client certificate -an all other negotiated state information encrypted within the ticket. In a +Session resumption shortcuts the TLS handshake so that the client certificate +negotiation doesn't occur. It makes up for this by storing the client certificate +and all other negotiated state information encrypted within the ticket. In a resumed session the applications will have all this state information available exactly as if a full negotiation had occurred. @@ -241,7 +241,7 @@ OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2014-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2014-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_CTX_set_verify.pod b/deps/openssl/openssl/doc/man3/SSL_CTX_set_verify.pod index 9d4abac30eba53..346aa8452974df 100644 --- a/deps/openssl/openssl/doc/man3/SSL_CTX_set_verify.pod +++ b/deps/openssl/openssl/doc/man3/SSL_CTX_set_verify.pod @@ -144,6 +144,9 @@ B ignored (see BUGS) If the B is SSL_VERIFY_NONE none of the other flags may be set. +If verification flags are not modified explicitly by C +or C, the default value will be SSL_VERIFY_NONE. + The actual verification procedure is performed either using the built-in verification procedure or using another application provided verification function set with @@ -363,7 +366,7 @@ and SSL_set_post_handshake_auth() functions were added in OpenSSL 1.1.1. =head1 COPYRIGHT -Copyright 2000-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_CTX_use_certificate.pod b/deps/openssl/openssl/doc/man3/SSL_CTX_use_certificate.pod index ca1827dada8a8d..dd6f831b8658cd 100644 --- a/deps/openssl/openssl/doc/man3/SSL_CTX_use_certificate.pod +++ b/deps/openssl/openssl/doc/man3/SSL_CTX_use_certificate.pod @@ -68,7 +68,7 @@ SSL_use_certificate() loads B into B. The rest of the certificates needed to form the complete certificate chain can be specified using the L -function. +function. On success the reference counter of the B is incremented. SSL_CTX_use_certificate_ASN1() loads the ASN1 encoded certificate from the memory location B (with length B) into B, @@ -97,6 +97,7 @@ to the certificate an error is returned. To change a [certificate/private-key] pair, the new certificate needs to be set first with SSL_use_certificate() or SSL_CTX_use_certificate() before setting the private key with SSL_CTX_use_PrivateKey() or SSL_use_PrivateKey(). +On success the reference counter of the B/B is incremented. 
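Editor's note: the SSL_CTX_set_verify() hunk above documents that the mode defaults to SSL_VERIFY_NONE until set explicitly. A minimal sketch of a server opting in to client certificate checks (the wrapper is illustrative):

    #include <openssl/ssl.h>

    /* Without this call the default mode is SSL_VERIFY_NONE, so a server
     * never requests a client certificate. */
    void require_client_cert(SSL_CTX *ctx)
    {
        SSL_CTX_set_verify(ctx,
                           SSL_VERIFY_PEER | SSL_VERIFY_FAIL_IF_NO_PEER_CERT,
                           NULL);  /* NULL keeps the built-in verify procedure */
    }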
SSL_CTX_use_cert_and_key() and SSL_use_cert_and_key() assign the X.509 certificate B, private key B, and certificate B onto the @@ -195,7 +196,7 @@ L =head1 COPYRIGHT -Copyright 2000-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/SSL_load_client_CA_file.pod b/deps/openssl/openssl/doc/man3/SSL_load_client_CA_file.pod index 988c7e89340755..08a6c15e46d965 100644 --- a/deps/openssl/openssl/doc/man3/SSL_load_client_CA_file.pod +++ b/deps/openssl/openssl/doc/man3/SSL_load_client_CA_file.pod @@ -54,7 +54,8 @@ it is not limited to CA certificates. =head1 RETURN VALUES -The following return values can occur: +The following return values can occur for SSL_load_client_CA_file_ex(), and +SSL_load_client_CA_file(): =over 4 @@ -68,6 +69,21 @@ Pointer to the subject names of the successfully read certificates. =back +The following return values can occur for SSL_add_file_cert_subjects_to_stack(), +SSL_add_dir_cert_subjects_to_stack(), and SSL_add_store_cert_subjects_to_stack(): + +=over 4 + +=item 0 (Failure) + +The operation failed. + +=item 1 (Success) + +The operation succeeded. + +=back + =head1 EXAMPLES Load names of CAs from file and use it as a client CA list: @@ -96,7 +112,7 @@ were added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2000-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2000-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/TS_RESP_CTX_new.pod b/deps/openssl/openssl/doc/man3/TS_RESP_CTX_new.pod index 725a1921d133ed..01fd23a10eb44f 100644 --- a/deps/openssl/openssl/doc/man3/TS_RESP_CTX_new.pod +++ b/deps/openssl/openssl/doc/man3/TS_RESP_CTX_new.pod @@ -27,6 +27,7 @@ and property query to NULL. This results in the default (NULL) library context being used for any operations requiring algorithm fetches. TS_RESP_CTX_free() frees the B object I. +If the argument is NULL, nothing is done. =head1 RETURN VALUES @@ -39,7 +40,7 @@ The function TS_RESP_CTX_new_ex() was added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/X509V3_get_d2i.pod b/deps/openssl/openssl/doc/man3/X509V3_get_d2i.pod index 4a2e81b0dbdff9..88294ff710f2e0 100644 --- a/deps/openssl/openssl/doc/man3/X509V3_get_d2i.pod +++ b/deps/openssl/openssl/doc/man3/X509V3_get_d2i.pod @@ -108,6 +108,7 @@ The function X509V3_get_d2i() and its variants will return NULL if the extension is not found, occurs multiple times or cannot be decoded. It is possible to determine the precise reason by checking the value of I<*crit>. +The returned pointer must be explicitly freed. The function X509V3_add1_i2d() and its variants allocate B objects on STACK I<*x> depending on I. The B objects @@ -236,7 +237,7 @@ L =head1 COPYRIGHT -Copyright 2015-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. 
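Editor's note: X509V3_get_d2i.pod now states that the returned pointer must be explicitly freed. A sketch of that rule applied through the typed X509_get_ext_d2i() wrapper (the NID choice is just an example):

    #include <openssl/x509v3.h>

    /* The decoded extension is owned by the caller, not the certificate. */
    void inspect_basic_constraints(X509 *x)
    {
        BASIC_CONSTRAINTS *bc =
            X509_get_ext_d2i(x, NID_basic_constraints, NULL, NULL);

        if (bc != NULL) {
            /* ... use bc->ca and bc->pathlen ... */
            BASIC_CONSTRAINTS_free(bc);  /* mandatory: not freed with x */
        }
    }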
Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/X509_LOOKUP.pod b/deps/openssl/openssl/doc/man3/X509_LOOKUP.pod index 4d2fe38f25ab2a..8f0240de2d13b7 100644 --- a/deps/openssl/openssl/doc/man3/X509_LOOKUP.pod +++ b/deps/openssl/openssl/doc/man3/X509_LOOKUP.pod @@ -85,6 +85,7 @@ X509_LOOKUP_shutdown() tears down the internal state and resources of the given B. X509_LOOKUP_free() destructs the given B. +If the argument is NULL, nothing is done. X509_LOOKUP_set_method_data() and X509_LOOKUP_get_method_data() associates and retrieves a pointer to application data to and from the @@ -228,7 +229,7 @@ added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/X509_LOOKUP_meth_new.pod b/deps/openssl/openssl/doc/man3/X509_LOOKUP_meth_new.pod index 2021749935631b..ef2c3cdd500ad5 100644 --- a/deps/openssl/openssl/doc/man3/X509_LOOKUP_meth_new.pod +++ b/deps/openssl/openssl/doc/man3/X509_LOOKUP_meth_new.pod @@ -110,6 +110,7 @@ be given a human-readable string containing a brief description of the lookup method. X509_LOOKUP_meth_free() destroys a B structure. +If the argument is NULL, nothing is done. X509_LOOKUP_get_new_item() and X509_LOOKUP_set_new_item() get and set the function that is called when an B object is created with @@ -186,7 +187,7 @@ The functions described here were added in OpenSSL 1.1.0i. =head1 COPYRIGHT -Copyright 2018-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2018-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/X509_STORE_new.pod b/deps/openssl/openssl/doc/man3/X509_STORE_new.pod index 31d466faa42a7e..5b3dadb243d709 100644 --- a/deps/openssl/openssl/doc/man3/X509_STORE_new.pod +++ b/deps/openssl/openssl/doc/man3/X509_STORE_new.pod @@ -27,6 +27,7 @@ X509_STORE_lock() locks the store from modification by other threads, X509_STORE_unlock() unlocks it. X509_STORE_free() frees up a single X509_STORE object. +If the argument is NULL, nothing is done. =head1 RETURN VALUES @@ -49,7 +50,7 @@ functions were added in OpenSSL 1.1.0. =head1 COPYRIGHT -Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/X509_dup.pod b/deps/openssl/openssl/doc/man3/X509_dup.pod index 849364e2aae700..c8dd79ff71b73e 100644 --- a/deps/openssl/openssl/doc/man3/X509_dup.pod +++ b/deps/openssl/openssl/doc/man3/X509_dup.pod @@ -367,7 +367,7 @@ followed by I, which re-builds the cached data. B_free>() releases the object and all pointers and sub-objects -within it. +within it. If the argument is NULL, nothing is done. B_print_ctx>() prints the object I on the specified BIO I. Each line will be prefixed with I spaces. 
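Editor's note: X509_STORE_free() is another routine gaining the NULL note above. A sketch of the resulting single-exit construction pattern, assuming OpenSSL 3.0's X509_STORE_load_file() (the helper is illustrative):

    #include <openssl/x509_vfy.h>

    /* Build a store from one CA file; the shared error path relies on
     * X509_STORE_free(NULL) being a no-op. */
    X509_STORE *store_from_file(const char *ca_file)
    {
        X509_STORE *store = X509_STORE_new();

        if (store == NULL || !X509_STORE_load_file(store, ca_file)) {
            X509_STORE_free(store);  /* safe even when store is NULL */
            return NULL;
        }
        return store;
    }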
diff --git a/deps/openssl/openssl/doc/man3/X509_new.pod b/deps/openssl/openssl/doc/man3/X509_new.pod index ea2b3a2cc9b0dc..4b52972554dee9 100644 --- a/deps/openssl/openssl/doc/man3/X509_new.pod +++ b/deps/openssl/openssl/doc/man3/X509_new.pod @@ -18,7 +18,7 @@ X509_chain_up_ref - X509 certificate ASN1 allocation functions =head1 DESCRIPTION -The X509 ASN1 allocation routines, allocate and free an +The X509 ASN1 allocation routines allocate and free an X509 structure, which represents an X509 certificate. X509_new_ex() allocates and initializes a X509 structure with a @@ -33,7 +33,8 @@ and property query to NULL. This results in the default (NULL) library context being used for any X509 operations requiring algorithm fetches. X509_free() decrements the reference count of B structure B and -frees it up if the reference count is zero. If B is NULL nothing is done. +frees it up if the reference count is zero. If the argument is NULL, +nothing is done. X509_up_ref() increments the reference count of B. @@ -86,7 +87,7 @@ The function X509_new_ex() was added in OpenSSL 3.0. =head1 COPYRIGHT -Copyright 2002-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2002-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man3/d2i_X509.pod b/deps/openssl/openssl/doc/man3/d2i_X509.pod index 00efb603581661..c4b589dd89576e 100644 --- a/deps/openssl/openssl/doc/man3/d2i_X509.pod +++ b/deps/openssl/openssl/doc/man3/d2i_X509.pod @@ -387,7 +387,9 @@ B>() attempts to decode I bytes at I<*ppin>. If successful a pointer to the B> structure is returned and I<*ppin> is incremented to the byte following the parsed data. If I is not NULL then a pointer to the returned structure is also written to I<*a>. If an error occurred -then NULL is returned. +then NULL is returned. The caller retains ownership of the +returned object and needs to free it when it is no longer needed, e.g. +using X509_free() for X509 objects or DSA_SIG_free() for DSA_SIG objects. On a successful return, if I<*a> is not NULL then it is assumed that I<*a> contains a valid B> structure and an attempt is made to reuse it. @@ -615,7 +617,7 @@ efficiency reasons. =head1 COPYRIGHT -Copyright 1998-2023 The OpenSSL Project Authors. All Rights Reserved. +Copyright 1998-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man7/EVP_KEYEXCH-DH.pod b/deps/openssl/openssl/doc/man7/EVP_KEYEXCH-DH.pod index a6927afefb24e9..eaec67775ddb7f 100644 --- a/deps/openssl/openssl/doc/man7/EVP_KEYEXCH-DH.pod +++ b/deps/openssl/openssl/doc/man7/EVP_KEYEXCH-DH.pod @@ -7,9 +7,14 @@ EVP_KEYEXCH-DH =head1 DESCRIPTION -Key exchange support for the B key type. +Key exchange support for the B and B key types. -=head2 DH key exchange parameters +Please note that although both key types support the same key exchange +operations, they cannot be used together in a single key exchange. It +is not possible to use a private key of the B type in key exchange +with the public key of B type and vice versa. + +=head2 DH and DHX key exchange parameters =over 4 @@ -122,7 +127,7 @@ L, =head1 COPYRIGHT -Copyright 2020-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. 
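Editor's note: the d2i_X509.pod hunk below adds the ownership rule for decoded objects. A sketch of the pointer-advance behaviour it implies (the wrapper is illustrative):

    #include <openssl/x509.h>

    /* d2i_X509() advances the pointer it is given, so decode through a
     * temporary; the caller owns the result and must X509_free() it. */
    X509 *cert_from_der(const unsigned char *der, long len)
    {
        const unsigned char *p = der;   /* keep der itself intact */

        return d2i_X509(NULL, &p, len);
    }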
All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man7/EVP_PKEY-DH.pod b/deps/openssl/openssl/doc/man7/EVP_PKEY-DH.pod index 1dfc9c7345442f..1232730c33647c 100644 --- a/deps/openssl/openssl/doc/man7/EVP_PKEY-DH.pod +++ b/deps/openssl/openssl/doc/man7/EVP_PKEY-DH.pod @@ -7,11 +7,12 @@ EVP_PKEY-DH, EVP_PKEY-DHX, EVP_KEYMGMT-DH, EVP_KEYMGMT-DHX =head1 DESCRIPTION -For B FFC key agreement, two classes of domain parameters can be used: -"safe" domain parameters that are associated with approved named safe-prime -groups, and a class of "FIPS186-type" domain parameters. FIPS186-type domain -parameters should only be used for backward compatibility with existing -applications that cannot be upgraded to use the approved safe-prime groups. +For finite field Diffie-Hellman key agreement, two classes of domain +parameters can be used: "safe" domain parameters that are associated with +approved named safe-prime groups, and a class of "FIPS186-type" domain +parameters. FIPS186-type domain parameters should only be used for backward +compatibility with existing applications that cannot be upgraded to use the +approved safe-prime groups. See L for more information about FFC keys. @@ -20,11 +21,11 @@ I value. The B key type uses X9.42 format which saves the value of I and this must be used for FIPS186-4. If key validation is required, users should be aware of the nuances associated with FIPS186-4 style parameters as discussed in -L. +L. =head2 DH and DHX domain parameters -In addition to the common FCC parameters that all FFC keytypes should support +In addition to the common FFC parameters that all FFC keytypes should support (see L) the B and B keytype implementations support the following: @@ -129,43 +130,44 @@ Where s is the security strength of the key which has values of =back -=head2 DH key validation +=head2 DH and DHX key validation -For B that is not a named group the FIPS186-4 standard specifies that the +For keys that are not a named group the FIPS186-4 standard specifies that the values used for FFC parameter generation are also required for parameter validation. This means that optional FFC domain parameter values for I, I and I or I may need to be stored for validation purposes. For B the I and I can be stored in ASN1 data (but the I or I cannot be stored). It is recommended to use a -named safe prime group instead. +B parameters with named safe prime group instead. -For DH keys, L behaves in the following way: -The OpenSSL FIPS provider tests if the parameters are either an approved safe -prime group OR that the FFC parameters conform to FIPS186-4 as defined in -SP800-56Ar3 I. -The OpenSSL default provider uses simpler checks that allows there to be no I -value for backwards compatibility. +With the OpenSSL FIPS provider, L and +L behave in the following way: the parameters +are tested if they are either an approved safe prime group OR that the FFC +parameters conform to FIPS186-4 as defined in SP800-56Ar3 I. -For DH keys, L is equivalent to -L. +The OpenSSL default provider uses simpler checks that allows there to be no I +value for backwards compatibility, however the L will +test the I
<p> value for being a prime (and a safe prime if I<q> is missing) +which can take significant time. The L<EVP_PKEY_param_check_quick(3)> avoids +the prime tests. -For DH keys, L<EVP_PKEY_public_check(3)> conforms to -SP800-56Ar3 I<FFC Full Public-Key Validation>. +L<EVP_PKEY_public_check(3)> conforms to SP800-56Ar3 +I<FFC Full Public-Key Validation>. -For DH keys, L<EVP_PKEY_public_check_quick(3)> conforms to -SP800-56Ar3 I<FFC Partial Public-Key Validation> when the -DH key is an approved named safe prime group, otherwise it is the same as -L<EVP_PKEY_public_check(3)>. +L<EVP_PKEY_public_check_quick(3)> conforms to SP800-56Ar3 +I<FFC Partial Public-Key Validation> when the key is an approved named safe +prime group, otherwise it is the same as L<EVP_PKEY_public_check(3)>. -For DH Keys, L<EVP_PKEY_private_check(3)> tests that the private key is in the -correct range according to SP800-56Ar3. The OpenSSL FIPS provider requires the -value of I<q> to be set (note that this is set for named safe prime groups). +L<EVP_PKEY_private_check(3)> tests that the private key is in the correct range +according to SP800-56Ar3. The OpenSSL FIPS provider requires the value of I<q> +to be set (note that this is implicitly set for named safe prime groups). For backwards compatibility the OpenSSL default provider only requires I<p>
    to be set. -For DH keys, L conforms to -SP800-56Ar3 I. +L conforms to SP800-56Ar3 +I. =head1 EXAMPLES @@ -327,7 +329,7 @@ L =head1 COPYRIGHT -Copyright 2020-2022 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man7/EVP_PKEY-SM2.pod b/deps/openssl/openssl/doc/man7/EVP_PKEY-SM2.pod index 8bdc506cec21f7..28a0e995d5d1a6 100644 --- a/deps/openssl/openssl/doc/man7/EVP_PKEY-SM2.pod +++ b/deps/openssl/openssl/doc/man7/EVP_PKEY-SM2.pod @@ -38,6 +38,9 @@ Getter that returns the default digest name. B signatures can be generated by using the 'DigestSign' series of APIs, for instance, EVP_DigestSignInit(), EVP_DigestSignUpdate() and EVP_DigestSignFinal(). Ditto for the verification process by calling the 'DigestVerify' series of APIs. +Note that the SM2 algorithm requires the presence of the public key for signatures, +as such the B option must be set on any key used in signature +generation. Before computing an B signature, an B needs to be created, and an B ID must be set for it, like this: @@ -84,7 +87,7 @@ L =head1 COPYRIGHT -Copyright 2018-2021 The OpenSSL Project Authors. All Rights Reserved. +Copyright 2018-2024 The OpenSSL Project Authors. All Rights Reserved. Licensed under the Apache License 2.0 (the "License"). You may not use this file except in compliance with the License. You can obtain a copy diff --git a/deps/openssl/openssl/doc/man7/migration_guide.pod b/deps/openssl/openssl/doc/man7/migration_guide.pod index 61641324a7fc9d..e5ab29b95370b2 100644 --- a/deps/openssl/openssl/doc/man7/migration_guide.pod +++ b/deps/openssl/openssl/doc/man7/migration_guide.pod @@ -136,6 +136,14 @@ To ensure the future compatibility, the engines should be turned to providers. To prefer the provider-based hardware offload, you can specify the default properties to prefer your provider. +Setting engine-based or application-based default low-level crypto method such +as B or B is still possible and keys inside the +default provider will use the engine-based implementation for the crypto +operations. However Bs created by decoding by using B, +B or B APIs will be provider-based. To create a fully legacy +Bs L, L or similar +functions must be used. + =head3 Versioning Scheme The OpenSSL versioning scheme has changed with the OpenSSL 3.0 release. The new @@ -1298,7 +1306,7 @@ d2i_DSAPrivateKey_bio(), d2i_DSAPrivateKey_fp(), d2i_DSA_PUBKEY(), d2i_DSA_PUBKEY_bio(), d2i_DSA_PUBKEY_fp(), d2i_DSAPublicKey(), d2i_ECParameters(), d2i_ECPrivateKey(), d2i_ECPrivateKey_bio(), d2i_ECPrivateKey_fp(), d2i_EC_PUBKEY(), d2i_EC_PUBKEY_bio(), -d2i_EC_PUBKEY_fp(), o2i_ECPublicKey(), d2i_RSAPrivateKey(), +d2i_EC_PUBKEY_fp(), d2i_RSAPrivateKey(), d2i_RSAPrivateKey_bio(), d2i_RSAPrivateKey_fp(), d2i_RSA_PUBKEY(), d2i_RSA_PUBKEY_bio(), d2i_RSA_PUBKEY_fp(), d2i_RSAPublicKey(), d2i_RSAPublicKey_bio(), d2i_RSAPublicKey_fp() @@ -1307,6 +1315,13 @@ See L =item * +o2i_ECPublicKey() + +Use L. 
diff --git a/deps/openssl/openssl/doc/man7/migration_guide.pod b/deps/openssl/openssl/doc/man7/migration_guide.pod
index 61641324a7fc9d..e5ab29b95370b2 100644
--- a/deps/openssl/openssl/doc/man7/migration_guide.pod
+++ b/deps/openssl/openssl/doc/man7/migration_guide.pod
@@ -136,6 +136,14 @@ To ensure the future compatibility, the engines should be turned to providers.
 To prefer the provider-based hardware offload, you can specify the default
 properties to prefer your provider.
 
+Setting engine-based or application-based default low-level crypto method such
+as B<RSA_METHOD> or B<EC_KEY_METHOD> is still possible and keys inside the
+default provider will use the engine-based implementation for the crypto
+operations. However B<EVP_PKEY>s created by decoding by using B<OSSL_DECODER>,
+B<PEM_read_bio_PrivateKey> or B<d2i_PrivateKey> APIs will be provider-based.
+To create a fully legacy B<EVP_PKEY>s L<EVP_PKEY_set1_RSA(3)>,
+L<EVP_PKEY_set1_EC_KEY(3)> or similar functions must be used.
+
 =head3 Versioning Scheme
 
 The OpenSSL versioning scheme has changed with the OpenSSL 3.0 release. The new
@@ -1298,7 +1306,7 @@ d2i_DSAPrivateKey_bio(), d2i_DSAPrivateKey_fp(), d2i_DSA_PUBKEY(),
 d2i_DSA_PUBKEY_bio(), d2i_DSA_PUBKEY_fp(), d2i_DSAPublicKey(),
 d2i_ECParameters(), d2i_ECPrivateKey(), d2i_ECPrivateKey_bio(),
 d2i_ECPrivateKey_fp(), d2i_EC_PUBKEY(), d2i_EC_PUBKEY_bio(),
-d2i_EC_PUBKEY_fp(), o2i_ECPublicKey(), d2i_RSAPrivateKey(),
+d2i_EC_PUBKEY_fp(), d2i_RSAPrivateKey(),
 d2i_RSAPrivateKey_bio(), d2i_RSAPrivateKey_fp(), d2i_RSA_PUBKEY(),
 d2i_RSA_PUBKEY_bio(), d2i_RSA_PUBKEY_fp(), d2i_RSAPublicKey(),
 d2i_RSAPublicKey_bio(), d2i_RSAPublicKey_fp()
@@ -1307,6 +1315,13 @@ See L
 
 =item *
 
+o2i_ECPublicKey()
+
+Use L<EVP_PKEY_set1_encoded_public_key(3)>.
+See L
+
+=item *
+
 DES_crypt(), DES_fcrypt(), DES_encrypt1(), DES_encrypt2(), DES_encrypt3(),
 DES_decrypt3(), DES_ede3_cbc_encrypt(), DES_ede3_cfb64_encrypt(),
 DES_ede3_cfb_encrypt(),DES_ede3_ofb64_encrypt(),
@@ -1857,13 +1872,20 @@ and L
 i2d_ECParameters(), i2d_ECPrivateKey(), i2d_ECPrivateKey_bio(),
 i2d_ECPrivateKey_fp(), i2d_EC_PUBKEY(), i2d_EC_PUBKEY_bio(),
-i2d_EC_PUBKEY_fp(), i2o_ECPublicKey()
+i2d_EC_PUBKEY_fp()
 
 See L and L
 
 =item *
 
+i2o_ECPublicKey()
+
+Use L<EVP_PKEY_get1_encoded_public_key(3)>.
+See L
+
+=item *
+
 i2d_RSAPrivateKey(), i2d_RSAPrivateKey_bio(), i2d_RSAPrivateKey_fp(),
 i2d_RSA_PUBKEY(), i2d_RSA_PUBKEY_bio(), i2d_RSA_PUBKEY_fp(),
 i2d_RSAPublicKey(), i2d_RSAPublicKey_bio(), i2d_RSAPublicKey_fp()
@@ -2462,7 +2484,7 @@ The migration guide was created for OpenSSL 3.0.
 
 =head1 COPYRIGHT
 
-Copyright 2021-2023 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved.
 
 Licensed under the Apache License 2.0 (the "License").  You may not use
 this file except in compliance with the License.  You can obtain a copy
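The replacements named in the o2i_ECPublicKey()/i2o_ECPublicKey() entries
above operate on a whole EVP_PKEY rather than a raw EC_KEY. A minimal sketch
of the round trip, assuming an EC key already loaded into a hypothetical
pkey variable:

    #include <openssl/crypto.h>
    #include <openssl/evp.h>

    /* Export the public key in its encoded octet-string form (the
     * i2o_ECPublicKey() replacement), then feed it back in (the
     * o2i_ECPublicKey() replacement). */
    static int roundtrip_encoded_pubkey(EVP_PKEY *pkey)
    {
        unsigned char *pub = NULL;
        size_t publen = EVP_PKEY_get1_encoded_public_key(pkey, &pub);
        int ok = publen > 0
            && EVP_PKEY_set1_encoded_public_key(pkey, pub, publen) > 0;

        OPENSSL_free(pub);
        return ok;
    }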
diff --git a/deps/openssl/openssl/doc/man7/ossl_store.pod b/deps/openssl/openssl/doc/man7/ossl_store.pod
index 3152cff104240a..d6fdae8f22b0f5 100644
--- a/deps/openssl/openssl/doc/man7/ossl_store.pod
+++ b/deps/openssl/openssl/doc/man7/ossl_store.pod
@@ -44,7 +44,11 @@ other encoding is undefined.
 
 =head2 A generic call
 
-    OSSL_STORE_CTX *ctx = OSSL_STORE_open("file:/foo/bar/data.pem");
+    #include <openssl/ui.h> /* for UI_get_default_method */
+    #include <openssl/store.h>
+
+    OSSL_STORE_CTX *ctx = OSSL_STORE_open("file:/foo/bar/data.pem",
+                                          UI_get_default_method(), NULL, NULL, NULL);
 
     /*
      * OSSL_STORE_eof() simulates file semantics for any repository to signal
@@ -65,6 +69,7 @@ other encoding is undefined.
             PEM_write_X509(stdout, OSSL_STORE_INFO_get0_CERT(info));
             break;
         }
+        OSSL_STORE_INFO_free(info);
     }
 
     OSSL_STORE_close(ctx);
@@ -77,7 +82,7 @@ L
 
 =head1 COPYRIGHT
 
-Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved.
+Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved.
 
 Licensed under the Apache License 2.0 (the "License").  You may not use
 this file except in compliance with the License.  You can obtain a copy
diff --git a/deps/openssl/openssl/e_os.h b/deps/openssl/openssl/e_os.h
index db05b7f8150f89..72eab92eeb4b89 100644
--- a/deps/openssl/openssl/e_os.h
+++ b/deps/openssl/openssl/e_os.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License").  You may not use
  * this file except in compliance with the License.  You can obtain a copy
@@ -296,20 +296,18 @@ static ossl_inline void ossl_sleep(unsigned long millis)
     ts.tv_sec = (long int) (millis / 1000);
     ts.tv_nsec = (long int) (millis % 1000) * 1000000ul;
     nanosleep(&ts, NULL);
-# elif defined(__TANDEM)
-#  if !defined(_REENTRANT)
+# elif defined(__TANDEM) && !defined(_REENTRANT)
 #   include <cextdecs.h(PROCESS_DELAY_)>
+    /* HPNS does not support usleep for non threaded apps */
     PROCESS_DELAY_(millis * 1000);
-#  elif defined(_SPT_MODEL_)
-#   include <spthread.h>
-#   include <spt_extensions.h>
-    usleep(millis * 1000);
-#  else
-    usleep(millis * 1000);
-#  endif
 # else
-    usleep(millis * 1000);
+    unsigned int s = (unsigned int)(millis / 1000);
+    unsigned int us = (unsigned int)((millis % 1000) * 1000);
+
+    if (s > 0)
+        sleep(s);
+    usleep(us);
 # endif
 }
 #elif defined(_WIN32)
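The rewritten fallback branch above splits the interval because POSIX only
requires usleep() to accept arguments below one million microseconds, so
whole seconds are handed to sleep() first. A standalone sketch of the same
pattern, with a hypothetical helper name:

    #include <unistd.h>

    /* Sleep for millis milliseconds using only sleep()/usleep().
     * usleep() arguments must stay below 1,000,000, so whole seconds
     * are peeled off first. */
    static void sleep_millis(unsigned long millis)
    {
        unsigned int s = (unsigned int)(millis / 1000);
        unsigned int us = (unsigned int)((millis % 1000) * 1000);

        if (s > 0)
            sleep(s);
        usleep(us);
    }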
diff --git a/deps/openssl/openssl/engines/e_afalg.c b/deps/openssl/openssl/engines/e_afalg.c
index 2c08cbb28dde39..ec4e21c582c8e8 100644
--- a/deps/openssl/openssl/engines/e_afalg.c
+++ b/deps/openssl/openssl/engines/e_afalg.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License").  You may not use
  * this file except in compliance with the License.  You can obtain a copy
@@ -811,8 +811,10 @@ static int bind_helper(ENGINE *e, const char *id)
     if (!afalg_chk_platform())
         return 0;
 
-    if (!bind_afalg(e))
+    if (!bind_afalg(e)) {
+        afalg_destroy(e);
         return 0;
+    }
     return 1;
 }
diff --git a/deps/openssl/openssl/engines/e_dasync.c b/deps/openssl/openssl/engines/e_dasync.c
index 7974106ae2197f..329d618f555297 100644
--- a/deps/openssl/openssl/engines/e_dasync.c
+++ b/deps/openssl/openssl/engines/e_dasync.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2015-2021 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License").  You may not use
  * this file except in compliance with the License.  You can obtain a copy
@@ -985,7 +985,7 @@ static int dasync_rsa_decrypt(EVP_PKEY_CTX *ctx, unsigned char *out,
                               size_t inlen);
 
     if (pdecrypt == NULL)
-        EVP_PKEY_meth_get_encrypt(dasync_rsa_orig, NULL, &pdecrypt);
+        EVP_PKEY_meth_get_decrypt(dasync_rsa_orig, NULL, &pdecrypt);
 
     return pdecrypt(ctx, out, outlen, in, inlen);
 }
diff --git a/deps/openssl/openssl/fuzz/asn1.c b/deps/openssl/openssl/fuzz/asn1.c
index ee602a08a3d912..f7a019774b9ddd 100644
--- a/deps/openssl/openssl/fuzz/asn1.c
+++ b/deps/openssl/openssl/fuzz/asn1.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -312,10 +312,16 @@ int FuzzerTestOneInput(const uint8_t *buf, size_t len)
         ASN1_VALUE *o = ASN1_item_d2i(NULL, &b, len, i);
 
         if (o != NULL) {
-            BIO *bio = BIO_new(BIO_s_null());
-            if (bio != NULL) {
-                ASN1_item_print(bio, o, 4, i, pctx);
-                BIO_free(bio);
+            /*
+             * Don't print excessively long output to prevent spurious fuzzer
+             * timeouts.
+             */
+            if (b - buf < 10000) {
+                BIO *bio = BIO_new(BIO_s_null());
+                if (bio != NULL) {
+                    ASN1_item_print(bio, o, 4, i, pctx);
+                    BIO_free(bio);
+                }
             }
             if (ASN1_item_i2d(o, &der, i) > 0) {
                 OPENSSL_free(der);
diff --git a/deps/openssl/openssl/fuzz/bignum.c b/deps/openssl/openssl/fuzz/bignum.c
index d7c3716aacb43b..08da6fb197f5be 100644
--- a/deps/openssl/openssl/fuzz/bignum.c
+++ b/deps/openssl/openssl/fuzz/bignum.c
@@ -1,5 +1,5 @@
 /*
- * Copyright 2016 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License");
  * you may not use this file except in compliance with the License.
@@ -52,11 +52,12 @@ int FuzzerTestOneInput(const uint8_t *buf, size_t len)
      */
     if (len > 2) {
         len -= 3;
-        l1 = (buf[0] * len) / 255;
+        /* limit l1, l2, and l3 to be no more than 512 bytes */
+        l1 = ((buf[0] * len) / 255) % 512;
         ++buf;
-        l2 = (buf[0] * (len - l1)) / 255;
+        l2 = ((buf[0] * (len - l1)) / 255) % 512;
         ++buf;
-        l3 = len - l1 - l2;
+        l3 = (len - l1 - l2) % 512;
 
         s1 = buf[0] & 1;
         s3 = buf[0] & 4;
diff --git a/deps/openssl/openssl/include/crypto/aes_platform.h b/deps/openssl/openssl/include/crypto/aes_platform.h
index e95ad5aa5de6f8..30c968b62c6c51 100644
--- a/deps/openssl/openssl/include/crypto/aes_platform.h
+++ b/deps/openssl/openssl/include/crypto/aes_platform.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License").  You may not use
  * this file except in compliance with the License.  You can obtain a copy
@@ -65,6 +65,7 @@ void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
 #  ifdef VPAES_ASM
 #   define VPAES_CAPABLE (OPENSSL_ppccap_P & PPC_ALTIVEC)
 #  endif
+#  if !defined(OPENSSL_SYS_MACOSX)
 #   define HWAES_CAPABLE (OPENSSL_ppccap_P & PPC_CRYPTO207)
 #   define HWAES_set_encrypt_key aes_p8_set_encrypt_key
 #   define HWAES_set_decrypt_key aes_p8_set_decrypt_key
@@ -74,6 +75,7 @@ void AES_xts_decrypt(const unsigned char *inp, unsigned char *out, size_t len,
 #   define HWAES_ctr32_encrypt_blocks aes_p8_ctr32_encrypt_blocks
 #   define HWAES_xts_encrypt aes_p8_xts_encrypt
 #   define HWAES_xts_decrypt aes_p8_xts_decrypt
+#  endif /* OPENSSL_SYS_MACOSX */
 # endif /* PPC */
 
 # if (defined(__arm__) || defined(__arm) || defined(__aarch64__))
diff --git a/deps/openssl/openssl/include/crypto/bn.h b/deps/openssl/openssl/include/crypto/bn.h
index fd1c09d997de5b..c5f328156d3a9c 100644
--- a/deps/openssl/openssl/include/crypto/bn.h
+++ b/deps/openssl/openssl/include/crypto/bn.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2014-2023 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2014-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License").  You may not use
  * this file except in compliance with the License.  You can obtain a copy
@@ -87,6 +87,14 @@ int bn_lshift_fixed_top(BIGNUM *r, const BIGNUM *a, int n);
 int bn_rshift_fixed_top(BIGNUM *r, const BIGNUM *a, int n);
 int bn_div_fixed_top(BIGNUM *dv, BIGNUM *rem, const BIGNUM *m,
                      const BIGNUM *d, BN_CTX *ctx);
+int ossl_bn_mask_bits_fixed_top(BIGNUM *a, int n);
+int ossl_bn_is_word_fixed_top(const BIGNUM *a, const BN_ULONG w);
+int ossl_bn_priv_rand_range_fixed_top(BIGNUM *r, const BIGNUM *range,
+                                      unsigned int strength, BN_CTX *ctx);
+int ossl_bn_gen_dsa_nonce_fixed_top(BIGNUM *out, const BIGNUM *range,
+                                    const BIGNUM *priv,
+                                    const unsigned char *message,
+                                    size_t message_len, BN_CTX *ctx);
 
 #define BN_PRIMETEST_COMPOSITE 0
 #define BN_PRIMETEST_COMPOSITE_WITH_FACTOR 1
diff --git a/deps/openssl/openssl/include/internal/constant_time.h b/deps/openssl/openssl/include/internal/constant_time.h
index 0ed6f823c11edc..2b49afe1ea2a5c 100644
--- a/deps/openssl/openssl/include/internal/constant_time.h
+++ b/deps/openssl/openssl/include/internal/constant_time.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 2014-2021 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 2014-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License").  You may not use
  * this file except in compliance with the License.  You can obtain a copy
@@ -140,6 +140,29 @@ static ossl_inline uint64_t constant_time_lt_64(uint64_t a, uint64_t b)
     return constant_time_msb_64(a ^ ((a ^ b) | ((a - b) ^ b)));
 }
 
+#ifdef BN_ULONG
+static ossl_inline BN_ULONG constant_time_msb_bn(BN_ULONG a)
+{
+    return 0 - (a >> (sizeof(a) * 8 - 1));
+}
+
+static ossl_inline BN_ULONG constant_time_lt_bn(BN_ULONG a, BN_ULONG b)
+{
+    return constant_time_msb_bn(a ^ ((a ^ b) | ((a - b) ^ b)));
+}
+
+static ossl_inline BN_ULONG constant_time_is_zero_bn(BN_ULONG a)
+{
+    return constant_time_msb_bn(~a & (a - 1));
+}
+
+static ossl_inline BN_ULONG constant_time_eq_bn(BN_ULONG a,
+                                                BN_ULONG b)
+{
+    return constant_time_is_zero_bn(a ^ b);
+}
+#endif
+
 static ossl_inline unsigned int constant_time_ge(unsigned int a,
                                                  unsigned int b)
 {
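The BN_ULONG helpers added above reuse the standard branch-free mask idiom:
produce an all-ones or all-zero word, then select with it. A self-contained
illustration of the same technique on uint64_t (not the OpenSSL internals
themselves; helper names are hypothetical):

    #include <stdint.h>

    /* All-ones if the top bit of a is set, else all-zero. */
    static uint64_t ct_msb(uint64_t a)
    {
        return 0 - (a >> 63);
    }

    /* All-ones if a < b (unsigned), computed without a branch. */
    static uint64_t ct_lt(uint64_t a, uint64_t b)
    {
        return ct_msb(a ^ ((a ^ b) | ((a - b) ^ b)));
    }

    /* Select x where mask is all-ones, y where mask is all-zero. */
    static uint64_t ct_select(uint64_t mask, uint64_t x, uint64_t y)
    {
        return (mask & x) | (~mask & y);
    }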
diff --git a/deps/openssl/openssl/include/openssl/sslerr.h b/deps/openssl/openssl/include/openssl/sslerr.h
index 3d07ecc8135439..b159ef8127c044 100644
--- a/deps/openssl/openssl/include/openssl/sslerr.h
+++ b/deps/openssl/openssl/include/openssl/sslerr.h
@@ -1,6 +1,6 @@
 /*
  * Generated by util/mkerr.pl DO NOT EDIT
- * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved.
  *
  * Licensed under the Apache License 2.0 (the "License").  You may not use
  * this file except in compliance with the License.  You can obtain a copy
@@ -284,10 +284,12 @@
 # define SSL_R_TLSV1_ALERT_INAPPROPRIATE_FALLBACK 1086
 # define SSL_R_TLSV1_ALERT_INSUFFICIENT_SECURITY 1071
 # define SSL_R_TLSV1_ALERT_INTERNAL_ERROR 1080
+# define SSL_R_TLSV1_ALERT_NO_APPLICATION_PROTOCOL 1120
 # define SSL_R_TLSV1_ALERT_NO_RENEGOTIATION 1100
 # define SSL_R_TLSV1_ALERT_PROTOCOL_VERSION 1070
 # define SSL_R_TLSV1_ALERT_RECORD_OVERFLOW 1022
 # define SSL_R_TLSV1_ALERT_UNKNOWN_CA 1048
+# define SSL_R_TLSV1_ALERT_UNKNOWN_PSK_IDENTITY 1115
 # define SSL_R_TLSV1_ALERT_USER_CANCELLED 1090
 # define SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE 1114
 # define SSL_R_TLSV1_BAD_CERTIFICATE_STATUS_RESPONSE 1113
diff --git a/deps/openssl/openssl/include/openssl/tls1.h b/deps/openssl/openssl/include/openssl/tls1.h
index 40ac50e9757ce4..d005d3c16dfff9 100644
--- a/deps/openssl/openssl/include/openssl/tls1.h
+++ b/deps/openssl/openssl/include/openssl/tls1.h
@@ -1,5 +1,5 @@
 /*
- * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved.
+ * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved.
  * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved
  * Copyright 2005 Nokia. All rights reserved.
  *
@@ -214,6 +214,8 @@ extern "C" {
 # define TLSEXT_max_fragment_length_1024 2
 # define TLSEXT_max_fragment_length_2048 3
 # define TLSEXT_max_fragment_length_4096 4
+/* OpenSSL value for unset maximum fragment length extension */
+# define TLSEXT_max_fragment_length_UNSPECIFIED 255
 
 int SSL_CTX_set_tlsext_max_fragment_length(SSL_CTX *ctx, uint8_t mode);
 int SSL_set_tlsext_max_fragment_length(SSL *ssl, uint8_t mode);
diff --git a/deps/openssl/openssl/os-dep/Apple/PrivacyInfo.xcprivacy b/deps/openssl/openssl/os-dep/Apple/PrivacyInfo.xcprivacy
new file mode 100644
index 00000000000000..285dd5bebae800
--- /dev/null
+++ b/deps/openssl/openssl/os-dep/Apple/PrivacyInfo.xcprivacy
@@ -0,0 +1,23 @@
+<?xml version="1.0" encoding="UTF-8"?>
+<!DOCTYPE plist PUBLIC "-//Apple//DTD PLIST 1.0//EN" "http://www.apple.com/DTDs/PropertyList-1.0.dtd">
+<plist version="1.0">
+<dict>
+    <key>NSPrivacyAccessedAPITypes</key>
+    <array>
+        <dict>
+            <key>NSPrivacyAccessedAPIType</key>
+            <string>NSPrivacyAccessedAPICategoryFileTimestamp</string>
+            <key>NSPrivacyAccessedAPITypeReasons</key>
+            <array>
+                <string>C617.1</string>
+            </array>
+        </dict>
+    </array>
+    <key>NSPrivacyCollectedDataTypes</key>
+    <array/>
+    <key>NSPrivacyTrackingDomains</key>
+    <array/>
+    <key>NSPrivacyTracking</key>
+    <false/>
+</dict>
+</plist>
diff --git a/deps/openssl/openssl/providers/fips-sources.checksums b/deps/openssl/openssl/providers/fips-sources.checksums
index c45b030606756c..2075eca274d6f1 100644
--- a/deps/openssl/openssl/providers/fips-sources.checksums
+++ b/deps/openssl/openssl/providers/fips-sources.checksums
@@ -4,71 +4,71 @@ c049a936d74100fcced225f575d46662792a6a0039777d2d4df0cf61eff90a68 crypto/aes/aes
 c1e674d08683a25bc053f6233f73a0d0b3a90aafe591ff57b702c7da1582e4a5 crypto/aes/aes_local.h
 a2466f18da5847c7d9fbced17524633c10ce024671a72f53f9c9c55b9b9923dd crypto/aes/aes_misc.c
 6979c133f76f4623e62e6e970deae70fa025e713a72b71aead5a048d49e47f6f crypto/aes/asm/aes-586.pl
-92be9ff608331a432e95247a8f4fb9e46897d0cb76f2b6db809b61d44287964a crypto/aes/asm/aes-armv4.pl
-953897f86e2de9fa27ef411155ab3aed133af94885f1507e76449c142da78656 crypto/aes/asm/aes-c64xplus.pl
+2eef5f20f1410b48bdaaafa24ded24f56f34c4ca79db1d38fa6bf1b3b19535bf crypto/aes/asm/aes-armv4.pl
+38c2cf8ed3910efd89d8721e1b0763a8fde073b91f6529d251165a0496ef9555 crypto/aes/asm/aes-c64xplus.pl
 00196f01f5218ad731e6a058d406078f7228a9756d9d73f51c0d0c2a68f885af crypto/aes/asm/aes-ia64.S
-88b6f8396cd9d86004743d5c3b0f72b7b8c3d5a2b00b0bbb761ba91ae5a7cdc8 crypto/aes/asm/aes-mips.pl
-7ff9c96ef3d591d45d776fa4b244601ea0d9328e289aeab1e1b92436ce7d02ad crypto/aes/asm/aes-parisc.pl
-f1244cdeadcb4e48f35bc5df19d4cfaf07e0086ad951b84f07ff6966501faa5b crypto/aes/asm/aes-ppc.pl
-ecbfe826f4c514810c3ee20e265f4f621149694c298554b2682e5de4f029f14f crypto/aes/asm/aes-s390x.pl -ee4e8cacef972942d2a89c1a83c984df9cad87c61a54383403c5c4864c403ba1 crypto/aes/asm/aes-sparcv9.pl -2b3b9ac56bf54334d053857a24bdb08592151e8a7a60b89b8195846b7f8ee7b5 crypto/aes/asm/aes-x86_64.pl -c56c324667b67d726e040d70379efba5b270e2937f403c1b5979018b836903c7 crypto/aes/asm/aesfx-sparcv9.pl -14359dc32b7f4e5c08227fb9ac8f9232c1287399463b233fec4a2ab0c19f68d1 crypto/aes/asm/aesni-mb-x86_64.pl -2fe016e8098d1c959b6199ce98e91dfed9a3a543d6b068daf88d4c4c402701ec crypto/aes/asm/aesni-sha1-x86_64.pl -1d3acabadedb88d1327eeb76201ea9b3f4814f44898018ffae6c73e3f400b89b crypto/aes/asm/aesni-sha256-x86_64.pl +b4ef595194fe1692e1ab2b561f385da01b277cf004902e8fc99e8ac5389bbd35 crypto/aes/asm/aes-mips.pl +123c4498c94040b70708fdd911cb08c6411b020b4cf3eb761d6fa22c583c3e6f crypto/aes/asm/aes-parisc.pl +7a7f2f90791415ef4ffc1ba2a6f6b6fe994bfe0e03d3bf9dab6e428e6874695c crypto/aes/asm/aes-ppc.pl +d139e5ad69560fd0ffd8aa2e72304e463650cea4c657be7a90e0d1eb782d580a crypto/aes/asm/aes-s390x.pl +133ba35d77002abcd430414749c4e98c4a319630da898e45ff8dbc5800176df1 crypto/aes/asm/aes-sparcv9.pl +c98690249d490d23e6fee84f672f1463ffc029427110a4329244a59e4e4aaed8 crypto/aes/asm/aes-x86_64.pl +7ec99947b47e56595f0b085b8bda0b3113112f694e78b1f71b63ecd1f0fa2c67 crypto/aes/asm/aesfx-sparcv9.pl +ab94a27e533e164bcf09898a6f6019f43609d51a3b374cf75482dcf2914d464e crypto/aes/asm/aesni-mb-x86_64.pl +74939261340a0056eb9333fff1c843c8758b9f93de3d94650cd6d2899c6790d8 crypto/aes/asm/aesni-sha1-x86_64.pl +ce91f0893a2a35fdf4c024ccb0fd8329b30fdbd955f0ae011ab948101ee14951 crypto/aes/asm/aesni-sha256-x86_64.pl 4ff74d4e629a88ef5a9e3d3f5b340fc0a4793d16d7cc7f1b70da62512a856248 crypto/aes/asm/aesni-x86.pl -c7c6694480bb5319690f94826139a93f5c460ebea6dba101b520a76cb956ec93 crypto/aes/asm/aesni-x86_64.pl -f3a8f3c960c0f47aaa8fc2633d18b14e7c7feeccc536b0115a08bc58333122b6 crypto/aes/asm/aesp8-ppc.pl -e397a5781893e97dd90a5a52049633be12a43f379ec5751bca2a6350c39444c8 crypto/aes/asm/aest4-sparcv9.pl -e3955352a92d56905d63e68937e4758f13190a14a10a3dcb1e5c641c49913c0c crypto/aes/asm/aesv8-armx.pl -5e8005fdb6641df465bdda20c3476f7176e6bcd63d5073044a0c02a327c7f172 crypto/aes/asm/bsaes-armv7.pl -0726a2c4c15c27a12b2f7d5e16863df4a1b1daa7b7d9b728f621b2b224d290e6 crypto/aes/asm/bsaes-x86_64.pl -1ff94d6bf6c8ae4809f64657eb89260fe3cb22137f649d3c73f72cb190258196 crypto/aes/asm/vpaes-armv8.pl -c3541865cd02d81101cdbab4877ed82772e6980d2c677b9008b38fa1b26d36d4 crypto/aes/asm/vpaes-ppc.pl +30103cfe3b29d06b34feff48a927e0fa649e9109d35a3db64b09cfeb15426fa2 crypto/aes/asm/aesni-x86_64.pl +f3490c936a80e012c49e577ec6e1d4d36df324dfef6264e788e6225e20b5fd52 crypto/aes/asm/aesp8-ppc.pl +a5807ed92ec8a16d123061487c385bf1f65e50878cee95c8e8096844454129f8 crypto/aes/asm/aest4-sparcv9.pl +d34cf129a8c63e2b77a74117ed4440a4f35408dabd90e21e70eae92d208fa516 crypto/aes/asm/aesv8-armx.pl +a0b578b7d2787c91013547df07dfa73d8d7a420446dd624c66f7c55159817eb2 crypto/aes/asm/bsaes-armv7.pl +34accd08242a6bf4a751105f89b0c4de2cd7e54320753587815647abff7124de crypto/aes/asm/bsaes-x86_64.pl +d9bc047db9b2f54f27fe0d6e2ede9239b4a1f57a14bf89fa3cfba6b836599386 crypto/aes/asm/vpaes-armv8.pl +516421b1a321b842f879ad69e7b82ae3e1f3efc8288c83bb34d6577996e85787 crypto/aes/asm/vpaes-ppc.pl 3ec24185750a995377516bc2fb2eae8b1c52094c6fff093bff591837fc12d6c3 crypto/aes/asm/vpaes-x86.pl -060bb6620f50af9afecdf97df051b45b9a50be9daf343dfec1cbb29693ce00a4 crypto/aes/asm/vpaes-x86_64.pl -2bc67270155e2d6c7da87d9070e005ee79cea18311004907edfd6a078003532a 
crypto/alphacpuid.pl -0255a480b78bdcc71f76676f496962a9828eb900f53b7be13be96ae3f67fe6db crypto/arm64cpuid.pl +47bedbe6a04254eede121e71f11a657b1f1940aee1916bbfc04fa9fb8454f9b8 crypto/aes/asm/vpaes-x86_64.pl +1c9a2a0e8cee4a1283c74b2e306f46f79890f6d236394de2a80d1994fd411d1d crypto/alphacpuid.pl +7a37cadacdbecb50304228dfcb087ad7fbb6e31f6ab69c52dd161e79afb2f9ca crypto/arm64cpuid.pl e0daf54f72dd8fd1bc537d93f34e2a6a887a9ed6027bb33e15a327ef5ff37a42 crypto/armcap.c -a43f2c1eef16146943745f684f2add7d186924932a47abf7fb0760cba02804e6 crypto/armv4cpuid.pl +24cc7611225df0e20e414c14e80516c36d48bf99659946e85a876d8757356686 crypto/armv4cpuid.pl 16739d54200fb81ca7835b5814f965022a2ab41589c7787e2697e3ea72d4fafa crypto/asn1_dsa.c -819c9fd2b0cae9aab81c3cbd1815c2e22949d75f132f649b5883812d0bbaa39a crypto/bn/asm/alpha-mont.pl -0070595128b250b9ebdebe48ce53d2d27ca16ec4f7c6c8bd169ab2e4a913b2d1 crypto/bn/asm/armv4-gf2m.pl -8c1c53a725b8a4f92b8a353bfeeb393be94198df41c912e3270f9e654417b250 crypto/bn/asm/armv4-mont.pl -8d6192337fedb0012764229d600634f8357c3b74fd38bcbfe8b86ddc6ca96ea2 crypto/bn/asm/armv8-mont.pl +155eff9d747eed808398cfa2af4b276dfc1f9aac8a0f9d801b314ab3f2bf5b56 crypto/bn/asm/alpha-mont.pl +894cc71b2d783e4e1b54dbef45e9e9280165a2c43981ebdd03282f0e90914928 crypto/bn/asm/armv4-gf2m.pl +0d2e31dc9cdce02c619adfc9ac720ccf7171384e76a84cdf0e686a805dd7006e crypto/bn/asm/armv4-mont.pl +d7df31176f725c1ae7241fee8f681fdcf2ab9eb4d3cc6c80d49c2248ae40a56a crypto/bn/asm/armv8-mont.pl cb4ad7b7461fcb8e2a0d52881158d0211b79544842d4eae36fc566869a2d62c8 crypto/bn/asm/bn-586.pl -636da7e2a66272a81f9c99e90b36c6f132ad6236c739e8b9f2e7315f30b72edd crypto/bn/asm/c64xplus-gf2m.pl +10fb73a6cc1bc064ebdcf6d7fe3c7407ea1c28b0d65ad0123046f8b1518fa75a crypto/bn/asm/c64xplus-gf2m.pl c86664fb974362ee52a454c83c2c4b23fd5b7d64b3c9e23ef1e0dfd130a46ee5 crypto/bn/asm/co-586.pl -199b9b100f194a2a128c14f2a71be5a04d50d069666d90ca5b69baee1318ccb7 crypto/bn/asm/ia64-mont.pl +b88190d748056e6a64988bf1a3d19efc4c292e3d338a65f4505cf769a2041077 crypto/bn/asm/ia64-mont.pl a511aafbf76647a0c83705d4491c898a5584d300aa449fa6166c8803372946eb crypto/bn/asm/ia64.S -687c5d6606fdfd0e242005972d15db74a9cbac2b8a9a54a56fcb1e99d3880ff3 crypto/bn/asm/mips-mont.pl -8aca83d2ec45a40af15e59cff1ac2dc33737a3d25f0a0b74d401fa778a5c5eb8 crypto/bn/asm/mips.pl -b27ec5181e387e812925bb26823b830f49d7a6e4971b6d11ea583f5632a1504b crypto/bn/asm/parisc-mont.pl -9973523b361db963eea4938a7a8a3adc692e1a4e1aec4fa1f1e57dc93da37921 crypto/bn/asm/ppc-mont.pl -59cd27e1e10c4984b7fb684b27f491e7634473b1bcff197a07e0ca653124aa9a crypto/bn/asm/ppc.pl +fee42cabeeb87cdf0fa0a6ff3698b2fe98a8a47d10a756052df572097161a8b9 crypto/bn/asm/mips-mont.pl +b197a8e1be79b8c21f8d26b34b9a282ca42ec4bcd1f3212fde3889747082a1f7 crypto/bn/asm/mips.pl +13df09cee06a21669137294f92e5c31b4bf05a8035be6800c1cb4403d7cd8290 crypto/bn/asm/parisc-mont.pl +25c96e545b4981d45557eb14ea5c83aa2d6375ae0df806cb6e6ded2f59ddfed3 crypto/bn/asm/ppc-mont.pl +1c057083546fa1a3bb1b9819dc5110f5a3b11b7bf5a2fb275012323bd7412403 crypto/bn/asm/ppc.pl e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855 crypto/bn/asm/ppc64-mont-fixed.pl -a25be64867ab837d93855af232e2bfa71b85b2c6f00e35e620fdc5618187fb6f crypto/bn/asm/ppc64-mont.pl -231579e532443665020d4d522d9f11713d9c5d5c814b95b434b0f65452e16de4 crypto/bn/asm/rsaz-avx2.pl -1657600d320ea549b527b2d878a7658533d60d26eeb38f42ea470fc612f9bb53 crypto/bn/asm/rsaz-avx512.pl -31e84dc905b13e38850071528d3abbfcaf8910bbc8b46f38d19c2b386a5f838e crypto/bn/asm/rsaz-x86_64.pl 
-30fedf48dfc5fec1c2044b6c226dd9fc42a92522cc589797a23a79d452bdd2cf crypto/bn/asm/s390x-gf2m.pl -590388d69d7ac3a0e9af4014792f4f0fdb9552719e8fb48ebc7e5dfca2a491d4 crypto/bn/asm/s390x-mont.pl +fe9278a2504fb40257637a4718081775c29c4eb81f87a8528e5c85f8d0c6281a crypto/bn/asm/ppc64-mont.pl +94b2d5cf0faf2efddeb5fb7c575dabc35c1791715cc9299d59a01d9f96cb2d6f crypto/bn/asm/rsaz-avx2.pl +cd0861a565231f67252e172420f6914fe47a324b35916c29f6304491447fe84c crypto/bn/asm/rsaz-avx512.pl +c19c717d87dd1ba74f138af05c044c05f5d025e26323637f46ba54a8c871a378 crypto/bn/asm/rsaz-x86_64.pl +ae26becda9f6d30e9edde8bb89c251a0c40a9a6c879c4cdaec273d8c09af9cd6 crypto/bn/asm/s390x-gf2m.pl +2700337ef133d6688047a1a8e1c671db06016aae777679923ce2b301896762cf crypto/bn/asm/s390x-mont.pl aa02597f3dc09cfbc190aedb75711859ba0f3efff87067ebfba1ec78ebee40d7 crypto/bn/asm/s390x.S -2f7cbc2c3d93b1bbc4953dda38b9ae0ab3a0a8331a0418d94d9b286183736c9e crypto/bn/asm/sparct4-mont.pl +87d49e83a7df467097fdfc577aa206be9ee622c40fcbbbe5133b35d9783b7816 crypto/bn/asm/sparct4-mont.pl ca21a9ccbc54e19fb7c2e6cdf286ce7cb08b0fba960c777c6edce5c57ccc2101 crypto/bn/asm/sparcv8.S fbc93c8dbbecefe66086f58fe9719ed87b13b2cdc61454a10e841228296fecef crypto/bn/asm/sparcv8plus.S -127832c1e3d298aad805236776488f5f8836b6a0fdbce3f6b42678163df3909f crypto/bn/asm/sparcv9-gf2m.pl -1622f04a8918724ac0e8804baf285fdafa0eeaaecc36c7facd459d0ff13a8cac crypto/bn/asm/sparcv9-mont.pl -b69083f78b4b4f7097de4462d16649532fb82c453a82cdd9cc1393122661d6e2 crypto/bn/asm/sparcv9a-mont.pl +2ec1497fa06826f7bc574239e425dd8dda0d4a2743e1fe87669ede900291fcb6 crypto/bn/asm/sparcv9-gf2m.pl +1f490fe184c7a51b2d0646a59e69aa659bfe51270ad21594951b8d7b785bac38 crypto/bn/asm/sparcv9-mont.pl +277dcb7faa1913b25fd43946c50039bcdd45cb643fd9ddeedd6c207cefa4dd50 crypto/bn/asm/sparcv9a-mont.pl d404375a21d33396824a3da212d6646d4f3150dd141ee4b4a250aefae3482efb crypto/bn/asm/via-mont.pl -d24f3e97239c8eed5efc721521b025b7256c15e67a54ea6b5c4cf8f7cd0f89ea crypto/bn/asm/vis3-mont.pl +d632edf9b9bab7d2cd2d616512a98d15cf4b3ebba7a8e7b83650d654ceb52ecb crypto/bn/asm/vis3-mont.pl 89278854f44d95be916516609ce6f79dcd346bab52574b9b6336a9952aa94bee crypto/bn/asm/x86-gf2m.pl 90d4ae234c08267adce9ed38d56e0edc223f7480cb9605f5d7399d0b3914c6be crypto/bn/asm/x86-mont.pl d444ca73875e97e0ea88b20e4c02f2fcf3850e8b9311e3b67a2d04fe2796d543 crypto/bn/asm/x86_64-gcc.c -709ddee92e9222ee0ed27bfb90db556e85e2d302e4a9131afa25fdc14c4d858f crypto/bn/asm/x86_64-gf2m.pl -da7f7780d27eed164797e5334cd45b35d9c113e86afaca051463aef9a8fd787c crypto/bn/asm/x86_64-mont.pl -259fb8d7f40c0dba46920b1f169d5b37de03b0fda645463d19e3ae2b56de851d crypto/bn/asm/x86_64-mont5.pl +a5481ca55d94dc7ebdc93173610d38ae2569cea1fe9b5180debe0ab94e455ce1 crypto/bn/asm/x86_64-gf2m.pl +d8cc080824a72774cb3343a3d50ddf8f41a5b8321203d4c9a764762b62498b96 crypto/bn/asm/x86_64-mont.pl +03788cb685268e6a50ddfa742ea1fe937570c9b86f2ebc88ee35f3304f67c045 crypto/bn/asm/x86_64-mont5.pl 0ea8185a037a2951bb3d1e590bbbdeac305176d5e618f3e43a04c09733a9de34 crypto/bn/bn_add.c 759c2b9df808b3562fe8b0c7778dbadbf35f261e14fc2d5090d18c35b4181760 crypto/bn/bn_asm.c 14bd5a35c05fcf454854b92fb30b356d7ac618c1eb699dd798f6ad2936d1f5ee crypto/bn/bn_blind.c @@ -83,7 +83,7 @@ baba7c8ae95af6aa36bc9f4be3a2eed33d500451e568ca4bfc6bc7cb48d4f7ea crypto/bn/bn_g 5fbb1ab8463cd5544a1d95cf7996b6387ae634984a42256b7a21482ce3ac30a2 crypto/bn/bn_gf2m.c 081e8a6abc23599307dab3b1a92113a65e0bf8717cbc40c970c7469350bc4581 crypto/bn/bn_intern.c 602ed46fbfe12c899dfb7d9d99ff0dbfff96b454fce3cd02817f3e2488dd9192 crypto/bn/bn_kron.c 
-bf73a1788a92142963177fb698bc518af9981bbf0ad9784701fbb2462ca10607 crypto/bn/bn_lib.c +81a4afc27dd1e90c4bfa81c8d385214ce8a2b5884537752944a71ebebd91f4b0 crypto/bn/bn_lib.c d5beb9fbac2ff5dc3ccbdfa4d1aabca7225c778cff4e3b05b6d6c63e182637f5 crypto/bn/bn_local.h 96f98cdf50087c5b567c31bf2581728623206d79b3f97f5a0c5fdaa0009e6e3c crypto/bn/bn_mod.c f60f3d49b183b04bcdf9b82f7c961b8c1bcb00e68a2c1166fe9edd95a783356e crypto/bn/bn_mont.c @@ -92,10 +92,10 @@ f60f3d49b183b04bcdf9b82f7c961b8c1bcb00e68a2c1166fe9edd95a783356e crypto/bn/bn_m b3677b73ac29aab660c9a549f7af154ca14347fac5cffd43b153a75211f1373f crypto/bn/bn_nist.c c6760a724d696b7209f0a71f8483fabcf4f081f7e93e2628284c32ef78f69365 crypto/bn/bn_prime.c c56ad3073108a0de21c5820a48beae2bccdbf5aa8075ec21738878222eb9adc3 crypto/bn/bn_prime.h -628419eabdb88b265823e43a7a1c88fdfecef79771180836f6089050dc9eadb1 crypto/bn/bn_rand.c +71186d5bd40d467a919e6449d8aa23d13df88e0c85765d1a165f3eeec6bd33a7 crypto/bn/bn_rand.c 4df8f204c8a06de2b4395be613ca0b9943613c523586e2005876d5c7bb891c75 crypto/bn/bn_recp.c a5c5c9f99961a5a7f22a3dcdce964c8a330f822be17f08652223a20fed747d0a crypto/bn/bn_rsa_fips186_4.c -704b0b4723e5c9e9bae5f3e35f9ae8ae8dca3383929e954de9e5169845abfdb2 crypto/bn/bn_shift.c +6889866bca4673bccb8adf870859a867757ccd3c8ad4652675615afff710add3 crypto/bn/bn_shift.c 622e90766b29e0d25f46474429aebda8eba2246835b9e85dc26da7cdbd49334f crypto/bn/bn_sqr.c 42c8ce944c889abcfcf089d0ad2744b7587696d8d7785efa91b3f7ec53dc062a crypto/bn/bn_sqrt.c 24e62baa56e02f2db6454e10168b7c7fa7638db9221b9acda1803d43f38f36e0 crypto/bn/bn_word.c @@ -104,9 +104,9 @@ c4d64da1cdc732ea918fccd6a7bb2746b03365dd26f7ba1e74e08c307ca4c58e crypto/bn/rsaz 5b82cb8dbf3087c2e671871cb0a92e4039223a51af533a2ee996f3bfd47453a7 crypto/bn/rsaz_exp_x2.c 834db8ff36006e5cb53e09ca6c44290124bd23692f4341ea6563b66fcade4cea crypto/bsearch.c c39334b70e1394e43f378ae8d31b6e6dc125e4d9181e6536d38e649c4eaadb75 crypto/buffer/buffer.c -5f43844b5d8665de9ab895f93599150a327d73ec2674bbf7d7c512d30163022d crypto/c64xpluscpuid.pl +d2bfdfd96b182741d2d51f91478ffcc48491b0da44662bc1c32bc506b3eef1ba crypto/c64xpluscpuid.pl 0e1a41a2d81b5765bca3df448f60bf1fad91e485fe89dd65a7300ffc419e316d crypto/cmac/cmac.c -ff9be205d6d7ff00b0e64508f0eb8d9ec0415fbabc0948d26e308212b3f7b2d8 crypto/context.c +5113d8d12d884f845cad3d35d92f0a1ee20ebafd7a169273642f4e8178711de9 crypto/context.c c309d81ea991ddf5be4337afad2fd132169f7443c76f863349d3f3c82f3374e4 crypto/core_algorithm.c f0fd9eb38bf7f196bbb4d26ce8fdf86d0a4f9db219157e66b2c0ffefb4f42005 crypto/core_fetch.c 799c84d224639c6760c5c28e0e287500a973ca6d0c3d7c1bdcd61b0da4018b3c crypto/core_namemap.c @@ -130,28 +130,28 @@ b0c248efc7dad48eaceb939a18cb2592cbfe5b02dd406592e5e590645488b153 crypto/dh/dh_k 92345c259ea2a8c09e6d6b069d0942bd6ca4642231580f3e8148ae7a832a1115 crypto/dh/dh_lib.c 8300775d88db0a1aa26a77eb49d6c4f7252e7fee69e1440de4c40edadc9da044 crypto/dh/dh_local.h bbcf4fc3067ac462a27d7277973180b7dc140df9262a686c7fbe4318ca01f7b8 crypto/dsa/dsa_backend.c -d7e0d87494e3b3f0898a56785a219e87a2ce14416393ec32d8c0b5f539c7bdbf crypto/dsa/dsa_check.c +786d6c65ced7ee4e25f5dd7c3150259ec95b6aa321a7590d905757b8139f8230 crypto/dsa/dsa_check.c ae727bf6319eb57e682de35d75ea357921987953b3688365c710e7fba51c7c58 crypto/dsa/dsa_gen.c b1de1624e590dbf76f76953802ff162cc8de7c5e2eaba897313c866424d6902b crypto/dsa/dsa_key.c 9f4837c5abe53613a2dc1c5db81d073d4f42bd28b6a2d1e93a2b350d8e25d52a crypto/dsa/dsa_lib.c f4d52d3897219786c6046bf76abb2f174655c584caa50272bf5d281720df5022 crypto/dsa/dsa_local.h 
-c5c252f205482a71efeabe226d51a1c541a6ba2dfa9b8b8a70901087a9dc1667 crypto/dsa/dsa_ossl.c -d612fd05ff98816ba6cf37f84c0e31443ad9d840ed587a7ab2066027da390325 crypto/dsa/dsa_sign.c +196dc024873e413d92672c3a9b6c062ed6269250b0da6d41c0da1c03cfec9ef8 crypto/dsa/dsa_ossl.c +9f501a59c09fc3cb3caafaff25abd44397a94d1062950a4d62e855d2c8986b5a crypto/dsa/dsa_sign.c 53fa10cc87ac63e35df661882852dc46ae68e6fee83b842f1aeefe00b8900ee1 crypto/dsa/dsa_vrf.c -d9722ad8c6b6e209865a921f3cda831d09bf54a55cacd1edd9802edb6559190a crypto/ec/asm/ecp_nistp521-ppc64.pl -78ad06b88fcc8689a3a846b82f9ee01546e5734acd1bccf2494e523b71dc74d1 crypto/ec/asm/ecp_nistz256-armv4.pl -4617351d2de4d0b2abfd358c58050cee00702d0b4c1acca09312ec870e351c7d crypto/ec/asm/ecp_nistz256-armv8.pl -3715ddd921425f3018741037f01455ed26a840ace08691a800708170a66cf4d2 crypto/ec/asm/ecp_nistz256-ppc64.pl -cfe7e75a2fddc87a7251684469a8808b9da82b2f5725eafad5806920f89932bd crypto/ec/asm/ecp_nistz256-sparcv9.pl +786779d7014bc04846832f80638743784a3850c7ee36e4a8062fe8eb7ac31c9b crypto/ec/asm/ecp_nistp521-ppc64.pl +2e3056ea14fab8b306b0281d6a6f4317a6e86dbf652a79ade726e716cd79bb1e crypto/ec/asm/ecp_nistz256-armv4.pl +a02edef19d22c5aba196080942111ab0172fc2ebe6d6c40db2beb6a1a2d885c6 crypto/ec/asm/ecp_nistz256-armv8.pl +729729f8233c95138158f4647b33a36cf175e707ce29563db0eedc811f324ec0 crypto/ec/asm/ecp_nistz256-ppc64.pl +78a5b172f7c13ae8ac622439ffb9d99b240dbb4bbda3f5c88d1533ae74a445ad crypto/ec/asm/ecp_nistz256-sparcv9.pl 922725c4761cfa567af6ed9ecab04f2c7729ae2595f2fc0fa46dc67879dc87b0 crypto/ec/asm/ecp_nistz256-x86.pl -ac327475c7ec828d11aa05628b4e3b81ec3b1400f30fe7bec01daf3cf71f2dc9 crypto/ec/asm/ecp_nistz256-x86_64.pl -cc727533130f5f1a29229929b3d4e8454585d647be25d6344f3c6a0240998368 crypto/ec/asm/x25519-ppc64.pl -ee897e230964511baa0d1bf95fb938312407a40a88ebe01476879c2763e5f732 crypto/ec/asm/x25519-x86_64.pl +19ba01af58788e2873ebc1d5b503a76604bec0b9b6296fa794946e141fc945a4 crypto/ec/asm/ecp_nistz256-x86_64.pl +e806141073aa3792e2748f6feeee6d3017124b3bc6059a9eca0d53a2f5785346 crypto/ec/asm/x25519-ppc64.pl +a397592dc9fdb13016311db6184b4a3a4f2e198aacb03528f770f30ea4966cc4 crypto/ec/asm/x25519-x86_64.pl 340336e01aa04fcde9bfd56536f90c9bc0ad56a002b6cfa321a1e421f1e93ceb crypto/ec/curve25519.c 9a95ec8366154bb20aeb24f4767a8cbb9953ca0380708eb2f39caca6078cd59e crypto/ec/curve448/arch_32/f_impl32.c 063dac1e4a9573c47532123e9e03e3532a7473cc3e146521ba9ec6f486ddf3b1 crypto/ec/curve448/arch_64/arch_intrinsics.h 43423b7ee85a5c740c1d81499ee06f4a17732c7731a598e7429d5e402ee77cf4 crypto/ec/curve448/arch_64/f_impl.h -1689097ae10e4982a8cbe50c2f6eddb03c83436f331f0b67edb98d6b58adc962 crypto/ec/curve448/arch_64/f_impl64.c +012d4a9c8aed4a66cd3a3eef17d4b4d8f3c6f384449cd057bd292b98e072a283 crypto/ec/curve448/arch_64/f_impl64.c 9b408ec0d43f3b6d714ef5963147e2c2abaddc88633db7dd759193d3c56ed727 crypto/ec/curve448/curve448.c 3c12d90e3fdd59b5d32d63186f1a6f15c75eb73f5035b844a2054356a9459780 crypto/ec/curve448/curve448_local.h 178fb9863c33174b633c2e7607160b1bedb506d66cc06d53382d87431441f306 crypto/ec/curve448/curve448_tables.c @@ -178,7 +178,7 @@ fa901b996eb0e460359cd470843bdb03af7a77a2f1136c5e1d30daef70f3e4d2 crypto/ec/ec_m 129c6b42417bfcf582f4a959cfd65433e6f85b158274f4fa38f9c62615ac9166 crypto/ec/ec_oct.c c7fba2f2c33f67dafa23caef8c3abd12f5336274a9a07d412b83be0366969ee6 crypto/ec/ecdh_kdf.c b2cf8f052a5716137da7b0e857ed7a5df5fb513b6d14534199a05e32f2b5a866 crypto/ec/ecdh_ossl.c -099f7836a31643c58bda3829090ea81fe3d5acaa4c6f7b145d8355a4293d0ccc crypto/ec/ecdsa_ossl.c 
+2e00c2e0e6f6d58b81fc23fe500f59e98793dc828ca87d64eba10cc0fddd0dc1 crypto/ec/ecdsa_ossl.c b6baa42b16e8df69a12e0ab101033100cddc808ec2682ba1574373e6ec86ae93 crypto/ec/ecdsa_sign.c f686cea8c8a3259d95c1e6142813d9da47b6d624c62f26c7e4a16d5607cddb35 crypto/ec/ecdsa_vrf.c 141cfc1459214555b623517a054a9e8d5e4065a11301237b7247be2c6f397a0a crypto/ec/ecp_mont.c @@ -191,7 +191,7 @@ e2705097cfab64e8d7eb2feba37c3f12b18aec74b135ad0c7f073efccf336d4c crypto/ec/ecx_ 22c44f561ab42d1bd7fd3a3c538ebaba375a704f98056b035e7949d73963c580 crypto/ec/ecx_key.c 28abc295dad8888b5482eb61d31cd78dd80545ecb67dc6f9446a36deb8c40a5e crypto/evp/asymcipher.c 0e75a058dcbbb62cfe39fec6c4a85385dc1a8fce794e4278ce6cebb29763b82b crypto/evp/dh_support.c -1af3872164b4a4757bc7896a24b4d2f8eb2cfb4cba0d872a93db69975693e0a6 crypto/evp/digest.c +8f9e9da65ab1d0fb3feae5abd6b5c3649d3a4d03e936bb7624a431080de181ae crypto/evp/digest.c 838277f228cd3025cf95a9cd435e5606ad1fb5d207bbb057aa29892e6a657c55 crypto/evp/ec_support.c 61df3942752307b7006f09d7628348a0cc9e5555469a3a8862349067a52824b7 crypto/evp/evp_enc.c 62c994fd91dc4a5a1a81dfa9391d6eadae62d3549b2e1b22acb2e7c4cd278f27 crypto/evp/evp_fetch.c @@ -203,7 +203,7 @@ ca8c6cfd30efd53f2e5d1f19bcf09a3a3d0dff6d8947c3943d07a3f4b354aa86 crypto/evp/exc 9e25042581b73e295c059c6217f3ecf809134d518eb79b1b67f34e3ca9145677 crypto/evp/kdf_lib.c 1d72f5506984df1df8606e8c7045f041cf517223e2e1b50c4da8ba8bf1c6c186 crypto/evp/kdf_meth.c 5179624b8e03615dc9caedc9ec16d094fa081495613dd552d71c2c39475bcd83 crypto/evp/kem.c -5016dd7ef8b4cf7e9ea8465c18d1daa4c8808cb589261cf236058ee75bc868d7 crypto/evp/keymgmt_lib.c +5cf3e490bf917bd37ae70313d126ae4720432fbec518e4a45e8fa886d5e1689a crypto/evp/keymgmt_lib.c 46ffdc73f8a7fc314dc8988f2751a6e9f9784719f4f162dc4be2450b65b55261 crypto/evp/keymgmt_meth.c e1a052839b8b70dca20dbac1282d61abd1c415bf4fb6afb56b811e8770d8a2e1 crypto/evp/m_sigver.c 4290c95f63b43688a8da57690d122add5161a6811f9753da1444d28f46739961 crypto/evp/mac_lib.c @@ -212,7 +212,7 @@ e7e8eb5683cd3fbd409df888020dc353b65ac291361829cc4131d5bc86c9fcb3 crypto/evp/mac 3b4228b92eebd04616ecc3ee58684095313dd5ffd1b43cf698a7d6c202cb4622 crypto/evp/pmeth_check.c 1f0e9e94e9b0ad322956521b438b78d44cfcd8eb974e8921d05f9e21ba1c05cf crypto/evp/pmeth_gn.c 76511fba789089a50ef87774817a5482c33633a76a94ecf7b6e8eb915585575d crypto/evp/pmeth_lib.c -4b2dbddf0f9ceed34c3822347138be754fb194febca1c21c46bcc3a5cce33674 crypto/evp/signature.c +53058617c153a7676e7ca18c98c23df867a93087d67935907076f3c5bd65c15e crypto/evp/signature.c f2acfb82aac20251d05a9c252cc6c282bd44e43feac4ac2e0faf68b9a38aef57 crypto/ex_data.c 1c8389c5d49616d491978f0f2b2a54ba82d805ec41c8f75c67853216953cf46a crypto/ffc/ffc_backend.c a12af33e605315cdddd6d759e70cd9632f0f33682b9aa7103ed1ecd354fc7e55 crypto/ffc/ffc_dh.c @@ -228,19 +228,19 @@ f897493b50f4e9dd4cacb2a7accda6683c10ece602641874cdff1dac7128a751 crypto/initthr 7290d8d7ec31a98b17618f218d4f27b393501c7606c814a43db8af1975ad1d10 crypto/lhash/lhash.c 5d49ce00fc06df1b64cbc139ef45c71e0faf08a33f966bc608c82d574521a49e crypto/lhash/lhash_local.h f866aafae928db1b439ac950dc90744a2397dfe222672fe68b3798396190c8b0 crypto/mem_clr.c -e14f48d4112c0efe3826b4aa390cc24045a85298cc551ec7f3f36ac4236d7d81 crypto/modes/asm/aes-gcm-armv8_64.pl -1d686af304f94743038f916125effcb51790c025f3165d8d37b526bbeee781f0 crypto/modes/asm/aesni-gcm-x86_64.pl -c2e874a8deb418b5d8c935b2e256370566a5150e040c9fa008cdb5b463c26904 crypto/modes/asm/ghash-alpha.pl -6bc7d63569c73d7020ede481f2de05221ac92403c7cc11e7263ada7644f6aa9b crypto/modes/asm/ghash-armv4.pl 
-097975df63370de7ebea012d17de14fc1f361fb83acf03b432a99ae7d5bceb24 crypto/modes/asm/ghash-c64xplus.pl -fdde3bc48b37790c6e0006014da71e7a831bbb4fdbfcda2d01dbe0ceb0ba88fa crypto/modes/asm/ghash-ia64.pl -e472d73d06933667a51a0af973479993eed333c71b43af03095450acb36dbeb4 crypto/modes/asm/ghash-parisc.pl -6fb4332ac88113a20915ad4de1931ef88b0114b5379b16e1d967820e1229fbb0 crypto/modes/asm/ghash-s390x.pl -6af1a05981e1d41e4dea51e58938360e3abc4a4f58e179908242466d032b1a8a crypto/modes/asm/ghash-sparcv9.pl +78a20112586dbce2b8b6e509a0f46f6a36f2a4acf53c3f3511daf7932a71c391 crypto/modes/asm/aes-gcm-armv8_64.pl +e482f02932d77d61142548ca4f3c8d5709d88ec14ab84723d82331444c0f57da crypto/modes/asm/aesni-gcm-x86_64.pl +8fdcb4313fa3a6e541a697525856b9527a06ddf4c794f9393e843f86d67f543c crypto/modes/asm/ghash-alpha.pl +ace8c376b394439301cecaf468d2a9a8adae21eff1d43191cefbf6765023452d crypto/modes/asm/ghash-armv4.pl +c22f4945e7de3bd7bfef73447f09983e40a3e4dd0938244d902a1c44c98a8467 crypto/modes/asm/ghash-c64xplus.pl +315a76491cdba48c88df6549c9efd96b50515400810b185a568b7a871681e03d crypto/modes/asm/ghash-ia64.pl +25e9f494fcb6eb636c04af2f322736fae8aa339037e199332c96b8c9c3a50afa crypto/modes/asm/ghash-parisc.pl +f22d5fa646b4fc2db008b6b05ec07c8790d3ad5485d2b10218fd11d0e81030ba crypto/modes/asm/ghash-s390x.pl +de97107e0c19ff9dd4069f0761eccb00e0b3ced345e1f119ab3b918dd2f9c5f6 crypto/modes/asm/ghash-sparcv9.pl 26f55a57e77f774d17dfba93d757f78edfa3a03f68a71ffa37ccf3bfc468b1e2 crypto/modes/asm/ghash-x86.pl -72744131007d2389c09665a59a862f5f6bb61b64bd3456e9b400985cb56586b8 crypto/modes/asm/ghash-x86_64.pl -a4e9f2e496bd9362b17a1b5989aa4682647cefcff6117f0607122a9e11a9dfd9 crypto/modes/asm/ghashp8-ppc.pl -69a13f423ca74c22543900c14aef4a848e3bc75504b65d2f51c6903aebcc17a7 crypto/modes/asm/ghashv8-armx.pl +2a0d23a644083e46745c7cb1ca79de393af9336a2e8eab7c85ffeb3b7b1a286f crypto/modes/asm/ghash-x86_64.pl +b407d9fc6ea65fe1a05edc2d139298d78391f3c165314fa6d56dd375b8e453cd crypto/modes/asm/ghashp8-ppc.pl +d8436f6dc43a18d49b1a16999ecb513ccf4483f418f75edc01ce68e777c614a9 crypto/modes/asm/ghashv8-armx.pl 65112dfe63cd59487e7bdb1706b44acfcf48ecede12cc3ae51daa5b661f41f06 crypto/modes/cbc128.c 1611e73dc1e01b5c2201f51756a7405b7673aa0bb872e2957d1ec80c3530486f crypto/modes/ccm128.c d8c2f256532a4b94db6d03aea5cb609cccc938069f644b2fc77c5015648d148d crypto/modes/cfb128.c @@ -249,7 +249,7 @@ af1c034152d82b29cb7c938c8516cfd136b62bac0908c1d40eb50790d23b288c crypto/modes/c bdf25257b15eca206be4d950d2dd807ca5f058f91f54edbd7a0d312ed83eef8e crypto/modes/ofb128.c e55a816c356b2d526bc6e40c8b81afa02576e4d44c7d7b6bbe444fb8b01aad41 crypto/modes/wrap128.c 608a04f387be2a509b4d4ad414b7015ab833e56b85020e692e193160f36883a2 crypto/modes/xts128.c -8aa2504f84a0637b5122f0c963c9d82773ba248bad972ab92be7169995d162b5 crypto/o_str.c +abba788a11469f5c01c766fdac64eccd4fb598b2d4d9a12efb086ae87009acb8 crypto/o_str.c 8ddbbdf43131c10dcd4428aef0eff2b1e98b0410accada0fad41a4925868beef crypto/packet.c c698d5166d091d6bb6e9df3c211fe1cc916fd43a26ec844f28f547cd708f9c55 crypto/param_build.c 2a0f272dd553b698e8c6fa57962694ebd6064cb03fe26a60df529205568d315d crypto/param_build_set.c @@ -257,14 +257,14 @@ c698d5166d091d6bb6e9df3c211fe1cc916fd43a26ec844f28f547cd708f9c55 crypto/param_b 4fda13f6af05d80b0ab89ec4f5813c274a21a9b4565be958a02d006236cef05c crypto/params_dup.c b6cbfc8791b31587f32a3f9e4c117549793528ebddc34a361bad1ad8cf8d4c42 crypto/params_from_text.c 97cb7414dc2f165d5849ee3b46cdfff0afb067729435d9c01a747e0ca41e230c crypto/ppccap.c -3ca43596a7528dec8ff9d1a3cd0d68b62640f84b1d6a8b5e4842cfd0be1133ad 
crypto/ppccpuid.pl +826a78afb376cbf1e87f12a2a67eef2ee47059a0fd3f9cba7ce7f035e34f8052 crypto/ppccpuid.pl b4d34272a0bd1fbe6562022bf7ea6259b6a5a021a48222d415be47ef5ef2a905 crypto/property/defn_cache.c -3c4ade2fed4605e374d85ec1134a98da34e7124f89f44b81a754e8cfe81f14ba crypto/property/property.c +c3709986fd2ab18f3c6136d8dd7705a4538986aa789ceafe770c3a376db3c569 crypto/property/property.c 66da4f28d408133fb544b14aeb9ad4913e7c5c67e2826e53f0dc5bf4d8fada26 crypto/property/property_local.h -37dba5e1f8a2f8cb8a69e491d52386359c9d08a3c7e43ac1c7a989b72b71593c crypto/property/property_parse.c +b0b382ce829192d2537561cfb0fb5c7afb04305f321f7b3c91441b4ba99b9c92 crypto/property/property_parse.c a7cefda6a117550e2c76e0f307565ce1e11640b11ba10c80e469a837fd1212a3 crypto/property/property_query.c 065698c8d88a5facc0cbc02a3bd0c642c94687a8c5dd79901c942138b406067d crypto/property/property_string.c -0b38639ffc696d6037ace06cc0169bb5c411ee1c6bacc1fa18b3abd82000e69f crypto/provider_core.c +dcc44eba5d01dc248c37ec7b394d48660627c0fa4933d2b93993e1f2ac4b71da crypto/provider_core.c d0af10d4091b2032aac1b7db80f8c2e14fa7176592716b25b9437ab6b53c0a89 crypto/provider_local.h 5ba2e1c74ddcd0453d02e32612299d1eef18eff8493a7606c15d0dc3738ad1d9 crypto/provider_predefined.c a5a4472636b8b0095ad8d4acd37e275ad79da1a67ecff7b7b5c3e46c9ebc65b7 crypto/rand/rand_lib.c @@ -278,7 +278,7 @@ f2222f270e57559537d3da8abbeb1390bc5376b73dae59d536af6e73eb48bba0 crypto/rsa/rsa a65e85be5269d8cb88e86b3413c978fa8994419a671092cbf104ff1a08fda23b crypto/rsa/rsa_local.h cf0b75cd54b61b9b9a290ef18d0ddce9fb26a029a54eb3f720d9b25188440f00 crypto/rsa/rsa_mp_names.c 5c60f6e05db82e13178d805deb1947b8eee4a905e6e77523d3b288da70a46bb5 crypto/rsa/rsa_none.c -33de2accc3af530fd0a4758eb83d5e1d994bf49bac4512b01387dbae656e1a7d crypto/rsa/rsa_oaep.c +f733d03a7f633514bfb33862cd0fa46ac952a86f84000f109c0d37937bac9a1e crypto/rsa/rsa_oaep.c e05fcad237b7e4e7842ad6e142789fe25d060247283c337c78703be6ecc31ed9 crypto/rsa/rsa_ossl.c be3f39c1fcb777d6c0122061f9ef735d10a6bee95d67fcc1ca6ae2a664022d2b crypto/rsa/rsa_pk1.c 174a42e156be48927fe6d6bf0d95575619b8e643a99761275bff933bc3449722 crypto/rsa/rsa_pss.c @@ -288,50 +288,50 @@ f01af62704dbf9457e2669c3e7c1d4d740f0388faa49df93611b987a8aa2bf11 crypto/rsa/rsa 3aba73dacebb046faf8d09dc279149b52c629004b524ec33e6d81c8ad0bc31a8 crypto/rsa/rsa_sp800_56b_gen.c 1c1c2aeeb18bf1d69e8f134315b7e50d8f43d30eb1aa5bf42983eec9136a2fdc crypto/rsa/rsa_x931.c 0acbebed48f6242d595c21e3c1ad69da0daa960d62062e8970209deda144f337 crypto/s390xcap.c -22205848cfb55116ebf999dced8331b575886a609ce29e6886e6267b2310c337 crypto/s390xcpuid.pl +370d98549d4d98e04b60677b319b85904259359bd9401dd5385aa728278e6626 crypto/s390xcpuid.pl 5fa59240ca885cbc0c1cd026934b226d44fc9c3fdf0c2e7e3a7bd7f4963ca2e5 crypto/self_test_core.c -05c533fde7fdba0c76103e97d881b7224c8427451b453e2f6413552996063e31 crypto/sha/asm/keccak1600-armv4.pl -ca3b2b654f9a8c4bc2fa2538c1f19d17acd4a6b9e0df6a4b81df04efa697e67e crypto/sha/asm/keccak1600-armv8.pl -12b7acce2fba0bc0e1ca07842ec84be6a022f141c86e077abb42c864af1d8d9c crypto/sha/asm/keccak1600-avx2.pl -faf0cccb685d5abc807e08db194f847c67b940da2fc3c235c210dc31d73a5334 crypto/sha/asm/keccak1600-avx512.pl -be1e7dd9998e3f31cfa6e1b17bc198aeec584a8b76820e38f71d51b05f8a9f2a crypto/sha/asm/keccak1600-avx512vl.pl -33bdcc6f7668460c3bdf779633e43bfad62b937042a73acb007b462fc5b0a034 crypto/sha/asm/keccak1600-c64x.pl +58a1a8aeb45421954fa0e4bc87157addb96d086ac4e6aade47da96523cecaa74 crypto/sha/asm/keccak1600-armv4.pl +d6df6cfdd4e2fee52dc16fd31c91768c45c48c22700c486406d70ecb37e8a8bb 
crypto/sha/asm/keccak1600-armv8.pl +81bfb4484d68a3a3e1d704855f76356090867fe10a75db7707b6f7364e8ee8da crypto/sha/asm/keccak1600-avx2.pl +b7bb35d51d439abbf3810454ccb9bfb5a51e2111eaf389fb95796ad6220a61a0 crypto/sha/asm/keccak1600-avx512.pl +37365dcc576f99006132271968bab990e2bebdab7f4168c726bd449a2fa51c6a crypto/sha/asm/keccak1600-avx512vl.pl +2767ae2f379a7a3d0c6dd1471d4d90dd896545b456cb6efd6c230df29e511d70 crypto/sha/asm/keccak1600-c64x.pl 09fc831dd39bd90a701e9b16d9e9987cc215252a22e1e0355f5da6c495fca35a crypto/sha/asm/keccak1600-mmx.pl -ce4a58129e5ee3ac4c9dfec5ecc010440570ebf7bf869e3e9977f2121a64b27a crypto/sha/asm/keccak1600-ppc64.pl -a859fc8cb073b2d0012a93f3155a75fb6eb677441462b0de4f8cf8df1445e970 crypto/sha/asm/keccak1600-s390x.pl -618dcd4891b4064d3b8aa6dcd74bea7ef55f4962a64957b05a05448f6e3e0f17 crypto/sha/asm/keccak1600-x86_64.pl -831b8b02ab25d78ba6300ce960d96c13439bfba5844e13061e19c4e25cbacc3d crypto/sha/asm/keccak1600p8-ppc.pl +485dcc50a51705b86c6dc47e6f58d092fec05dfbfcdf4f2785e4235c67cfe742 crypto/sha/asm/keccak1600-ppc64.pl +49535b60a1a981059a2a9636fdeeab22942d2a15e775b1ec9b5af8937a46aa76 crypto/sha/asm/keccak1600-s390x.pl +093751655b460d33b2fa6aa4d63a86e902f7f20b2d2a02ed948b78e5698c0dd5 crypto/sha/asm/keccak1600-x86_64.pl +e0a4a1df82716053a3f01ec0b096c735a0e3c4f6c9d9ec6b2006b37aaac64448 crypto/sha/asm/keccak1600p8-ppc.pl 75d832db9bf0e98e7a5c522169060a6dd276c5118cfb297fc3f1111f55cd4007 crypto/sha/asm/sha1-586.pl -c96e87d4f5311cd73bbdf499acc03418588be12426d878e157dd67e0099e0219 crypto/sha/asm/sha1-alpha.pl -4ba6d1c7f12fe76bf39babea966f0a4b7f8769e0c0510cbfc2c46a65dd62d45c crypto/sha/asm/sha1-armv4-large.pl -efc69cb0d867b7fac6b3fa8985c343d1f984d552bc8e75bbbbace0adf9ee5f15 crypto/sha/asm/sha1-armv8.pl -11d332b4e058e9fa418d6633316d2e9f9bf520a08b2d933e877bdf38b2edefcf crypto/sha/asm/sha1-c64xplus.pl -32ff0e701a7b8f25bcfe8477b20795de54f536527bd87d3ce694fd9aaae356d4 crypto/sha/asm/sha1-ia64.pl -471c27efca685b2a82ad7fefe329ca54172df9f49b9785da6d706b913b75e693 crypto/sha/asm/sha1-mb-x86_64.pl -0f5c63cf09e950d1b488935ab3b5562e3e9d5cd1a563fb88a41e3dae90a35e6d crypto/sha/asm/sha1-mips.pl -b5ffd7b6dbb04c05de7efa2945adb67ea845e7e61a3bf163a532f7b6acdf4267 crypto/sha/asm/sha1-parisc.pl -482cd23ca6ec38d6f62b90c68f9f20643579c50f2c0fbb0dab1c10a0e35efe77 crypto/sha/asm/sha1-ppc.pl -28cf69efd53d7a5a8c32e0f8db32c193f41b91faf44f5f59944334bc3f5aa337 crypto/sha/asm/sha1-s390x.pl -7fd355b412ddfa1c510e0ba3284f75b1c0d621b6db2ecb1d2a935d5cdb706628 crypto/sha/asm/sha1-sparcv9.pl -24554e68b0e7b7db7b635ff149549015f623ca0bcd9ae90439586a2076f6ae80 crypto/sha/asm/sha1-sparcv9a.pl -74d197cdd72400cabbff7e173f72c8976723081508b095dc995e8cd1abf3daa6 crypto/sha/asm/sha1-thumb.pl -a59a86293e28f5600609dc8af2b39c5285580ae8636520990b000eeeb67bb889 crypto/sha/asm/sha1-x86_64.pl +8d937771993f04407f5fdcca8ca8565f9f8a4d9c9a8f7bfd4e9f9121dd0450bb crypto/sha/asm/sha1-alpha.pl +ab7ecd62896324393b1fd9020515b9c0d2b9cc34d559f2efafa35affc9a1485d crypto/sha/asm/sha1-armv4-large.pl +0acc4e40f793d4d2b960af2baaecc91176ba6742ddd62dca0c33ddc838c58772 crypto/sha/asm/sha1-armv8.pl +c36f51761e7f59bdd0f61230297fb802542ac5d2d1c6d2b1096ed937131bd583 crypto/sha/asm/sha1-c64xplus.pl +4ab7c9153b085274a579b388ddff97a4ac7e11585e01811ca95b93a3ec786605 crypto/sha/asm/sha1-ia64.pl +7a392c5ef7dc19c39d67c7080e0c5214e7a80572c85c022be7e7d4378a5f740d crypto/sha/asm/sha1-mb-x86_64.pl +c0fea5a0d32001263c8bcf7fc0757aa68c6a7377f20fef8d28708e1b81de5dec crypto/sha/asm/sha1-mips.pl +f11b75a54c5f42aa3a052de8091bfba47d7cac01920b2fe0ddcb637d4c9d0eb9 
crypto/sha/asm/sha1-parisc.pl +d46ef3fc166271a83144d90985034e2c514bd1020b84ec0fe5427ad593bfeb74 crypto/sha/asm/sha1-ppc.pl +a48c7d9403fe99fbd4daec60e96eb22058da766ab9e606d084a63613962851a2 crypto/sha/asm/sha1-s390x.pl +0e2951e0574c64ee055ffddf16ceefdec00823107d60362976605f139ad8ae68 crypto/sha/asm/sha1-sparcv9.pl +5da48400d4fae85e205e95a2fa368e7bf525e51e274b1dd680dfb48645426c85 crypto/sha/asm/sha1-sparcv9a.pl +04b73c902d36c28b5a7eab47cb85f743eb9c648ed5936f64f655524a1010a1b5 crypto/sha/asm/sha1-thumb.pl +f36d7ec7464c932230585a754b91f13cea4cde5a381fc9f798d959256d07910e crypto/sha/asm/sha1-x86_64.pl c099059ef107f548ea2c2bab64a4eb8c277070ce6d74c4d32bb9808dc19c5fa3 crypto/sha/asm/sha256-586.pl -b9cee5c5a283f61f601d2dba68a7a76e7aba10bfafffc1a5c4987f9c0aa6f87d crypto/sha/asm/sha256-armv4.pl -93ddc97651ee3e779144a3c6b3e46a1bc4aa81e75cd7b9df068a2aef8743d25f crypto/sha/asm/sha256-c64xplus.pl -8be5c5d69733ecb16774aa8410b4bcb3623a9f060d2be103d8aa67bf6e4c5843 crypto/sha/asm/sha256-mb-x86_64.pl +3a8cf38dd398a7ab1d9c6701fa61c428b07c4431a0041ed3a2ddf937897825c1 crypto/sha/asm/sha256-armv4.pl +c394bb5b0ff05595a9e6848b6602a0f29f73a79fc006593740f3ca645ad9d316 crypto/sha/asm/sha256-c64xplus.pl +f33af8e2e2f57b7b63b8c8b35722d7d11ca6ef1f73fb6c4ccebdd3e86912f4b1 crypto/sha/asm/sha256-mb-x86_64.pl dd82e1311703abb019975fc7b61fb87d67e1ed916dddd065aced051e851114b9 crypto/sha/asm/sha512-586.pl -8d84164f3cfd53290c0c14bb5655510b7a9238857866328c0604d64b4e76fe21 crypto/sha/asm/sha512-armv4.pl -dadacb6d66b160913bffb4e1a6c3e5f7be6509b26e2c099701d8d3fdb92c1be0 crypto/sha/asm/sha512-armv8.pl -6f548a088feae3b6faa179653ba449df9d3f5cda1e0561e5b5f120b32274d1eb crypto/sha/asm/sha512-c64xplus.pl -9fa54fbc34fd881f4b344374b9b4f8fb15b641424be7af9a31c71af89ae5d577 crypto/sha/asm/sha512-ia64.pl -fb06844e7c3b014a58dccc8ec6020c71843cfdc5be08288bc7d204f0a840c474 crypto/sha/asm/sha512-mips.pl -11548f06d213947104a80898e000218ec0d6ff3f6913f6582de498476482ce9f crypto/sha/asm/sha512-parisc.pl -7c0c490ce6bb11a228853aecad5e164ce84e5bdabb8a6658ae7184782076c7d3 crypto/sha/asm/sha512-ppc.pl -38e0455fd6a2b93a7a5385379ca92bc6526585ca1eb4af365fac4c78f7285c72 crypto/sha/asm/sha512-s390x.pl -0611845c52091b0208dd41f22ddef9dd1e68d3d92fa4c4360738b840a6314de6 crypto/sha/asm/sha512-sparcv9.pl -f64d16c1e5c3fa4a7969de494a8372127502171a517c14be7a1e3a43a7308699 crypto/sha/asm/sha512-x86_64.pl -8725cabb8d695c576619f19283b034074a3fa0f1c0be952a9dbe9793be15b907 crypto/sha/asm/sha512p8-ppc.pl +1f9ba79b1d591b7aa37b62382422cb025f5b45784d26cc5790c05cf4eb52b792 crypto/sha/asm/sha512-armv4.pl +8136196fce18b736f671a4b4945cd4aa4ab25a28c90c6fc9ab31ff771e8e0d9f crypto/sha/asm/sha512-armv8.pl +5b6796a9978b69fd78ee2ff1adc5cf35d44cad8194a38d1c2aba2023012cf252 crypto/sha/asm/sha512-c64xplus.pl +e8df660671ba61aa2e8f51358baf5d8ca913093e2ee1a40c9cb46d9c2c0851f6 crypto/sha/asm/sha512-ia64.pl +525f253ef8051bfb0e344ac2e40688ce359a42707fe360d23a03f522cc88c81a crypto/sha/asm/sha512-mips.pl +3c3e03529d8514467f8d77c01978348636bb339315feb8041fbde7640565001e crypto/sha/asm/sha512-parisc.pl +952ef1b10e8bbe3f638cc798b91ab9c5b47b66ed8fe94647b1beec9874f2e71e crypto/sha/asm/sha512-ppc.pl +193a0ea240264b29dd68a425f604a6da4b18e28838dcf909dd7e711af880f782 crypto/sha/asm/sha512-s390x.pl +dcb466a1e5938fb64ecb38b0533602192d61334da864ee8dfdcfa12d3cdfa273 crypto/sha/asm/sha512-sparcv9.pl +bb6503967a58b767a3e73441cfabc77f15c8ac747f377e276d4aa63d05f2c3c4 crypto/sha/asm/sha512-x86_64.pl +68d2f3b2dccb978ee42640f4fb4d2eae6b74d071017a3eedd9e7cb77762817dc crypto/sha/asm/sha512p8-ppc.pl 
57f6cf54b1b5d2cac7a8f622b7b6bd1878f360fff3fa0f02352061c24162ebbb crypto/sha/keccak1600.c 306cacd3f86e5cacaca74c58ef862516515e5c0cafaff48636d537fd84f1c2fb crypto/sha/sha1dgst.c 4d8cf04f5806611e7586aab47fb28165ec1afb00168e2c9876bb36cb5c29bf8b crypto/sha/sha256.c @@ -346,12 +346,12 @@ c50c584c55e56347bb43aca4b796b5344d70daece3061f586b79c871c21f5d1a crypto/sparse_ a41ae93a755e2ec89b3cb5b4932e2b508fdda92ace2e025a2650a6da0e9e972c crypto/threads_none.c 3729e2bd36f945808b578e0d89fac0fcb3114e4fc9381614bcbd8a9869991716 crypto/threads_pthread.c 88423960f0414f6fd41fba4f4c67f9f7260c2741e4788adcd52493e895ec8027 crypto/threads_win.c -fd6c27cf7c6b5449b17f2b725f4203c4c10207f1973db09fd41571efe5de08fd crypto/x86_64cpuid.pl +af0af59fe2cb8668a96751f343232d7faa3e7a937beb2bda09ed74fe60b9cb5f crypto/x86_64cpuid.pl bbec287bb9bf35379885f8f8998b7fd9e8fc22efee9e1b299109af0f33a7ee16 crypto/x86cpuid.pl -0a9c484f640d96e918921f57f592e82e99ccdbe35d3138d64b10c7af839e9a07 e_os.h -6f353dc7c8c4d8f24f7ffbf920668ccb224ebb5810805a7c80d96770cd858005 include/crypto/aes_platform.h +acbb841170d4d3eb91d969be1c0e4973b1babfd5fcd76440b0628f509f82fd76 e_os.h +249a0e58e9692920eddc1ada2ac772a0cfd749cfbf618f2f5da08280df545d8f include/crypto/aes_platform.h 8c6f308c1ca774e6127e325c3b80511dbcdc99631f032694d8db53a5c02364ee include/crypto/asn1_dsa.h -8ce1b35c6924555ef316c7c51d6c27656869e6da7f513f45b7a7051579e3e54d include/crypto/bn.h +2e8c284672c4e8e395b3da56a3abf3e65bb4346313fb6f7358e925d077a2e1e2 include/crypto/bn.h 1c46818354d42bd1b1c4e5fdae9e019814936e775fd8c918ca49959c2a6416df include/crypto/bn_conf.h.in 7a43a4898fcc8446065e6c99249bcc14e475716e8c1d40d50408c0ab179520e6 include/crypto/bn_dh.h e69b2b20fb415e24b970941c84a62b752b5d0175bc68126e467f7cc970495504 include/crypto/cryptlib.h @@ -373,7 +373,7 @@ f326212c978576c5346c89ae0336c2428594494b54054f6045b1f1038bfbc004 include/crypto 7676b02824b2d68df6bddeb251e9b8a8fa2e35a95dad9a7ebeca53f9ab8d2dad include/crypto/sparse_array.h 7ad02c7de77304c3b298deeb038ab2550cf8b2bce03021994477c6c43dbcf86e include/crypto/types.h 782a83d4e489fd865e2768a20bfa31e78c2071fd0ceeb9eb077276ae2bcc6590 include/internal/bio.h -92aacb3e49288f91b44f97e41933e88fe455706e1dd21a365683c2ab545db131 include/internal/constant_time.h +8e984890c7c62cdd6356963f034831831f7167c65096cb4d23bc765d84d2c598 include/internal/constant_time.h c5bb97f654984130c8b44c09a52395bce0b22985d5dbc9c4d9377d86283f11f8 include/internal/core.h 0b572801dfb8a41cc239e3439f8097a0ad11bbdf5d54811d10ceba3175cf2f17 include/internal/cryptlib.h 9571cfd3d5666749084b354a6d65adee443deeb5713a58c098c7b03bc69dbc63 include/internal/deprecated.h @@ -495,7 +495,7 @@ eec462d685dd3b4764b076a3c18ecd9dd254350a0b78ddc2f8a60587829e1ce3 providers/comm 5b94312727ca33e4f5c038f4caaae8417bf584cfde22df83d91f3c55c30c81ee providers/common/securitycheck.c 527eda471e26763a5fcf123b2d290234d5c836de7b8ef6eef2166ef439919d82 providers/common/securitycheck_fips.c abd5997bc33b681a4ab275978b92aebca0806a4a3f0c2f41dacf11b3b6f4e101 providers/fips/fips_entry.c -0f761a26c8fa6ad8d5a15c817afe1741352b21769b2164a2eb7dd50e1f6fe04f providers/fips/fipsprov.c +4a5ed1059ea6c5ef8d4b2a074b3da332443468852f58c18555f67f5d6d98606a providers/fips/fipsprov.c 5d24ba30f9cc7ca48546fb85dc285bd68590f3a604a0bd471bcb0c2a61169591 providers/fips/self_test.c f822a03138e8b83ccaa910b89d72f31691da6778bf6638181f993ec7ae1167e3 providers/fips/self_test.h d3c95c9c6cc4e3b1a5e4b2bfb2ae735a4109d763bcda7b1e9b8f9eb253f79820 providers/fips/self_test_data.inc @@ -546,8 +546,8 @@ de342d04be6af69037922d5c97bdc40c0c27f6740636e72786a765d0d8ad9173 
providers/impl b5f94d597df72ca58486c59b2a70b4057d13f09528f861ed41a84b7125b54a82 providers/implementations/exchange/dh_exch.c 9c46dc0d859875fcc0bc3d61a7b610cd3520b1bf63718775c1124f54a1fe5f24 providers/implementations/exchange/ecdh_exch.c 9bf87b8429398a6465c7e9f749a33b84974303a458736b56f3359b30726d3969 providers/implementations/exchange/ecx_exch.c -0cc02005660c5c340660123decac838c59b7460ef1003d9d50edc604cfd8e375 providers/implementations/exchange/kdf_exch.c -a0d1c1d49557d32497877b2d549d2a7a7729a550306275bfe6ddcefca0d8fc80 providers/implementations/include/prov/ciphercommon.h +4692ea3852bf5763db576359bd793fc1ec3bcd0ca42fc906991d7ec4cced7b2a providers/implementations/exchange/kdf_exch.c +996f1397f61b9eab1e31b5d06bccd9ac958dbd5982fd41fdb263ee889b84275c providers/implementations/include/prov/ciphercommon.h a9f5de1623221f327245957ec1dfd66a1914bff25adf4bcb81213c7955d19382 providers/implementations/include/prov/ciphercommon_aead.h dd07797d61988fd4124cfb920616df672938da80649fac5977bfd061c981edc5 providers/implementations/include/prov/ciphercommon_ccm.h 0c1e99d70155402a790e4de65923228c8df8ad970741caccfe8b513837457d7f providers/implementations/include/prov/ciphercommon_gcm.h @@ -557,7 +557,7 @@ b9a61ce951c1904d8315b1bb26c0ab0aaadb47e71d4ead5df0a891608c728c4b providers/impl c95ce5498e724b9b3d58e3c2f4723e7e3e4beb07f9bea9422e43182cbadb43af providers/implementations/include/prov/macsignature.h 29d1a112b799e1f45fdf8bcee8361c2ed67428c250c1cdf408a9fbb7ebf4cce1 providers/implementations/include/prov/names.h 2187713b446d8b6d24ee986748b941ac3e24292c71e07ff9fb53a33021decdda providers/implementations/include/prov/seeding.h -4e71ffd329f1715d14b54e14036b4b2618deb2fd81675287ce5eeb6c76a31d54 providers/implementations/kdfs/hkdf.c +d376c58489ae36fbece94bb88939845ced04a2a0bdd55d6a3562e45a56577ae1 providers/implementations/kdfs/hkdf.c a62e3af09f5af84dcf36f951ba4ac90ca1694adaf3747126186020b155f94186 providers/implementations/kdfs/kbkdf.c e0644e727aacfea4da3cf2c4d2602d7ef0626ebb760b6467432ffd54d5fbb24d providers/implementations/kdfs/pbkdf2.c c0778565abff112c0c5257329a7750ec4605e62f26cc36851fa1fbee6e03c70c providers/implementations/kdfs/pbkdf2.h @@ -580,11 +580,11 @@ e69aa06f8f3c6f5a26702b9f44a844b8589b99dc0ee590953a29e8b9ef10acbe providers/impl 895c8dc7235b9ad5ff893be0293cbc245a5455e8850195ac7d446646e4ea71d0 providers/implementations/macs/hmac_prov.c 8640b63fd8325aaf8f7128d6cc448d9af448a65bf51a8978075467d33a67944e providers/implementations/macs/kmac_prov.c bf30274dd6b528ae913984775bd8f29c6c48c0ef06d464d0f738217727b7aa5c providers/implementations/rands/crngt.c -9d23df7f99beec7392c9d4ed813407050bc2d150098888fe802e2c9705fc33fa providers/implementations/rands/drbg.c -bb5f8161a80d0d1a7ee919af2b167972b00afd62e326252ca6aa93101f315f19 providers/implementations/rands/drbg_ctr.c -a05adc3f6d9d6f948e5ead75f0522ed3164cb5b2d301169242f3cb97c4a7fac3 providers/implementations/rands/drbg_hash.c -0876dfae991028c569631938946e458e6829cacf4cfb673d2b144ae50a3160bb providers/implementations/rands/drbg_hmac.c -fc43558964bdf12442d3f6ab6cc3e6849f7adb42f4d0123a1279819befcf71cb providers/implementations/rands/drbg_local.h +f9457255fc57ef5739aa2584e535195e38cc947e31fd044d28d64c28c8a946ce providers/implementations/rands/drbg.c +7e8fa6333845778474ed1313a66867512512372c9397f699a8f68fa6d5fc05fa providers/implementations/rands/drbg_ctr.c +8337994f4bc95e421d6d2833bb4481ad9d84deb3913d0faec6e1791ea372a793 providers/implementations/rands/drbg_hash.c +1f040090f596f88cb64d6eb89109a8b75e66caee113708fb59335ad2547027fc providers/implementations/rands/drbg_hmac.c 
+7a1b8516f891f25f3dc07ffe0455200f20d3a1f0345a917f00c7d9afe900bb0a providers/implementations/rands/drbg_local.h 04339b66c10017229ef368cb48077f58a252ebfda9ab12b9f919e4149b1036ed providers/implementations/rands/test_rng.c cafb9e6f54ad15889fcebddac6df61336bff7d78936f7de3bb5aab8aee5728d2 providers/implementations/signature/dsa_sig.c a30dc6308de0ca33406e7ce909f3bcf7580fb84d863b0976b275839f866258df providers/implementations/signature/ecdsa_sig.c diff --git a/deps/openssl/openssl/providers/fips.checksum b/deps/openssl/openssl/providers/fips.checksum index 8fe82e0257063e..cbd9c09511796a 100644 --- a/deps/openssl/openssl/providers/fips.checksum +++ b/deps/openssl/openssl/providers/fips.checksum @@ -1 +1 @@ -9597c676c418928e2ba5075a6352a7d5b398e64db622b577822391424300ed43 providers/fips-sources.checksums +101807560af8f62c064ad796dfa1e4c269d45aaf5303b47ad0b25fdd6cc92466 providers/fips-sources.checksums diff --git a/deps/openssl/openssl/providers/fips/fipsprov.c b/deps/openssl/openssl/providers/fips/fipsprov.c index 6a88039423d9d8..3889dcd88a71a2 100644 --- a/deps/openssl/openssl/providers/fips/fipsprov.c +++ b/deps/openssl/openssl/providers/fips/fipsprov.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -655,6 +655,8 @@ int OSSL_provider_init_int(const OSSL_CORE_HANDLE *handle, } } + OPENSSL_cpuid_setup(); + /* Create a context. */ if ((*provctx = ossl_prov_ctx_new()) == NULL || (libctx = OSSL_LIB_CTX_new()) == NULL) { diff --git a/deps/openssl/openssl/providers/implementations/encode_decode/decode_der2key.c b/deps/openssl/openssl/providers/implementations/encode_decode/decode_der2key.c index d598f7eba1acc9..f2d9d825564882 100644 --- a/deps/openssl/openssl/providers/implementations/encode_decode/decode_der2key.c +++ b/deps/openssl/openssl/providers/implementations/encode_decode/decode_der2key.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
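Note on the fipsprov.c hunk above: the FIPS module now runs OPENSSL_cpuid_setup() during its own initialisation instead of relying on libcrypto having done it first, and the two checksum manifests are regenerated to match. The sketch below is illustrative rather than part of this patch; it shows the call path that reaches OSSL_provider_init_int(), since simply loading the provider is all it takes.

    #include <openssl/provider.h>

    int main(void)
    {
        /* Loading the FIPS provider invokes OSSL_provider_init_int(), which
         * after this patch calls OPENSSL_cpuid_setup() up front, so CPU
         * feature detection works even when the FIPS module is the first
         * OpenSSL code to run in the process. Assumes a fipsmodule.cnf
         * produced by 'openssl fipsinstall' is configured. */
        OSSL_PROVIDER *fips = OSSL_PROVIDER_load(NULL, "fips");

        if (fips == NULL)
            return 1;  /* module missing, unconfigured, or self-test failed */
        OSSL_PROVIDER_unload(fips);
        return 0;
    }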
You can obtain a copy @@ -32,6 +32,7 @@ #include "crypto/ecx.h" #include "crypto/rsa.h" #include "crypto/x509.h" +#include "openssl/obj_mac.h" #include "prov/bio.h" #include "prov/implementations.h" #include "endecoder_local.h" @@ -107,7 +108,10 @@ static void *der2key_decode_p8(const unsigned char **input_der, if ((p8inf = d2i_PKCS8_PRIV_KEY_INFO(NULL, input_der, input_der_len)) != NULL && PKCS8_pkey_get0(NULL, NULL, NULL, &alg, p8inf) - && OBJ_obj2nid(alg->algorithm) == ctx->desc->evp_type) + && (OBJ_obj2nid(alg->algorithm) == ctx->desc->evp_type + /* Allow decoding sm2 private key with id_ecPublicKey */ + || (OBJ_obj2nid(alg->algorithm) == NID_X9_62_id_ecPublicKey + && ctx->desc->evp_type == NID_sm2))) key = key_from_pkcs8(p8inf, PROV_LIBCTX_OF(ctx->provctx), NULL); PKCS8_PRIV_KEY_INFO_free(p8inf); @@ -286,10 +290,19 @@ static int der2key_decode(void *vctx, OSSL_CORE_BIO *cin, int selection, params[0] = OSSL_PARAM_construct_int(OSSL_OBJECT_PARAM_TYPE, &object_type); - params[1] = - OSSL_PARAM_construct_utf8_string(OSSL_OBJECT_PARAM_DATA_TYPE, - (char *)ctx->desc->keytype_name, - 0); + +#ifndef OPENSSL_NO_SM2 + if (strcmp(ctx->desc->keytype_name, "EC") == 0 + && (EC_KEY_get_flags(key) & EC_FLAG_SM2_RANGE) != 0) + params[1] = + OSSL_PARAM_construct_utf8_string(OSSL_OBJECT_PARAM_DATA_TYPE, + "SM2", 0); + else +#endif + params[1] = + OSSL_PARAM_construct_utf8_string(OSSL_OBJECT_PARAM_DATA_TYPE, + (char *)ctx->desc->keytype_name, + 0); /* The address of the key becomes the octet string */ params[2] = OSSL_PARAM_construct_octet_string(OSSL_OBJECT_PARAM_REFERENCE, @@ -409,10 +422,16 @@ static void *ec_d2i_PKCS8(void **key, const unsigned char **der, long der_len, static int ec_check(void *key, struct der2key_ctx_st *ctx) { /* We're trying to be clever by comparing two truths */ - + int ret = 0; int sm2 = (EC_KEY_get_flags(key) & EC_FLAG_SM2_RANGE) != 0; - return sm2 == (ctx->desc->evp_type == EVP_PKEY_SM2); + if (sm2) + ret = ctx->desc->evp_type == EVP_PKEY_SM2 + || ctx->desc->evp_type == NID_X9_62_id_ecPublicKey; + else + ret = ctx->desc->evp_type != EVP_PKEY_SM2; + + return ret; } static void ec_adjust(void *key, struct der2key_ctx_st *ctx) diff --git a/deps/openssl/openssl/providers/implementations/exchange/kdf_exch.c b/deps/openssl/openssl/providers/implementations/exchange/kdf_exch.c index 527a866c3d8dcc..148a3c422a8fc4 100644 --- a/deps/openssl/openssl/providers/implementations/exchange/kdf_exch.c +++ b/deps/openssl/openssl/providers/implementations/exchange/kdf_exch.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
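The decode_der2key.c changes above let the SM2 decoder accept PKCS#8 blobs whose AlgorithmIdentifier says id-ecPublicKey but whose curve is in the SM2 range, and der2key_decode() now reports such EC keys as "SM2" objects. A minimal decoding sketch; the helper name and the assumption that the buffer holds DER-encoded PKCS#8 are mine:

    #include <openssl/decoder.h>
    #include <openssl/evp.h>

    /* Decode a DER PKCS#8 private key as SM2; with this patch the decoder
     * also accepts keys labelled id-ecPublicKey that use the SM2 curve. */
    EVP_PKEY *decode_sm2_pkcs8(const unsigned char *der, size_t der_len)
    {
        EVP_PKEY *pkey = NULL;
        OSSL_DECODER_CTX *dctx =
            OSSL_DECODER_CTX_new_for_pkey(&pkey, "DER", NULL, "SM2",
                                          EVP_PKEY_KEYPAIR, NULL, NULL);

        if (dctx != NULL)
            OSSL_DECODER_from_data(dctx, &der, &der_len); /* sets pkey on success */
        OSSL_DECODER_CTX_free(dctx);
        return pkey;   /* NULL if the data did not decode as SM2 */
    }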
You can obtain a copy @@ -28,9 +28,13 @@ static OSSL_FUNC_keyexch_derive_fn kdf_derive; static OSSL_FUNC_keyexch_freectx_fn kdf_freectx; static OSSL_FUNC_keyexch_dupctx_fn kdf_dupctx; static OSSL_FUNC_keyexch_set_ctx_params_fn kdf_set_ctx_params; +static OSSL_FUNC_keyexch_get_ctx_params_fn kdf_get_ctx_params; static OSSL_FUNC_keyexch_settable_ctx_params_fn kdf_tls1_prf_settable_ctx_params; static OSSL_FUNC_keyexch_settable_ctx_params_fn kdf_hkdf_settable_ctx_params; static OSSL_FUNC_keyexch_settable_ctx_params_fn kdf_scrypt_settable_ctx_params; +static OSSL_FUNC_keyexch_gettable_ctx_params_fn kdf_tls1_prf_gettable_ctx_params; +static OSSL_FUNC_keyexch_gettable_ctx_params_fn kdf_hkdf_gettable_ctx_params; +static OSSL_FUNC_keyexch_gettable_ctx_params_fn kdf_scrypt_gettable_ctx_params; typedef struct { void *provctx; @@ -169,6 +173,13 @@ static int kdf_set_ctx_params(void *vpkdfctx, const OSSL_PARAM params[]) return EVP_KDF_CTX_set_params(pkdfctx->kdfctx, params); } +static int kdf_get_ctx_params(void *vpkdfctx, OSSL_PARAM params[]) +{ + PROV_KDF_CTX *pkdfctx = (PROV_KDF_CTX *)vpkdfctx; + + return EVP_KDF_CTX_get_params(pkdfctx->kdfctx, params); +} + static const OSSL_PARAM *kdf_settable_ctx_params(ossl_unused void *vpkdfctx, void *provctx, const char *kdfname) @@ -197,6 +208,34 @@ KDF_SETTABLE_CTX_PARAMS(tls1_prf, "TLS1-PRF") KDF_SETTABLE_CTX_PARAMS(hkdf, "HKDF") KDF_SETTABLE_CTX_PARAMS(scrypt, "SCRYPT") +static const OSSL_PARAM *kdf_gettable_ctx_params(ossl_unused void *vpkdfctx, + void *provctx, + const char *kdfname) +{ + EVP_KDF *kdf = EVP_KDF_fetch(PROV_LIBCTX_OF(provctx), kdfname, + NULL); + const OSSL_PARAM *params; + + if (kdf == NULL) + return NULL; + + params = EVP_KDF_gettable_ctx_params(kdf); + EVP_KDF_free(kdf); + + return params; +} + +#define KDF_GETTABLE_CTX_PARAMS(funcname, kdfname) \ + static const OSSL_PARAM *kdf_##funcname##_gettable_ctx_params(void *vpkdfctx, \ + void *provctx) \ + { \ + return kdf_gettable_ctx_params(vpkdfctx, provctx, kdfname); \ + } + +KDF_GETTABLE_CTX_PARAMS(tls1_prf, "TLS1-PRF") +KDF_GETTABLE_CTX_PARAMS(hkdf, "HKDF") +KDF_GETTABLE_CTX_PARAMS(scrypt, "SCRYPT") + #define KDF_KEYEXCH_FUNCTIONS(funcname) \ const OSSL_DISPATCH ossl_kdf_##funcname##_keyexch_functions[] = { \ { OSSL_FUNC_KEYEXCH_NEWCTX, (void (*)(void))kdf_##funcname##_newctx }, \ @@ -205,8 +244,11 @@ KDF_SETTABLE_CTX_PARAMS(scrypt, "SCRYPT") { OSSL_FUNC_KEYEXCH_FREECTX, (void (*)(void))kdf_freectx }, \ { OSSL_FUNC_KEYEXCH_DUPCTX, (void (*)(void))kdf_dupctx }, \ { OSSL_FUNC_KEYEXCH_SET_CTX_PARAMS, (void (*)(void))kdf_set_ctx_params }, \ + { OSSL_FUNC_KEYEXCH_GET_CTX_PARAMS, (void (*)(void))kdf_get_ctx_params }, \ { OSSL_FUNC_KEYEXCH_SETTABLE_CTX_PARAMS, \ (void (*)(void))kdf_##funcname##_settable_ctx_params }, \ + { OSSL_FUNC_KEYEXCH_GETTABLE_CTX_PARAMS, \ + (void (*)(void))kdf_##funcname##_gettable_ctx_params }, \ { 0, NULL } \ }; diff --git a/deps/openssl/openssl/providers/implementations/include/prov/ciphercommon.h b/deps/openssl/openssl/providers/implementations/include/prov/ciphercommon.h index 383b759304d45c..aacd49707f84c6 100644 --- a/deps/openssl/openssl/providers/implementations/include/prov/ciphercommon.h +++ b/deps/openssl/openssl/providers/implementations/include/prov/ciphercommon.h @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
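With the kdf_exch.c additions above, the KDF-backed key-exchange bridge forwards get_ctx_params/gettable_ctx_params to the underlying EVP_KDF, so EVP_PKEY_CTX_get_params() now works on HKDF, TLS1-PRF and scrypt derive contexts. A hedged sketch of what that enables, assuming the default library context:

    #include <openssl/evp.h>
    #include <openssl/core_names.h>
    #include <openssl/params.h>

    /* Query the output size of an HKDF derive context; before this patch the
     * keyexch bridge had no get_ctx_params entry, so the query failed. */
    size_t hkdf_derive_size(void)
    {
        size_t sz = 0;
        OSSL_PARAM params[2];
        EVP_PKEY_CTX *pctx = EVP_PKEY_CTX_new_from_name(NULL, "HKDF", NULL);

        if (pctx != NULL && EVP_PKEY_derive_init(pctx) > 0) {
            params[0] = OSSL_PARAM_construct_size_t(OSSL_KDF_PARAM_SIZE, &sz);
            params[1] = OSSL_PARAM_construct_end();
            /* forwarded to EVP_KDF_CTX_get_params() by the new dispatch entry */
            EVP_PKEY_CTX_get_params(pctx, params);
        }
        EVP_PKEY_CTX_free(pctx);
        return sz;
    }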
You can obtain a copy @@ -42,6 +42,13 @@ typedef int (PROV_CIPHER_HW_FN)(PROV_CIPHER_CTX *dat, unsigned char *out, #define PROV_CIPHER_FLAG_INVERSE_CIPHER 0x0200 struct prov_cipher_ctx_st { + /* place buffer at the beginning for memory alignment */ + /* The original value of the iv */ + unsigned char oiv[GENERIC_BLOCK_SIZE]; + /* Buffer of partial blocks processed via update calls */ + unsigned char buf[GENERIC_BLOCK_SIZE]; + unsigned char iv[GENERIC_BLOCK_SIZE]; + block128_f block; union { cbc128_f cbc; @@ -83,12 +90,6 @@ struct prov_cipher_ctx_st { * manage partial blocks themselves. */ unsigned int num; - - /* The original value of the iv */ - unsigned char oiv[GENERIC_BLOCK_SIZE]; - /* Buffer of partial blocks processed via update calls */ - unsigned char buf[GENERIC_BLOCK_SIZE]; - unsigned char iv[GENERIC_BLOCK_SIZE]; const PROV_CIPHER_HW *hw; /* hardware specific functions */ const void *ks; /* Pointer to algorithm specific key data */ OSSL_LIB_CTX *libctx; diff --git a/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c b/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c index 25819ea2397765..3db8b43891a000 100644 --- a/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c +++ b/deps/openssl/openssl/providers/implementations/kdfs/hkdf.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -340,6 +340,13 @@ static int kdf_hkdf_get_ctx_params(void *vctx, OSSL_PARAM params[]) return 0; return OSSL_PARAM_set_size_t(p, sz); } + if ((p = OSSL_PARAM_locate(params, OSSL_KDF_PARAM_INFO)) != NULL) { + if (ctx->info == NULL || ctx->info_len == 0) { + p->return_size = 0; + return 1; + } + return OSSL_PARAM_set_octet_string(p, ctx->info, ctx->info_len); + } return -2; } @@ -348,6 +355,7 @@ static const OSSL_PARAM *kdf_hkdf_gettable_ctx_params(ossl_unused void *ctx, { static const OSSL_PARAM known_gettable_ctx_params[] = { OSSL_PARAM_size_t(OSSL_KDF_PARAM_SIZE, NULL), + OSSL_PARAM_octet_string(OSSL_KDF_PARAM_INFO, NULL, 0), OSSL_PARAM_END }; return known_gettable_ctx_params; diff --git a/deps/openssl/openssl/providers/implementations/rands/drbg.c b/deps/openssl/openssl/providers/implementations/rands/drbg.c index e30836c53cabbb..9ab18af900bcbc 100644 --- a/deps/openssl/openssl/providers/implementations/rands/drbg.c +++ b/deps/openssl/openssl/providers/implementations/rands/drbg.c @@ -1,5 +1,5 @@ /* - * Copyright 2011-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2011-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -203,6 +203,11 @@ static size_t get_entropy(PROV_DRBG *drbg, unsigned char **pout, int entropy, return ossl_crngt_get_entropy(drbg, pout, entropy, min_len, max_len, prediction_resistance); #else + /* + * In normal use (i.e. OpenSSL's own uses), this is never called. + * Outside of the FIPS provider, OpenSSL sets its DRBGs up so that + * they always have a parent. This remains purely for legacy reasons. 
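Two related changes complete above: ciphercommon.h moves the oiv/buf/iv buffers to the front of prov_cipher_ctx_st so they keep natural memory alignment, and hkdf.c makes the accumulated info octets readable back out of an HKDF context via OSSL_KDF_PARAM_INFO. Reading them back looks roughly like this; a sketch, assuming kctx is an already-configured HKDF EVP_KDF_CTX:

    #include <openssl/kdf.h>
    #include <openssl/core_names.h>
    #include <openssl/params.h>

    /* Fetch the info bytes previously set on an HKDF context; with this patch
     * the call succeeds, and return_size is 0 when no info was set. */
    int get_hkdf_info(EVP_KDF_CTX *kctx,
                      unsigned char *buf, size_t buflen, size_t *infolen)
    {
        OSSL_PARAM params[2];

        params[0] = OSSL_PARAM_construct_octet_string(OSSL_KDF_PARAM_INFO,
                                                      buf, buflen);
        params[1] = OSSL_PARAM_construct_end();
        if (!EVP_KDF_CTX_get_params(kctx, params))
            return 0;
        *infolen = params[0].return_size;
        return 1;
    }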
+ */ return ossl_prov_get_entropy(drbg->provctx, pout, entropy, min_len, max_len); #endif @@ -765,6 +770,7 @@ int ossl_drbg_enable_locking(void *vctx) PROV_DRBG *ossl_rand_drbg_new (void *provctx, void *parent, const OSSL_DISPATCH *p_dispatch, int (*dnew)(PROV_DRBG *ctx), + void (*dfree)(void *vctx), int (*instantiate)(PROV_DRBG *drbg, const unsigned char *entropy, size_t entropylen, const unsigned char *nonce, size_t noncelen, @@ -844,7 +850,7 @@ PROV_DRBG *ossl_rand_drbg_new return drbg; err: - ossl_rand_drbg_free(drbg); + dfree(drbg); return NULL; } diff --git a/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c b/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c index 451113c4d16205..21fdce640816dc 100644 --- a/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c +++ b/deps/openssl/openssl/providers/implementations/rands/drbg_ctr.c @@ -1,5 +1,5 @@ /* - * Copyright 2011-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2011-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -581,7 +581,7 @@ static int drbg_ctr_init(PROV_DRBG *drbg) EVP_CIPHER_CTX_free(ctr->ctx_ecb); EVP_CIPHER_CTX_free(ctr->ctx_ctr); ctr->ctx_ecb = ctr->ctx_ctr = NULL; - return 0; + return 0; } static int drbg_ctr_new(PROV_DRBG *drbg) @@ -602,7 +602,8 @@ static int drbg_ctr_new(PROV_DRBG *drbg) static void *drbg_ctr_new_wrapper(void *provctx, void *parent, const OSSL_DISPATCH *parent_dispatch) { - return ossl_rand_drbg_new(provctx, parent, parent_dispatch, &drbg_ctr_new, + return ossl_rand_drbg_new(provctx, parent, parent_dispatch, + &drbg_ctr_new, &drbg_ctr_free, &drbg_ctr_instantiate, &drbg_ctr_uninstantiate, &drbg_ctr_reseed, &drbg_ctr_generate); } diff --git a/deps/openssl/openssl/providers/implementations/rands/drbg_hash.c b/deps/openssl/openssl/providers/implementations/rands/drbg_hash.c index 6deb0a29256b23..de9375793d5a63 100644 --- a/deps/openssl/openssl/providers/implementations/rands/drbg_hash.c +++ b/deps/openssl/openssl/providers/implementations/rands/drbg_hash.c @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2011-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -410,7 +410,8 @@ static int drbg_hash_new(PROV_DRBG *ctx) static void *drbg_hash_new_wrapper(void *provctx, void *parent, const OSSL_DISPATCH *parent_dispatch) { - return ossl_rand_drbg_new(provctx, parent, parent_dispatch, &drbg_hash_new, + return ossl_rand_drbg_new(provctx, parent, parent_dispatch, + &drbg_hash_new, &drbg_hash_free, &drbg_hash_instantiate, &drbg_hash_uninstantiate, &drbg_hash_reseed, &drbg_hash_generate); } diff --git a/deps/openssl/openssl/providers/implementations/rands/drbg_hmac.c b/deps/openssl/openssl/providers/implementations/rands/drbg_hmac.c index e68465a78cd9c1..4eb78de0653da5 100644 --- a/deps/openssl/openssl/providers/implementations/rands/drbg_hmac.c +++ b/deps/openssl/openssl/providers/implementations/rands/drbg_hmac.c @@ -1,5 +1,5 @@ /* - * Copyright 2011-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2011-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). 
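The ossl_rand_drbg_new() change above threads the method-specific destructor into the shared constructor, so its error path no longer calls the generic free on a partially built CTR/hash/HMAC DRBG. The same pattern reduced to plain C, with illustrative names that are not OpenSSL API:

    #include <stdlib.h>

    typedef struct widget_st { void *impl; } WIDGET;

    /* A shared constructor must unwind with the caller-supplied destructor:
     * only it knows how to release method-specific state (e.g. the cipher
     * contexts drbg_ctr_new() allocates); a generic free would leak them. */
    WIDGET *widget_new(int (*init)(WIDGET *), void (*destroy)(WIDGET *))
    {
        WIDGET *w = calloc(1, sizeof(*w));

        if (w == NULL)
            return NULL;
        if (!init(w)) {
            destroy(w);   /* not free(w) */
            return NULL;
        }
        return w;
    }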
You may not use * this file except in compliance with the License. You can obtain a copy @@ -296,7 +296,8 @@ static int drbg_hmac_new(PROV_DRBG *drbg) static void *drbg_hmac_new_wrapper(void *provctx, void *parent, const OSSL_DISPATCH *parent_dispatch) { - return ossl_rand_drbg_new(provctx, parent, parent_dispatch, &drbg_hmac_new, + return ossl_rand_drbg_new(provctx, parent, parent_dispatch, + &drbg_hmac_new, &drbg_hmac_free, &drbg_hmac_instantiate, &drbg_hmac_uninstantiate, &drbg_hmac_reseed, &drbg_hmac_generate); } diff --git a/deps/openssl/openssl/providers/implementations/rands/drbg_local.h b/deps/openssl/openssl/providers/implementations/rands/drbg_local.h index 8bc5df89c2363b..2f3aacdea8714a 100644 --- a/deps/openssl/openssl/providers/implementations/rands/drbg_local.h +++ b/deps/openssl/openssl/providers/implementations/rands/drbg_local.h @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -181,6 +181,7 @@ struct prov_drbg_st { PROV_DRBG *ossl_rand_drbg_new (void *provctx, void *parent, const OSSL_DISPATCH *parent_dispatch, int (*dnew)(PROV_DRBG *ctx), + void (*dfree)(void *vctx), int (*instantiate)(PROV_DRBG *drbg, const unsigned char *entropy, size_t entropylen, const unsigned char *nonce, size_t noncelen, diff --git a/deps/openssl/openssl/ssl/bio_ssl.c b/deps/openssl/openssl/ssl/bio_ssl.c index 401178f0c2e48c..be3159b32a9ff0 100644 --- a/deps/openssl/openssl/ssl/bio_ssl.c +++ b/deps/openssl/openssl/ssl/bio_ssl.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -77,7 +77,7 @@ static int ssl_free(BIO *a) return 0; bs = BIO_get_data(a); if (BIO_get_shutdown(a)) { - if (bs->ssl != NULL) + if (bs->ssl != NULL && !SSL_in_init(bs->ssl)) SSL_shutdown(bs->ssl); if (BIO_get_init(a)) SSL_free(bs->ssl); diff --git a/deps/openssl/openssl/ssl/record/rec_layer_s3.c b/deps/openssl/openssl/ssl/record/rec_layer_s3.c index 4bcffcc41e3649..779e998bb6ee06 100644 --- a/deps/openssl/openssl/ssl/record/rec_layer_s3.c +++ b/deps/openssl/openssl/ssl/record/rec_layer_s3.c @@ -81,6 +81,15 @@ int RECORD_LAYER_read_pending(const RECORD_LAYER *rl) return SSL3_BUFFER_get_left(&rl->rbuf) != 0; } +int RECORD_LAYER_data_present(const RECORD_LAYER *rl) +{ + if (rl->rstate == SSL_ST_READ_BODY) + return 1; + if (RECORD_LAYER_processed_read_pending(rl)) + return 1; + return 0; +} + /* Checks if we have decrypted unread record data pending */ int RECORD_LAYER_processed_read_pending(const RECORD_LAYER *rl) { @@ -221,6 +230,12 @@ int ssl3_read_n(SSL *s, size_t n, size_t max, int extend, int clearold, /* ... 
now we can act as if 'extend' was set */ } + if (!ossl_assert(s->rlayer.packet != NULL)) { + /* does not happen */ + SSLfatal(s, SSL_AD_INTERNAL_ERROR, ERR_R_INTERNAL_ERROR); + return -1; + } + len = s->rlayer.packet_length; pkt = rb->buf + align; /* diff --git a/deps/openssl/openssl/ssl/record/record.h b/deps/openssl/openssl/ssl/record/record.h index 234656bf939421..a2db6aa88e14b0 100644 --- a/deps/openssl/openssl/ssl/record/record.h +++ b/deps/openssl/openssl/ssl/record/record.h @@ -1,5 +1,5 @@ /* - * Copyright 1995-2020 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -205,6 +205,7 @@ void RECORD_LAYER_release(RECORD_LAYER *rl); int RECORD_LAYER_read_pending(const RECORD_LAYER *rl); int RECORD_LAYER_processed_read_pending(const RECORD_LAYER *rl); int RECORD_LAYER_write_pending(const RECORD_LAYER *rl); +int RECORD_LAYER_data_present(const RECORD_LAYER *rl); void RECORD_LAYER_reset_read_sequence(RECORD_LAYER *rl); void RECORD_LAYER_reset_write_sequence(RECORD_LAYER *rl); int RECORD_LAYER_is_sslv2_record(RECORD_LAYER *rl); diff --git a/deps/openssl/openssl/ssl/record/ssl3_buffer.c b/deps/openssl/openssl/ssl/record/ssl3_buffer.c index 97b0c26ced81e9..e769235fe0dea6 100644 --- a/deps/openssl/openssl/ssl/record/ssl3_buffer.c +++ b/deps/openssl/openssl/ssl/record/ssl3_buffer.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -191,5 +191,7 @@ int ssl3_release_read_buffer(SSL *s) OPENSSL_cleanse(b->buf, b->len); OPENSSL_free(b->buf); b->buf = NULL; + s->rlayer.packet = NULL; + s->rlayer.packet_length = 0; return 1; } diff --git a/deps/openssl/openssl/ssl/ssl_err.c b/deps/openssl/openssl/ssl/ssl_err.c index 79c2ed95c1859f..fe0d9c280f7e55 100644 --- a/deps/openssl/openssl/ssl/ssl_err.c +++ b/deps/openssl/openssl/ssl/ssl_err.c @@ -1,6 +1,6 @@ /* * Generated by util/mkerr.pl DO NOT EDIT - * Copyright 1995-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
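Two hardening changes finish above: ssl_free() in bio_ssl.c no longer calls SSL_shutdown() while the handshake is still in progress, and the record layer gains RECORD_LAYER_data_present() while ssl3_release_read_buffer() clears the stale rlayer.packet pointer. The BIO-side effect, sketched with an illustrative helper:

    #include <openssl/ssl.h>

    /* Abandoning an SSL BIO mid-handshake: with the guard above,
     * BIO_free_all() skips SSL_shutdown() while SSL_in_init() is true, so no
     * close_notify is emitted from a half-initialised connection state. */
    void abandon_ssl_bio(SSL_CTX *ctx)
    {
        BIO *b = BIO_new_ssl(ctx, 1 /* client mode */);

        if (b == NULL)
            return;
        /* ... connect/handshake attempt fails or is given up here ... */
        BIO_free_all(b);
    }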
You can obtain a copy @@ -459,6 +459,8 @@ static const ERR_STRING_DATA SSL_str_reasons[] = { "tlsv1 alert insufficient security"}, {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_ALERT_INTERNAL_ERROR), "tlsv1 alert internal error"}, + {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_ALERT_NO_APPLICATION_PROTOCOL), + "tlsv1 alert no application protocol"}, {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_ALERT_NO_RENEGOTIATION), "tlsv1 alert no renegotiation"}, {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_ALERT_PROTOCOL_VERSION), @@ -467,6 +469,8 @@ static const ERR_STRING_DATA SSL_str_reasons[] = { "tlsv1 alert record overflow"}, {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_ALERT_UNKNOWN_CA), "tlsv1 alert unknown ca"}, + {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_ALERT_UNKNOWN_PSK_IDENTITY), + "tlsv1 alert unknown psk identity"}, {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_ALERT_USER_CANCELLED), "tlsv1 alert user cancelled"}, {ERR_PACK(ERR_LIB_SSL, 0, SSL_R_TLSV1_BAD_CERTIFICATE_HASH_VALUE), diff --git a/deps/openssl/openssl/ssl/ssl_lib.c b/deps/openssl/openssl/ssl/ssl_lib.c index 20ddf8d2fb045a..2619636df88372 100644 --- a/deps/openssl/openssl/ssl/ssl_lib.c +++ b/deps/openssl/openssl/ssl/ssl_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * Copyright 2005 Nokia. All rights reserved. * @@ -3037,37 +3037,54 @@ int SSL_select_next_proto(unsigned char **out, unsigned char *outlen, unsigned int server_len, const unsigned char *client, unsigned int client_len) { - unsigned int i, j; - const unsigned char *result; - int status = OPENSSL_NPN_UNSUPPORTED; + PACKET cpkt, csubpkt, spkt, ssubpkt; + + if (!PACKET_buf_init(&cpkt, client, client_len) + || !PACKET_get_length_prefixed_1(&cpkt, &csubpkt) + || PACKET_remaining(&csubpkt) == 0) { + *out = NULL; + *outlen = 0; + return OPENSSL_NPN_NO_OVERLAP; + } + + /* + * Set the default opportunistic protocol. Will be overwritten if we find + * a match. + */ + *out = (unsigned char *)PACKET_data(&csubpkt); + *outlen = (unsigned char)PACKET_remaining(&csubpkt); /* * For each protocol in server preference order, see if we support it. */ - for (i = 0; i < server_len;) { - for (j = 0; j < client_len;) { - if (server[i] == client[j] && - memcmp(&server[i + 1], &client[j + 1], server[i]) == 0) { - /* We found a match */ - result = &server[i]; - status = OPENSSL_NPN_NEGOTIATED; - goto found; + if (PACKET_buf_init(&spkt, server, server_len)) { + while (PACKET_get_length_prefixed_1(&spkt, &ssubpkt)) { + if (PACKET_remaining(&ssubpkt) == 0) + continue; /* Invalid - ignore it */ + if (PACKET_buf_init(&cpkt, client, client_len)) { + while (PACKET_get_length_prefixed_1(&cpkt, &csubpkt)) { + if (PACKET_equal(&csubpkt, PACKET_data(&ssubpkt), + PACKET_remaining(&ssubpkt))) { + /* We found a match */ + *out = (unsigned char *)PACKET_data(&ssubpkt); + *outlen = (unsigned char)PACKET_remaining(&ssubpkt); + return OPENSSL_NPN_NEGOTIATED; + } + } + /* Ignore spurious trailing bytes in the client list */ + } else { + /* This should never happen */ + return OPENSSL_NPN_NO_OVERLAP; } - j += client[j]; - j++; } - i += server[i]; - i++; + /* Ignore spurious trailing bytes in the server list */ } - /* There's no overlap between our protocols and the server's list. 
*/ - result = client; - status = OPENSSL_NPN_NO_OVERLAP; - - found: - *out = (unsigned char *)result + 1; - *outlen = result[0]; - return status; + /* + * There's no overlap between our protocols and the server's list. We use + * the default opportunistic protocol selected earlier + */ + return OPENSSL_NPN_NO_OVERLAP; } #ifndef OPENSSL_NO_NEXTPROTONEG @@ -3821,9 +3838,10 @@ void ssl_update_cache(SSL *s, int mode) /* * If the session_id_length is 0, we are not supposed to cache it, and it - * would be rather hard to do anyway :-) + * would be rather hard to do anyway :-). Also if the session has already + * been marked as not_resumable we should not cache it for later reuse. */ - if (s->session->session_id_length == 0) + if (s->session->session_id_length == 0 || s->session->not_resumable) return; /* @@ -5596,6 +5614,9 @@ int SSL_free_buffers(SSL *ssl) if (RECORD_LAYER_read_pending(rl) || RECORD_LAYER_write_pending(rl)) return 0; + if (RECORD_LAYER_data_present(rl)) + return 0; + RECORD_LAYER_release(rl); return 1; } diff --git a/deps/openssl/openssl/ssl/ssl_sess.c b/deps/openssl/openssl/ssl/ssl_sess.c index d836b33ed0e81d..ec937a321c3018 100644 --- a/deps/openssl/openssl/ssl/ssl_sess.c +++ b/deps/openssl/openssl/ssl/ssl_sess.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright 2005 Nokia. All rights reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -53,21 +53,36 @@ __owur static int timeoutcmp(SSL_SESSION *a, SSL_SESSION *b) return 0; } +#ifdef __DJGPP__ /* time_t is unsigned on djgpp, it's signed anywhere else */ +# define TMAX(_type_) ((time_t)-1) +#else +# define TMAX(_type_) ((time_t)(((_type_)-1) >> 1)) +#endif + +#define CALCULATE_TIMEOUT(_ss_, _type_) do { \ + _type_ overflow; \ + time_t tmax = TMAX(_type_); \ + overflow = (_type_)tmax - (_type_)(_ss_)->time; \ + if ((_ss_)->timeout > (time_t)overflow) { \ + (_ss_)->timeout_ovf = 1; \ + (_ss_)->calc_timeout = (_ss_)->timeout - (time_t)overflow; \ + } else { \ + (_ss_)->timeout_ovf = 0; \ + (_ss_)->calc_timeout = (_ss_)->time + (_ss_)->timeout; \ + } \ + } while (0) /* * Calculates effective timeout, saving overflow state * Locking must be done by the caller of this function */ void ssl_session_calculate_timeout(SSL_SESSION *ss) { - /* Force positive timeout */ - if (ss->timeout < 0) - ss->timeout = 0; - ss->calc_timeout = ss->time + ss->timeout; - /* - * |timeout| is always zero or positive, so the check for - * overflow only needs to consider if |time| is positive - */ - ss->timeout_ovf = ss->time > 0 && ss->calc_timeout < ss->time; + + if (sizeof(time_t) == 8) + CALCULATE_TIMEOUT(ss, uint64_t); + else + CALCULATE_TIMEOUT(ss, uint32_t); + /* * N.B. Realistic overflow can only occur in our lifetimes on a * 32-bit machine in January 2038. @@ -132,6 +147,7 @@ SSL_SESSION *SSL_SESSION_new(void) return NULL; } + ss->ext.max_fragment_len_mode = TLSEXT_max_fragment_length_UNSPECIFIED; ss->verify_result = 1; /* avoid 0 (= X509_V_OK) just in case */ ss->references = 1; ss->timeout = 60 * 5 + 4; /* 5 minute timeout by default */ @@ -152,16 +168,11 @@ SSL_SESSION *SSL_SESSION_new(void) return ss; } -SSL_SESSION *SSL_SESSION_dup(const SSL_SESSION *src) -{ - return ssl_session_dup(src, 1); -} - /* * Create a new SSL_SESSION and duplicate the contents of |src| into it. If * ticket == 0 then no ticket information is duplicated, otherwise it is. 
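The SSL_select_next_proto() rewrite completed above parses both lists as length-prefixed PACKETs, so malformed input can no longer walk *out past the supplied buffers; an empty or invalid client list now yields OPENSSL_NPN_NO_OVERLAP with *out set to NULL instead of pointing into it. Callers supply wire-format lists, for example (illustrative values):

    #include <string.h>
    #include <openssl/ssl.h>

    /* Both lists use the ALPN/NPN wire format: one length byte per protocol
     * name. Server preference wins; here it selects "http/1.1". */
    static const unsigned char server_list[] = {
        2, 'h', '2',
        8, 'h', 't', 't', 'p', '/', '1', '.', '1',
    };
    static const unsigned char client_list[] = {
        8, 'h', 't', 't', 'p', '/', '1', '.', '1',
    };

    int negotiate(void)
    {
        unsigned char *sel;
        unsigned char sel_len;
        int rc = SSL_select_next_proto(&sel, &sel_len,
                                       server_list, sizeof(server_list),
                                       client_list, sizeof(client_list));

        /* On OPENSSL_NPN_NO_OVERLAP, sel falls back to the client's first
         * protocol, or NULL if the client list itself was malformed. */
        return rc == OPENSSL_NPN_NEGOTIATED
            && sel_len == 8 && memcmp(sel, "http/1.1", sel_len) == 0;
    }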
*/ -SSL_SESSION *ssl_session_dup(const SSL_SESSION *src, int ticket) +static SSL_SESSION *ssl_session_dup_intern(const SSL_SESSION *src, int ticket) { SSL_SESSION *dest; @@ -285,6 +296,27 @@ SSL_SESSION *ssl_session_dup(const SSL_SESSION *src, int ticket) return NULL; } +SSL_SESSION *SSL_SESSION_dup(const SSL_SESSION *src) +{ + return ssl_session_dup_intern(src, 1); +} + +/* + * Used internally when duplicating a session which might be already shared. + * We will have resumed the original session. Subsequently we might have marked + * it as non-resumable (e.g. in another thread) - but this copy should be ok to + * resume from. + */ +SSL_SESSION *ssl_session_dup(const SSL_SESSION *src, int ticket) +{ + SSL_SESSION *sess = ssl_session_dup_intern(src, ticket); + + if (sess != NULL) + sess->not_resumable = 0; + + return sess; +} + const unsigned char *SSL_SESSION_get_id(const SSL_SESSION *s, unsigned int *len) { if (len) @@ -515,6 +547,12 @@ SSL_SESSION *lookup_sess_in_cache(SSL *s, const unsigned char *sess_id, ret = s->session_ctx->get_session_cb(s, sess_id, sess_id_len, ©); if (ret != NULL) { + if (ret->not_resumable) { + /* If its not resumable then ignore this session */ + if (!copy) + SSL_SESSION_free(ret); + return NULL; + } ssl_tsan_counter(s->session_ctx, &s->session_ctx->stats.sess_cb_hit); diff --git a/deps/openssl/openssl/ssl/statem/extensions.c b/deps/openssl/openssl/ssl/statem/extensions.c index d686bd6dba8a9b..ed78744119e209 100644 --- a/deps/openssl/openssl/ssl/statem/extensions.c +++ b/deps/openssl/openssl/ssl/statem/extensions.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1712,15 +1712,9 @@ static int final_early_data(SSL *s, unsigned int context, int sent) static int final_maxfragmentlen(SSL *s, unsigned int context, int sent) { - /* - * Session resumption on server-side with MFL extension active - * BUT MFL extension packet was not resent (i.e. sent == 0) - */ - if (s->server && s->hit && USE_MAX_FRAGMENT_LENGTH_EXT(s->session) - && !sent ) { - SSLfatal(s, SSL_AD_MISSING_EXTENSION, SSL_R_BAD_EXTENSION); - return 0; - } + /* MaxFragmentLength defaults to disabled */ + if (s->session->ext.max_fragment_len_mode == TLSEXT_max_fragment_length_UNSPECIFIED) + s->session->ext.max_fragment_len_mode = TLSEXT_max_fragment_length_DISABLED; /* Current SSL buffer is lower than requested MFL */ if (s->session && USE_MAX_FRAGMENT_LENGTH_EXT(s->session) diff --git a/deps/openssl/openssl/ssl/statem/extensions_clnt.c b/deps/openssl/openssl/ssl/statem/extensions_clnt.c index caeb818a37ce4a..3b0781fc71c70e 100644 --- a/deps/openssl/openssl/ssl/statem/extensions_clnt.c +++ b/deps/openssl/openssl/ssl/statem/extensions_clnt.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
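The CALCULATE_TIMEOUT macro in the ssl_sess.c hunk above replaces the old clamp-to-zero logic: it measures the headroom between the session's start time and the maximum representable time_t, and records an explicit overflow flag when time + timeout would wrap. The same arithmetic restated for a signed 64-bit time_t; that width is an assumption, and the macro also covers the 32-bit and unsigned-djgpp cases:

    #include <stdint.h>
    #include <time.h>

    static void calc_timeout(time_t start, time_t timeout,
                             time_t *calc, int *ovf)
    {
        time_t tmax = (time_t)(UINT64_MAX >> 1);   /* max signed 64-bit value */
        uint64_t headroom = (uint64_t)tmax - (uint64_t)start;

        if ((uint64_t)timeout > headroom) {
            *ovf = 1;                              /* start + timeout wraps */
            *calc = timeout - (time_t)headroom;
        } else {
            *ovf = 0;
            *calc = start + timeout;
        }
    }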
You can obtain a copy @@ -1576,7 +1576,8 @@ int tls_parse_stoc_npn(SSL *s, PACKET *pkt, unsigned int context, X509 *x, PACKET_data(pkt), PACKET_remaining(pkt), s->ctx->ext.npn_select_cb_arg) != - SSL_TLSEXT_ERR_OK) { + SSL_TLSEXT_ERR_OK + || selected_len == 0) { SSLfatal(s, SSL_AD_HANDSHAKE_FAILURE, SSL_R_BAD_EXTENSION); return 0; } @@ -1605,6 +1606,8 @@ int tls_parse_stoc_alpn(SSL *s, PACKET *pkt, unsigned int context, X509 *x, size_t chainidx) { size_t len; + PACKET confpkt, protpkt; + int valid = 0; /* We must have requested it. */ if (!s->s3.alpn_sent) { @@ -1623,6 +1626,28 @@ int tls_parse_stoc_alpn(SSL *s, PACKET *pkt, unsigned int context, X509 *x, SSLfatal(s, SSL_AD_DECODE_ERROR, SSL_R_BAD_EXTENSION); return 0; } + + /* It must be a protocol that we sent */ + if (!PACKET_buf_init(&confpkt, s->ext.alpn, s->ext.alpn_len)) { + SSLfatal(s, SSL_AD_INTERNAL_ERROR, ERR_R_INTERNAL_ERROR); + return 0; + } + while (PACKET_get_length_prefixed_1(&confpkt, &protpkt)) { + if (PACKET_remaining(&protpkt) != len) + continue; + if (memcmp(PACKET_data(pkt), PACKET_data(&protpkt), len) == 0) { + /* Valid protocol found */ + valid = 1; + break; + } + } + + if (!valid) { + /* The protocol sent from the server does not match one we advertised */ + SSLfatal(s, SSL_AD_DECODE_ERROR, SSL_R_BAD_EXTENSION); + return 0; + } + OPENSSL_free(s->s3.alpn_selected); s->s3.alpn_selected = OPENSSL_malloc(len); if (s->s3.alpn_selected == NULL) { diff --git a/deps/openssl/openssl/ssl/statem/extensions_srvr.c b/deps/openssl/openssl/ssl/statem/extensions_srvr.c index 0dfbfed9a4af6c..546d11dd1f73af 100644 --- a/deps/openssl/openssl/ssl/statem/extensions_srvr.c +++ b/deps/openssl/openssl/ssl/statem/extensions_srvr.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -181,21 +181,26 @@ int tls_parse_ctos_maxfragmentlen(SSL *s, PACKET *pkt, unsigned int context, } /* - * RFC 6066: The negotiated length applies for the duration of the session + * When doing a full handshake or a renegotiation max_fragment_len_mode will + * be TLSEXT_max_fragment_length_UNSPECIFIED + * + * In case of a resumption max_fragment_len_mode will be one of + * TLSEXT_max_fragment_length_DISABLED, TLSEXT_max_fragment_length_512, + * TLSEXT_max_fragment_length_1024, TLSEXT_max_fragment_length_2048. + * TLSEXT_max_fragment_length_4096 + * + * RFC 6066: The negotiated length applies for the duration of the session * including session resumptions. - * We should receive the same code as in resumed session ! + * + * So we only set the value in case it is unspecified. */ - if (s->hit && s->session->ext.max_fragment_len_mode != value) { - SSLfatal(s, SSL_AD_ILLEGAL_PARAMETER, - SSL_R_SSL3_EXT_INVALID_MAX_FRAGMENT_LENGTH); - return 0; - } + if (s->session->ext.max_fragment_len_mode == TLSEXT_max_fragment_length_UNSPECIFIED) + /* + * Store it in session, so it'll become binding for us + * and we'll include it in a next Server Hello. + */ + s->session->ext.max_fragment_len_mode = value; - /* - * Store it in session, so it'll become binding for us - * and we'll include it in a next Server Hello. 
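The tls_parse_stoc_alpn() check added above rejects a ServerHello whose ALPN selection is not one of the protocols the client offered, and tls_parse_stoc_npn() now rejects an empty NPN selection. Offering protocols on the client is unchanged; a short reminder of the wire format, with an illustrative list:

    #include <openssl/ssl.h>

    /* Advertise h2 and http/1.1; the new check guarantees that whatever
     * SSL_get0_alpn_selected() later reports is one of these entries. */
    int offer_alpn(SSL *s)
    {
        static const unsigned char protos[] = {
            2, 'h', '2',
            8, 'h', 't', 't', 'p', '/', '1', '.', '1',
        };

        /* Note the inverted convention: 0 means success. */
        return SSL_set_alpn_protos(s, protos, sizeof(protos)) == 0;
    }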
- */ - s->session->ext.max_fragment_len_mode = value; return 1; } @@ -1505,9 +1510,10 @@ EXT_RETURN tls_construct_stoc_next_proto_neg(SSL *s, WPACKET *pkt, return EXT_RETURN_FAIL; } s->s3.npn_seen = 1; + return EXT_RETURN_SENT; } - return EXT_RETURN_SENT; + return EXT_RETURN_NOT_SENT; } #endif diff --git a/deps/openssl/openssl/ssl/statem/statem_lib.c b/deps/openssl/openssl/ssl/statem/statem_lib.c index cb31835265ff76..121929b06f4062 100644 --- a/deps/openssl/openssl/ssl/statem/statem_lib.c +++ b/deps/openssl/openssl/ssl/statem/statem_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -501,6 +501,10 @@ MSG_PROCESS_RETURN tls_process_cert_verify(SSL *s, PACKET *pkt) SSLfatal(s, SSL_AD_DECODE_ERROR, SSL_R_LENGTH_MISMATCH); goto err; } + if (PACKET_remaining(pkt) != 0) { + SSLfatal(s, SSL_AD_DECODE_ERROR, SSL_R_LENGTH_MISMATCH); + goto err; + } if (!get_cert_verify_tbs_data(s, tls13tbs, &hdata, &hdatalen)) { /* SSLfatal() already called */ diff --git a/deps/openssl/openssl/ssl/statem/statem_srvr.c b/deps/openssl/openssl/ssl/statem/statem_srvr.c index ddd85cc38c4a95..92e4f793ab24e9 100644 --- a/deps/openssl/openssl/ssl/statem/statem_srvr.c +++ b/deps/openssl/openssl/ssl/statem/statem_srvr.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * Copyright 2005 Nokia. All rights reserved. * @@ -2358,9 +2358,8 @@ int tls_construct_server_hello(SSL *s, WPACKET *pkt) * so the following won't overwrite an ID that we're supposed * to send back. */ - if (s->session->not_resumable || - (!(s->ctx->session_cache_mode & SSL_SESS_CACHE_SERVER) - && !s->hit)) + if (!(s->ctx->session_cache_mode & SSL_SESS_CACHE_SERVER) + && !s->hit) s->session->session_id_length = 0; if (usetls13) { @@ -3155,7 +3154,7 @@ static int tls_process_cke_gost(SSL *s, PACKET *pkt) } if (EVP_PKEY_decrypt_init(pkey_ctx) <= 0) { SSLfatal(s, SSL_AD_INTERNAL_ERROR, ERR_R_INTERNAL_ERROR); - return 0; + goto err; } /* * If client certificate is present and is of the same type, maybe diff --git a/deps/openssl/openssl/ssl/t1_lib.c b/deps/openssl/openssl/ssl/t1_lib.c index 8be00a4f340598..bbb3b514d77f14 100644 --- a/deps/openssl/openssl/ssl/t1_lib.c +++ b/deps/openssl/openssl/ssl/t1_lib.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
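The max_fragment_length changes that finish above make the server record the first negotiated MFL value in the session and keep it binding across resumption, instead of fataling when a resuming client re-sends a different value. From the application's side the opt-in is unchanged; a minimal sketch:

    #include <openssl/ssl.h>

    /* Client-side opt-in: request a 1024-byte maximum fragment length. The
     * negotiated value then applies for the whole session, resumptions
     * included, per RFC 6066. */
    int enable_mfl(SSL_CTX *ctx)
    {
        return SSL_CTX_set_tlsext_max_fragment_length(
                   ctx, TLSEXT_max_fragment_length_1024) == 1;
    }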
You can obtain a copy @@ -734,7 +734,8 @@ static int gid_cb(const char *elem, int len, void *arg) return 0; if (garg->gidcnt == garg->gidmax) { uint16_t *tmp = - OPENSSL_realloc(garg->gid_arr, garg->gidmax + GROUPLIST_INCREMENT); + OPENSSL_realloc(garg->gid_arr, + (garg->gidmax + GROUPLIST_INCREMENT) * sizeof(*garg->gid_arr)); if (tmp == NULL) return 0; garg->gidmax += GROUPLIST_INCREMENT; @@ -3400,6 +3401,8 @@ int SSL_set_tlsext_max_fragment_length(SSL *ssl, uint8_t mode) uint8_t SSL_SESSION_get_max_fragment_length(const SSL_SESSION *session) { + if (session->ext.max_fragment_len_mode == TLSEXT_max_fragment_length_UNSPECIFIED) + return TLSEXT_max_fragment_length_DISABLED; return session->ext.max_fragment_len_mode; } diff --git a/deps/openssl/openssl/test/bad_dtls_test.c b/deps/openssl/openssl/test/bad_dtls_test.c index e6ee1ea09f6de9..dc3d4bc0d75001 100644 --- a/deps/openssl/openssl/test/bad_dtls_test.c +++ b/deps/openssl/openssl/test/bad_dtls_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -503,7 +503,6 @@ static int test_bad_dtls(void) if (!TEST_ptr(con) || !TEST_true(SSL_set_session(con, sess))) goto end; - SSL_SESSION_free(sess); rbio = BIO_new(BIO_s_mem()); wbio = BIO_new(BIO_s_mem()); @@ -591,6 +590,7 @@ static int test_bad_dtls(void) testresult = 1; end: + SSL_SESSION_free(sess); BIO_free(rbio); BIO_free(wbio); SSL_free(con); diff --git a/deps/openssl/openssl/test/build.info b/deps/openssl/openssl/test/build.info index 416c2270771aa9..76ff945ab8b390 100644 --- a/deps/openssl/openssl/test/build.info +++ b/deps/openssl/openssl/test/build.info @@ -40,7 +40,7 @@ IF[{- !$disabled{tests} -}] exptest pbetest localetest evp_pkey_ctx_new_from_name\ evp_pkey_provided_test evp_test evp_extra_test evp_extra_test2 \ evp_fetch_prov_test evp_libctx_test ossl_store_test \ - v3nametest v3ext punycode_test \ + v3nametest v3ext punycode_test evp_byname_test \ crltest danetest bad_dtls_test lhash_test sparse_array_test \ conf_include_test params_api_test params_conversion_test \ constant_time_test verify_extra_test clienthellotest \ @@ -305,6 +305,10 @@ IF[{- !$disabled{tests} -}] INCLUDE[punycode_test]=../include ../apps/include DEPEND[punycode_test]=../libcrypto.a libtestutil.a + SOURCE[evp_byname_test]=evp_byname_test.c + INCLUDE[evp_byname_test]=../include ../apps/include + DEPEND[evp_byname_test]=../libcrypto libtestutil.a + SOURCE[stack_test]=stack_test.c INCLUDE[stack_test]=../include ../apps/include DEPEND[stack_test]=../libcrypto libtestutil.a @@ -874,6 +878,7 @@ IF[{- !$disabled{tests} -}] ENDIF IF[{- $disabled{module} || !$target{dso_scheme} -}] DEFINE[provider_test]=NO_PROVIDER_MODULE + DEFINE[prov_config_test]=NO_PROVIDER_MODULE DEFINE[provider_internal_test]=NO_PROVIDER_MODULE ENDIF DEPEND[]=provider_internal_test.cnf diff --git a/deps/openssl/openssl/test/cmp_hdr_test.c b/deps/openssl/openssl/test/cmp_hdr_test.c index ed49a0bb619901..e2bd210118d893 100644 --- a/deps/openssl/openssl/test/cmp_hdr_test.c +++ b/deps/openssl/openssl/test/cmp_hdr_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2007-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2007-2024 The OpenSSL Project Authors. All Rights Reserved. 
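The gid_cb() fix above is a classic realloc bug: the grow size was computed in elements but passed as bytes, undersizing the uint16_t group-ID array. The corrected idiom in isolation, with illustrative names:

    #include <stdint.h>
    #include <stdlib.h>

    /* Grow an array of uint16_t by 'increment' elements. The multiplication
     * by sizeof(*arr) is the part the original call was missing. */
    static uint16_t *grow_gid_array(uint16_t *arr, size_t *cap,
                                    size_t increment)
    {
        uint16_t *tmp = realloc(arr, (*cap + increment) * sizeof(*arr));

        if (tmp != NULL)
            *cap += increment;
        return tmp;   /* caller keeps the old 'arr' if NULL is returned */
    }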
* Copyright Nokia 2007-2019 * Copyright Siemens AG 2015-2019 * @@ -71,25 +71,30 @@ static int test_HDR_set_get_pvno(void) static int execute_HDR_get0_senderNonce_test(CMP_HDR_TEST_FIXTURE *fixture) { + int res = 0; X509_NAME *sender = X509_NAME_new(); ASN1_OCTET_STRING *sn; if (!TEST_ptr(sender)) - return 0; + goto err; X509_NAME_ADD(sender, "CN", "A common sender name"); if (!TEST_int_eq(OSSL_CMP_CTX_set1_subjectName(fixture->cmp_ctx, sender), 1)) - return 0; + goto err; if (!TEST_int_eq(ossl_cmp_hdr_init(fixture->cmp_ctx, fixture->hdr), 1)) - return 0; + goto err; sn = ossl_cmp_hdr_get0_senderNonce(fixture->hdr); if (!TEST_int_eq(ASN1_OCTET_STRING_cmp(fixture->cmp_ctx->senderNonce, sn), 0)) - return 0; + goto err; + + res = 1; +err: X509_NAME_free(sender); - return 1; + + return res; } static int test_HDR_get0_senderNonce(void) @@ -102,23 +107,28 @@ static int test_HDR_get0_senderNonce(void) static int execute_HDR_set1_sender_test(CMP_HDR_TEST_FIXTURE *fixture) { + int res = 0; X509_NAME *x509name = X509_NAME_new(); if (!TEST_ptr(x509name)) - return 0; + goto err; X509_NAME_ADD(x509name, "CN", "A common sender name"); if (!TEST_int_eq(ossl_cmp_hdr_set1_sender(fixture->hdr, x509name), 1)) - return 0; + goto err; + if (!TEST_int_eq(fixture->hdr->sender->type, GEN_DIRNAME)) - return 0; + goto err; if (!TEST_int_eq(X509_NAME_cmp(fixture->hdr->sender->d.directoryName, x509name), 0)) - return 0; + goto err; + res = 1; +err: X509_NAME_free(x509name); - return 1; + + return res; } static int test_HDR_set1_sender(void) @@ -131,24 +141,28 @@ static int test_HDR_set1_sender(void) static int execute_HDR_set1_recipient_test(CMP_HDR_TEST_FIXTURE *fixture) { + int res = 0; X509_NAME *x509name = X509_NAME_new(); if (!TEST_ptr(x509name)) - return 0; + goto err; X509_NAME_ADD(x509name, "CN", "A common recipient name"); if (!TEST_int_eq(ossl_cmp_hdr_set1_recipient(fixture->hdr, x509name), 1)) - return 0; + goto err; if (!TEST_int_eq(fixture->hdr->recipient->type, GEN_DIRNAME)) - return 0; + goto err; if (!TEST_int_eq(X509_NAME_cmp(fixture->hdr->recipient->d.directoryName, x509name), 0)) - return 0; + goto err; + res = 1; +err: X509_NAME_free(x509name); - return 1; + + return res; } static int test_HDR_set1_recipient(void) @@ -203,7 +217,7 @@ static int execute_HDR_set1_senderKID_test(CMP_HDR_TEST_FIXTURE *fixture) int res = 0; if (!TEST_ptr(senderKID)) - return 0; + goto err; if (!TEST_int_eq(ASN1_OCTET_STRING_set(senderKID, rand_data, sizeof(rand_data)), 1)) @@ -265,7 +279,7 @@ static int execute_HDR_push1_freeText_test(CMP_HDR_TEST_FIXTURE *fixture) int res = 0; if (!TEST_ptr(text)) - return 0; + goto err; if (!ASN1_STRING_set(text, "A free text", -1)) goto err; @@ -280,6 +294,7 @@ static int execute_HDR_push1_freeText_test(CMP_HDR_TEST_FIXTURE *fixture) res = 1; err: ASN1_UTF8STRING_free(text); + return res; } diff --git a/deps/openssl/openssl/test/crltest.c b/deps/openssl/openssl/test/crltest.c index d17fac43f82a31..c96561c69bfa92 100644 --- a/deps/openssl/openssl/test/crltest.c +++ b/deps/openssl/openssl/test/crltest.c @@ -1,5 +1,5 @@ /* - * Copyright 2015-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2015-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
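The cmp_hdr_test.c hunks above all apply the same repair: early TEST_* failures used to return directly and leak the freshly allocated X509_NAME or ASN1 string. The shape they converge on, reduced to a self-contained sketch without the testutil macros:

    #include <openssl/x509.h>

    static int execute_example(void)
    {
        int res = 0;
        X509_NAME *name = X509_NAME_new();

        if (name == NULL)
            goto err;
        /* ... assertions that may fail and jump to err ... */
        res = 1;
     err:
        X509_NAME_free(name);   /* safe on NULL; runs on every path */
        return res;
    }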
You can obtain a copy @@ -101,6 +101,13 @@ static const char *kRevokedCRL[] = { NULL }; +static const char *kInvalidCRL[] = { + "-----BEGIN X509 CRL-----\n", + "AAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAAA\n", + "-----END X509 CRL-----\n", + NULL +}; + static const char *kBadIssuerCRL[] = { "-----BEGIN X509 CRL-----\n", "MIIBwjCBqwIBATANBgkqhkiG9w0BAQsFADBSMQswCQYDVQQGEwJVUzETMBEGA1UE\n", @@ -371,24 +378,58 @@ static int test_unknown_critical_crl(int n) return r; } -static int test_reuse_crl(void) +static int test_reuse_crl(int idx) { - X509_CRL *reused_crl = CRL_from_strings(kBasicCRL); - char *p; - BIO *b = glue2bio(kRevokedCRL, &p); + X509_CRL *result, *reused_crl = CRL_from_strings(kBasicCRL); + X509_CRL *addref_crl = NULL; + char *p = NULL; + BIO *b = NULL; + int r = 0; - if (b == NULL) { - OPENSSL_free(p); - X509_CRL_free(reused_crl); - return 0; + if (!TEST_ptr(reused_crl)) + goto err; + + if (idx & 1) { + if (!TEST_true(X509_CRL_up_ref(reused_crl))) + goto err; + addref_crl = reused_crl; } - reused_crl = PEM_read_bio_X509_CRL(b, &reused_crl, NULL, NULL); + idx >>= 1; + b = glue2bio(idx == 2 ? kRevokedCRL : kInvalidCRL + idx, &p); + if (!TEST_ptr(b)) + goto err; + + result = PEM_read_bio_X509_CRL(b, &reused_crl, NULL, NULL); + + switch (idx) { + case 0: /* valid PEM + invalid DER */ + if (!TEST_ptr_null(result) + || !TEST_ptr_null(reused_crl)) + goto err; + break; + case 1: /* invalid PEM */ + if (!TEST_ptr_null(result) + || !TEST_ptr(reused_crl)) + goto err; + break; + case 2: + if (!TEST_ptr(result) + || !TEST_ptr(reused_crl) + || !TEST_ptr_eq(result, reused_crl)) + goto err; + break; + } + + r = 1; + + err: OPENSSL_free(p); BIO_free(b); X509_CRL_free(reused_crl); - return 1; + X509_CRL_free(addref_crl); + return r; } int setup_tests(void) @@ -402,7 +443,7 @@ int setup_tests(void) ADD_TEST(test_bad_issuer_crl); ADD_TEST(test_known_critical_crl); ADD_ALL_TESTS(test_unknown_critical_crl, OSSL_NELEM(unknown_critical_crls)); - ADD_TEST(test_reuse_crl); + ADD_ALL_TESTS(test_reuse_crl, 6); return 1; } diff --git a/deps/openssl/openssl/test/ct_test.c b/deps/openssl/openssl/test/ct_test.c index 26d5bc1084503d..ff253414f8063f 100644 --- a/deps/openssl/openssl/test/ct_test.c +++ b/deps/openssl/openssl/test/ct_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -450,13 +450,18 @@ static int test_encode_tls_sct(void) fixture->sct_list = sk_SCT_new_null(); if (fixture->sct_list == NULL) - return 0; + { + tear_down(fixture); + return 0; + } if (!TEST_ptr(sct = SCT_new_from_base64(SCT_VERSION_V1, log_id, CT_LOG_ENTRY_TYPE_X509, timestamp, extensions, signature))) - + { + tear_down(fixture); return 0; + } sk_SCT_push(fixture->sct_list, sct); fixture->sct_dir = ct_dir; diff --git a/deps/openssl/openssl/test/dsatest.c b/deps/openssl/openssl/test/dsatest.c index 5fa83020f87a22..b849105d33d8f2 100644 --- a/deps/openssl/openssl/test/dsatest.c +++ b/deps/openssl/openssl/test/dsatest.c @@ -1,5 +1,5 @@ /* - * Copyright 1995-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 1995-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. 
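The reworked test_reuse_crl() above pins down the reuse-mode contract of PEM_read_bio_X509_CRL(): with valid PEM wrapping bad DER, the passed-in CRL is freed and nulled; with unparsable PEM it is left untouched; on success the same object is returned. A caller that must keep the old object alive therefore takes its own reference first, as the test's up-ref branch does. A sketch with simplified error handling:

    #include <openssl/pem.h>
    #include <openssl/x509.h>

    int reparse_crl(BIO *b, X509_CRL **crl_io)
    {
        X509_CRL *original = *crl_io;
        X509_CRL *result;

        if (!X509_CRL_up_ref(original))       /* keep a spare reference */
            return 0;
        result = PEM_read_bio_X509_CRL(b, crl_io, NULL, NULL);
        if (result == NULL) {
            if (*crl_io == NULL)              /* valid PEM, bad DER: freed */
                *crl_io = original;           /* hand the spare back */
            else                              /* bad PEM: slot untouched */
                X509_CRL_free(original);      /* drop the spare */
            return 0;
        }
        X509_CRL_free(original);              /* success reuses the object */
        return 1;
    }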
You can obtain a copy @@ -332,6 +332,7 @@ static int test_dsa_sig_infinite_loop(void) BIGNUM *p = NULL, *q = NULL, *g = NULL, *priv = NULL, *pub = NULL, *priv2 = NULL; BIGNUM *badq = NULL, *badpriv = NULL; const unsigned char msg[] = { 0x00 }; + unsigned int signature_len0; unsigned int signature_len; unsigned char signature[64]; @@ -375,10 +376,13 @@ static int test_dsa_sig_infinite_loop(void) goto err; /* Test passing signature as NULL */ - if (!TEST_true(DSA_sign(0, msg, sizeof(msg), NULL, &signature_len, dsa))) + if (!TEST_true(DSA_sign(0, msg, sizeof(msg), NULL, &signature_len0, dsa)) + || !TEST_int_gt(signature_len0, 0)) goto err; - if (!TEST_true(DSA_sign(0, msg, sizeof(msg), signature, &signature_len, dsa))) + if (!TEST_true(DSA_sign(0, msg, sizeof(msg), signature, &signature_len, dsa)) + || !TEST_int_gt(signature_len, 0) + || !TEST_int_le(signature_len, signature_len0)) goto err; /* Test using a private key of zero fails - this causes an infinite loop without the retry test */ diff --git a/deps/openssl/openssl/test/ecdsatest.c b/deps/openssl/openssl/test/ecdsatest.c index 33a52eb1b5624d..0ddbf6690dcace 100644 --- a/deps/openssl/openssl/test/ecdsatest.c +++ b/deps/openssl/openssl/test/ecdsatest.c @@ -1,5 +1,5 @@ /* - * Copyright 2002-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2002-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright (c) 2002, Oracle and/or its affiliates. All rights reserved * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -350,15 +350,39 @@ static int test_builtin_as_sm2(int n) static int test_ecdsa_sig_NULL(void) { int ret; + unsigned int siglen0; unsigned int siglen; unsigned char dgst[128] = { 0 }; EC_KEY *eckey = NULL; + unsigned char *sig = NULL; + BIGNUM *kinv = NULL, *rp = NULL; ret = TEST_ptr(eckey = EC_KEY_new_by_curve_name(NID_X9_62_prime256v1)) && TEST_int_eq(EC_KEY_generate_key(eckey), 1) - && TEST_int_eq(ECDSA_sign(0, dgst, sizeof(dgst), NULL, &siglen, eckey), 1) - && TEST_int_gt(siglen, 0); + && TEST_int_eq(ECDSA_sign(0, dgst, sizeof(dgst), NULL, &siglen0, + eckey), 1) + && TEST_int_gt(siglen0, 0) + && TEST_ptr(sig = OPENSSL_malloc(siglen0)) + && TEST_int_eq(ECDSA_sign(0, dgst, sizeof(dgst), sig, &siglen, + eckey), 1) + && TEST_int_gt(siglen, 0) + && TEST_int_le(siglen, siglen0) + && TEST_int_eq(ECDSA_verify(0, dgst, sizeof(dgst), sig, siglen, + eckey), 1) + && TEST_int_eq(ECDSA_sign_setup(eckey, NULL, &kinv, &rp), 1) + && TEST_int_eq(ECDSA_sign_ex(0, dgst, sizeof(dgst), NULL, &siglen, + kinv, rp, eckey), 1) + && TEST_int_gt(siglen, 0) + && TEST_int_le(siglen, siglen0) + && TEST_int_eq(ECDSA_sign_ex(0, dgst, sizeof(dgst), sig, &siglen0, + kinv, rp, eckey), 1) + && TEST_int_eq(siglen, siglen0) + && TEST_int_eq(ECDSA_verify(0, dgst, sizeof(dgst), sig, siglen, + eckey), 1); EC_KEY_free(eckey); + OPENSSL_free(sig); + BN_free(kinv); + BN_free(rp); return ret; } diff --git a/deps/openssl/openssl/test/ecstresstest.c b/deps/openssl/openssl/test/ecstresstest.c index 22d46c50da2cc9..237def095c7e24 100644 --- a/deps/openssl/openssl/test/ecstresstest.c +++ b/deps/openssl/openssl/test/ecstresstest.c @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"); * you may not use this file except in compliance with the License. 
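The extended DSA/ECDSA tests above assert the documented probe-then-sign contract: a NULL signature buffer makes DSA_sign()/ECDSA_sign() report the maximum signature length, and the real call then returns the actual DER length, which may be shorter. The pattern as a sketch; the helper name is mine:

    #include <openssl/crypto.h>
    #include <openssl/ec.h>

    int sign_digest(EC_KEY *key, const unsigned char *dgst, int dlen,
                    unsigned char **sig, unsigned int *siglen)
    {
        unsigned int maxlen = 0;

        if (ECDSA_sign(0, dgst, dlen, NULL, &maxlen, key) != 1)   /* probe */
            return 0;
        if ((*sig = OPENSSL_malloc(maxlen)) == NULL)
            return 0;
        if (ECDSA_sign(0, dgst, dlen, *sig, siglen, key) != 1) {  /* sign */
            OPENSSL_free(*sig);
            *sig = NULL;
            return 0;
        }
        return 1;   /* *siglen <= maxlen; DER encoding varies in length */
    }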
@@ -79,7 +79,7 @@ static int test_curve(void) || !TEST_ptr(point = EC_POINT_dup(EC_GROUP_get0_generator(group), group)) || !TEST_ptr(result = walk_curve(group, point, num_repeats))) - return 0; + goto err; if (print_mode) { BN_print(bio_out, result); diff --git a/deps/openssl/openssl/test/endecode_test.c b/deps/openssl/openssl/test/endecode_test.c index 5158b39ee41f19..0611d94216f014 100644 --- a/deps/openssl/openssl/test/endecode_test.c +++ b/deps/openssl/openssl/test/endecode_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -1028,6 +1028,10 @@ DOMAIN_KEYS(ECExplicitTri2G); IMPLEMENT_TEST_SUITE(ECExplicitTri2G, "EC", 0) IMPLEMENT_TEST_SUITE_LEGACY(ECExplicitTri2G, "EC") # endif +# ifndef OPENSSL_NO_SM2 +KEYS(SM2); +IMPLEMENT_TEST_SUITE(SM2, "SM2", 0) +# endif KEYS(ED25519); IMPLEMENT_TEST_SUITE(ED25519, "ED25519", 1) KEYS(ED448); @@ -1333,9 +1337,7 @@ int setup_tests(void) } /* FIPS(3.0.0): provider imports explicit params but they won't work #17998 */ - is_fips_3_0_0 = fips_provider_version_eq(testctx, 3, 0, 0); - if (is_fips_3_0_0 < 0) - return 0; + is_fips_3_0_0 = is_fips && fips_provider_version_eq(testctx, 3, 0, 0); /* Separate provider/ctx for generating the test data */ if (!TEST_ptr(keyctx = OSSL_LIB_CTX_new())) @@ -1382,6 +1384,9 @@ int setup_tests(void) # ifndef OPENSSL_NO_EC2M MAKE_DOMAIN_KEYS(ECExplicitTriNamedCurve, "EC", ec_explicit_tri_params_nc); MAKE_DOMAIN_KEYS(ECExplicitTri2G, "EC", ec_explicit_tri_params_explicit); +# endif +# ifndef OPENSSL_NO_SM2 + MAKE_KEYS(SM2, "SM2", NULL); # endif MAKE_KEYS(ED25519, "ED25519", NULL); MAKE_KEYS(ED448, "ED448", NULL); @@ -1428,6 +1433,12 @@ int setup_tests(void) ADD_TEST_SUITE_LEGACY(ECExplicitTriNamedCurve); ADD_TEST_SUITE(ECExplicitTri2G); ADD_TEST_SUITE_LEGACY(ECExplicitTri2G); +# endif +# ifndef OPENSSL_NO_SM2 + if (!is_fips_3_0_0) { + /* 3.0.0 FIPS provider imports explicit EC params and then fails. */ + ADD_TEST_SUITE(SM2); + } # endif ADD_TEST_SUITE(ED25519); ADD_TEST_SUITE(ED448); @@ -1485,6 +1496,9 @@ void cleanup_tests(void) # ifndef OPENSSL_NO_EC2M FREE_DOMAIN_KEYS(ECExplicitTriNamedCurve); FREE_DOMAIN_KEYS(ECExplicitTri2G); +# endif +# ifndef OPENSSL_NO_SM2 + FREE_KEYS(SM2); # endif FREE_KEYS(ED25519); FREE_KEYS(ED448); diff --git a/deps/openssl/openssl/test/evp_byname_test.c b/deps/openssl/openssl/test/evp_byname_test.c new file mode 100644 index 00000000000000..e16e27a3a5ec33 --- /dev/null +++ b/deps/openssl/openssl/test/evp_byname_test.c @@ -0,0 +1,40 @@ +/* + * Copyright 2024 The OpenSSL Project Authors. All Rights Reserved. + * + * Licensed under the Apache License 2.0 (the "License"). You may not use + * this file except in compliance with the License. 
You can obtain a copy + * in the file LICENSE in the source distribution or at + * https://www.openssl.org/source/license.html + */ + +#include <string.h> +#include <stdio.h> +#include <stdlib.h> + +#include <openssl/evp.h> +#include "testutil.h" + +static int test_evp_get_digestbyname(void) +{ + const EVP_MD *md; + + if (!TEST_ptr(md = EVP_get_digestbyname("SHA2-256"))) + return 0; + return 1; +} + +static int test_evp_get_cipherbyname(void) +{ + const EVP_CIPHER *cipher; + + if (!TEST_ptr(cipher = EVP_get_cipherbyname("AES-256-WRAP"))) + return 0; + return 1; +} + +int setup_tests(void) +{ + ADD_TEST(test_evp_get_digestbyname); + ADD_TEST(test_evp_get_cipherbyname); + return 1; +} diff --git a/deps/openssl/openssl/test/evp_extra_test.c b/deps/openssl/openssl/test/evp_extra_test.c index 6b484f8711ce65..c5fbbf8a830921 100644 --- a/deps/openssl/openssl/test/evp_extra_test.c +++ b/deps/openssl/openssl/test/evp_extra_test.c @@ -1100,7 +1100,7 @@ static int test_EC_priv_only_legacy(void) goto err; eckey = NULL; - while (dup_pk == NULL) { + for (;;) { ret = 0; ctx = EVP_MD_CTX_new(); if (!TEST_ptr(ctx)) @@ -1116,6 +1116,9 @@ static int test_EC_priv_only_legacy(void) EVP_MD_CTX_free(ctx); ctx = NULL; + if (dup_pk != NULL) + break; + if (!TEST_ptr(dup_pk = EVP_PKEY_dup(pkey))) goto err; /* EVP_PKEY_eq() returns -2 with missing public keys */ @@ -1125,6 +1128,7 @@ static int test_EC_priv_only_legacy(void) if (!ret) goto err; } + ret = 1; err: EVP_MD_CTX_free(ctx); @@ -2583,6 +2587,47 @@ static int test_emptyikm_HKDF(void) return ret; } +static int test_empty_salt_info_HKDF(void) +{ + EVP_PKEY_CTX *pctx; + unsigned char out[20]; + size_t outlen; + int ret = 0; + unsigned char salt[] = ""; + unsigned char key[] = "012345678901234567890123456789"; + unsigned char info[] = ""; + const unsigned char expected[] = { + 0x67, 0x12, 0xf9, 0x27, 0x8a, 0x8a, 0x3a, 0x8f, 0x7d, 0x2c, 0xa3, 0x6a, + 0xaa, 0xe9, 0xb3, 0xb9, 0x52, 0x5f, 0xe0, 0x06, + }; + size_t expectedlen = sizeof(expected); + + if (!TEST_ptr(pctx = EVP_PKEY_CTX_new_from_name(testctx, "HKDF", testpropq))) + goto done; + + outlen = sizeof(out); + memset(out, 0, outlen); + + if (!TEST_int_gt(EVP_PKEY_derive_init(pctx), 0) + || !TEST_int_gt(EVP_PKEY_CTX_set_hkdf_md(pctx, EVP_sha256()), 0) + || !TEST_int_gt(EVP_PKEY_CTX_set1_hkdf_salt(pctx, salt, + sizeof(salt) - 1), 0) + || !TEST_int_gt(EVP_PKEY_CTX_set1_hkdf_key(pctx, key, + sizeof(key) - 1), 0) + || !TEST_int_gt(EVP_PKEY_CTX_add1_hkdf_info(pctx, info, + sizeof(info) - 1), 0) + || !TEST_int_gt(EVP_PKEY_derive(pctx, out, &outlen), 0) + || !TEST_mem_eq(out, outlen, expected, expectedlen)) + goto done; + + ret = 1; + + done: + EVP_PKEY_CTX_free(pctx); + + return ret; +} + #ifndef OPENSSL_NO_EC static int test_X509_PUBKEY_inplace(void) { @@ -5306,6 +5351,25 @@ static int test_aes_rc4_keylen_change_cve_2023_5363(void) } #endif +static int test_invalid_ctx_for_digest(void) +{ + int ret; + EVP_MD_CTX *mdctx; + + mdctx = EVP_MD_CTX_new(); + if (!TEST_ptr(mdctx)) + return 0; + + if (!TEST_int_eq(EVP_DigestUpdate(mdctx, "test", sizeof("test") - 1), 0)) + ret = 0; + else + ret = 1; + + EVP_MD_CTX_free(mdctx); + + return ret; +} + int setup_tests(void) { OPTION_CHOICE o; @@ -5381,6 +5445,7 @@ int setup_tests(void) #endif ADD_TEST(test_HKDF); ADD_TEST(test_emptyikm_HKDF); + ADD_TEST(test_empty_salt_info_HKDF); #ifndef OPENSSL_NO_EC ADD_TEST(test_X509_PUBKEY_inplace); ADD_TEST(test_X509_PUBKEY_dup); @@ -5468,6 +5533,8 @@ int setup_tests(void) ADD_TEST(test_aes_rc4_keylen_change_cve_2023_5363); #endif + ADD_TEST(test_invalid_ctx_for_digest); + return 1; } diff
--git a/deps/openssl/openssl/test/evp_pkey_provided_test.c b/deps/openssl/openssl/test/evp_pkey_provided_test.c index 27f90e42a7c1c3..688a8c1c5e558c 100644 --- a/deps/openssl/openssl/test/evp_pkey_provided_test.c +++ b/deps/openssl/openssl/test/evp_pkey_provided_test.c @@ -389,7 +389,7 @@ static int test_fromdata_rsa(void) fromdata_params), 1)) goto err; - while (dup_pk == NULL) { + for (;;) { ret = 0; if (!TEST_int_eq(EVP_PKEY_get_bits(pk), 32) || !TEST_int_eq(EVP_PKEY_get_security_bits(pk), 8) @@ -417,7 +417,10 @@ static int test_fromdata_rsa(void) ret = test_print_key_using_pem("RSA", pk) && test_print_key_using_encoder("RSA", pk); - if (!ret || !TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) + if (!ret || dup_pk != NULL) + break; + + if (!TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) goto err; ret = ret && TEST_int_eq(EVP_PKEY_eq(pk, dup_pk), 1); EVP_PKEY_free(pk); @@ -602,7 +605,7 @@ static int test_fromdata_dh_named_group(void) &len))) goto err; - while (dup_pk == NULL) { + for (;;) { ret = 0; if (!TEST_int_eq(EVP_PKEY_get_bits(pk), 2048) || !TEST_int_eq(EVP_PKEY_get_security_bits(pk), 112) @@ -682,7 +685,10 @@ static int test_fromdata_dh_named_group(void) ret = test_print_key_using_pem("DH", pk) && test_print_key_using_encoder("DH", pk); - if (!ret || !TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) + if (!ret || dup_pk != NULL) + break; + + if (!TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) goto err; ret = ret && TEST_int_eq(EVP_PKEY_eq(pk, dup_pk), 1); EVP_PKEY_free(pk); @@ -783,7 +789,7 @@ static int test_fromdata_dh_fips186_4(void) fromdata_params), 1)) goto err; - while (dup_pk == NULL) { + for (;;) { ret = 0; if (!TEST_int_eq(EVP_PKEY_get_bits(pk), 2048) || !TEST_int_eq(EVP_PKEY_get_security_bits(pk), 112) @@ -857,7 +863,10 @@ static int test_fromdata_dh_fips186_4(void) ret = test_print_key_using_pem("DH", pk) && test_print_key_using_encoder("DH", pk); - if (!ret || !TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) + if (!ret || dup_pk != NULL) + break; + + if (!TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) goto err; ret = ret && TEST_int_eq(EVP_PKEY_eq(pk, dup_pk), 1); EVP_PKEY_free(pk); @@ -1090,7 +1099,7 @@ static int test_fromdata_ecx(int tst) fromdata_params), 1)) goto err; - while (dup_pk == NULL) { + for (;;) { ret = 0; if (!TEST_int_eq(EVP_PKEY_get_bits(pk), bits) || !TEST_int_eq(EVP_PKEY_get_security_bits(pk), security_bits) @@ -1145,7 +1154,10 @@ static int test_fromdata_ecx(int tst) ret = test_print_key_using_pem(alg, pk) && test_print_key_using_encoder(alg, pk); - if (!ret || !TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) + if (!ret || dup_pk != NULL) + break; + + if (!TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) goto err; ret = ret && TEST_int_eq(EVP_PKEY_eq(pk, dup_pk), 1); EVP_PKEY_free(pk); @@ -1262,7 +1274,7 @@ static int test_fromdata_ec(void) fromdata_params), 1)) goto err; - while (dup_pk == NULL) { + for (;;) { ret = 0; if (!TEST_int_eq(EVP_PKEY_get_bits(pk), 256) || !TEST_int_eq(EVP_PKEY_get_security_bits(pk), 128) @@ -1301,6 +1313,15 @@ static int test_fromdata_ec(void) || !TEST_BN_eq(group_b, b)) goto err; + EC_GROUP_free(group); + group = NULL; + BN_free(group_p); + group_p = NULL; + BN_free(group_a); + group_a = NULL; + BN_free(group_b); + group_b = NULL; + if (!EVP_PKEY_get_utf8_string_param(pk, OSSL_PKEY_PARAM_GROUP_NAME, out_curve_name, sizeof(out_curve_name), @@ -1329,7 +1350,10 @@ static int test_fromdata_ec(void) ret = test_print_key_using_pem(alg, pk) && test_print_key_using_encoder(alg, pk); - if (!ret || !TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) + if (!ret || dup_pk != NULL) + break; + + if (!TEST_ptr(dup_pk = 
EVP_PKEY_dup(pk))) goto err; ret = ret && TEST_int_eq(EVP_PKEY_eq(pk, dup_pk), 1); EVP_PKEY_free(pk); @@ -1575,7 +1599,7 @@ static int test_fromdata_dsa_fips186_4(void) fromdata_params), 1)) goto err; - while (dup_pk == NULL) { + for (;;) { ret = 0; if (!TEST_int_eq(EVP_PKEY_get_bits(pk), 2048) || !TEST_int_eq(EVP_PKEY_get_security_bits(pk), 112) @@ -1624,12 +1648,12 @@ static int test_fromdata_dsa_fips186_4(void) &pcounter_out)) || !TEST_int_eq(pcounter, pcounter_out)) goto err; - BN_free(p); - p = NULL; - BN_free(q); - q = NULL; - BN_free(g); - g = NULL; + BN_free(p_out); + p_out = NULL; + BN_free(q_out); + q_out = NULL; + BN_free(g_out); + g_out = NULL; BN_free(j_out); j_out = NULL; BN_free(pub_out); @@ -1657,7 +1681,10 @@ static int test_fromdata_dsa_fips186_4(void) ret = test_print_key_using_pem("DSA", pk) && test_print_key_using_encoder("DSA", pk); - if (!ret || !TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) + if (!ret || dup_pk != NULL) + break; + + if (!TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) goto err; ret = ret && TEST_int_eq(EVP_PKEY_eq(pk, dup_pk), 1); EVP_PKEY_free(pk); diff --git a/deps/openssl/openssl/test/evp_test.c b/deps/openssl/openssl/test/evp_test.c index 782841a69258b0..2701040dabe7fc 100644 --- a/deps/openssl/openssl/test/evp_test.c +++ b/deps/openssl/openssl/test/evp_test.c @@ -2773,30 +2773,33 @@ static int kdf_test_ctrl(EVP_TEST *t, EVP_KDF_CTX *kctx, if (!TEST_ptr(name = OPENSSL_strdup(value))) return 0; p = strchr(name, ':'); - if (p != NULL) + if (p == NULL) + p = ""; + else *p++ = '\0'; rv = OSSL_PARAM_allocate_from_text(kdata->p, defs, name, p, - p != NULL ? strlen(p) : 0, NULL); + strlen(p), NULL); *++kdata->p = OSSL_PARAM_construct_end(); if (!rv) { t->err = "KDF_PARAM_ERROR"; OPENSSL_free(name); return 0; } - if (p != NULL && strcmp(name, "digest") == 0) { + if (strcmp(name, "digest") == 0) { if (is_digest_disabled(p)) { TEST_info("skipping, '%s' is disabled", p); t->skip = 1; } } - if (p != NULL - && (strcmp(name, "cipher") == 0 - || strcmp(name, "cekalg") == 0) + + if ((strcmp(name, "cipher") == 0 + || strcmp(name, "cekalg") == 0) && is_cipher_disabled(p)) { TEST_info("skipping, '%s' is disabled", p); t->skip = 1; } + OPENSSL_free(name); return 1; } diff --git a/deps/openssl/openssl/test/helpers/handshake.c b/deps/openssl/openssl/test/helpers/handshake.c index 285391bc03b64f..5744c6a54ee54e 100644 --- a/deps/openssl/openssl/test/helpers/handshake.c +++ b/deps/openssl/openssl/test/helpers/handshake.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -348,6 +348,12 @@ static int parse_protos(const char *protos, unsigned char **out, size_t *outlen) len = strlen(protos); + if (len == 0) { + *out = NULL; + *outlen = 0; + return 1; + } + /* Should never have reuse. */ if (!TEST_ptr_null(*out) /* Test values are small, so we omit length limit checks. */ diff --git a/deps/openssl/openssl/test/helpers/ssltestlib.c b/deps/openssl/openssl/test/helpers/ssltestlib.c index ef4a6177aa7ddc..b0ef7d719c5366 100644 --- a/deps/openssl/openssl/test/helpers/ssltestlib.c +++ b/deps/openssl/openssl/test/helpers/ssltestlib.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. 
* * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -7,8 +7,17 @@ * https://www.openssl.org/source/license.html */ +/* + * We need access to the deprecated low level ENGINE APIs for legacy purposes + * when the deprecated calls are not hidden + */ +#ifndef OPENSSL_NO_DEPRECATED_3_0 +# define OPENSSL_SUPPRESS_DEPRECATED +#endif + #include <string.h> +#include <openssl/engine.h> #include "internal/nelem.h" #include "ssltestlib.h" #include "../testutil.h" @@ -1182,3 +1191,27 @@ void shutdown_ssl_connection(SSL *serverssl, SSL *clientssl) SSL_free(serverssl); SSL_free(clientssl); } + +ENGINE *load_dasync(void) +{ +#if !defined(OPENSSL_NO_TLS1_2) && !defined(OPENSSL_NO_DYNAMIC_ENGINE) + ENGINE *e; + + if (!TEST_ptr(e = ENGINE_by_id("dasync"))) + return NULL; + + if (!TEST_true(ENGINE_init(e))) { + ENGINE_free(e); + return NULL; + } + + if (!TEST_true(ENGINE_register_ciphers(e))) { + ENGINE_free(e); + return NULL; + } + + return e; +#else + return NULL; +#endif +} diff --git a/deps/openssl/openssl/test/helpers/ssltestlib.h b/deps/openssl/openssl/test/helpers/ssltestlib.h index 8e9daa5601d3ea..0fbca34afa7504 100644 --- a/deps/openssl/openssl/test/helpers/ssltestlib.h +++ b/deps/openssl/openssl/test/helpers/ssltestlib.h @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -59,4 +59,5 @@ typedef struct mempacket_st MEMPACKET; DEFINE_STACK_OF(MEMPACKET) +ENGINE *load_dasync(void); #endif /* OSSL_TEST_SSLTESTLIB_H */ diff --git a/deps/openssl/openssl/test/hexstr_test.c b/deps/openssl/openssl/test/hexstr_test.c index 5a9684e0e69779..566615ed6d6857 100644 --- a/deps/openssl/openssl/test/hexstr_test.c +++ b/deps/openssl/openssl/test/hexstr_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -120,9 +120,14 @@ static int test_hexstr_ex_to_from(int test_index) return TEST_true(OPENSSL_hexstr2buf_ex(buf, sizeof(buf), &len, test->in, ':')) && TEST_mem_eq(buf, len, test->expected, test->expected_len) + && TEST_false(OPENSSL_buf2hexstr_ex(out, 3 * len - 1, NULL, buf, len, + ':')) && TEST_true(OPENSSL_buf2hexstr_ex(out, sizeof(out), NULL, buf, len, - ':')) - && TEST_str_eq(out, test->in); + ':')) + && TEST_str_eq(out, test->in) + && TEST_true(OPENSSL_buf2hexstr_ex(out, sizeof(out), NULL, buf, 0, + ':')) + && TEST_size_t_eq(strlen(out), 0); } int setup_tests(void) diff --git a/deps/openssl/openssl/test/keymgmt_internal_test.c b/deps/openssl/openssl/test/keymgmt_internal_test.c index ce2e458f8c311d..8d5aa22dab3ec8 100644 --- a/deps/openssl/openssl/test/keymgmt_internal_test.c +++ b/deps/openssl/openssl/test/keymgmt_internal_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License.
You can obtain a copy @@ -224,7 +224,7 @@ static int test_pass_rsa(FIXTURE *fixture) || !TEST_ptr_ne(km1, km2)) goto err; - while (dup_pk == NULL) { + for (;;) { ret = 0; km = km3; /* Check that we can't export an RSA key into an RSA-PSS keymanager */ @@ -255,7 +255,11 @@ static int test_pass_rsa(FIXTURE *fixture) } ret = (ret == OSSL_NELEM(expected)); - if (!ret || !TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) + + if (!ret || dup_pk != NULL) + break; + + if (!TEST_ptr(dup_pk = EVP_PKEY_dup(pk))) goto err; ret = TEST_int_eq(EVP_PKEY_eq(pk, dup_pk), 1); diff --git a/deps/openssl/openssl/test/pathed.cnf b/deps/openssl/openssl/test/pathed.cnf new file mode 100644 index 00000000000000..07bdc1fdb209a5 --- /dev/null +++ b/deps/openssl/openssl/test/pathed.cnf @@ -0,0 +1,22 @@ +openssl_conf = openssl_init + +# Comment out the next line to ignore configuration errors +config_diagnostics = 1 + +[openssl_init] +providers = provider_sect + +[provider_sect] +default = default_sect +legacy = legacy_sect +test = test_sect + +[test_sect] +module = ../test/p_test.so +activate = false + +[default_sect] +activate = true + +[legacy_sect] +activate = false diff --git a/deps/openssl/openssl/test/pkey_meth_kdf_test.c b/deps/openssl/openssl/test/pkey_meth_kdf_test.c index f816d24fb56fa5..ad58adf4826143 100644 --- a/deps/openssl/openssl/test/pkey_meth_kdf_test.c +++ b/deps/openssl/openssl/test/pkey_meth_kdf_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2017-2020 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -16,7 +16,7 @@ #include <openssl/kdf.h> #include "testutil.h" -static int test_kdf_tls1_prf(void) +static int test_kdf_tls1_prf(int index) { int ret = 0; EVP_PKEY_CTX *pctx; @@ -40,10 +40,23 @@ static int test_kdf_tls1_prf(void) TEST_error("EVP_PKEY_CTX_set1_tls1_prf_secret"); goto err; } - if (EVP_PKEY_CTX_add1_tls1_prf_seed(pctx, - (unsigned char *)"seed", 4) <= 0) { - TEST_error("EVP_PKEY_CTX_add1_tls1_prf_seed"); - goto err; + if (index == 0) { + if (EVP_PKEY_CTX_add1_tls1_prf_seed(pctx, + (unsigned char *)"seed", 4) <= 0) { + TEST_error("EVP_PKEY_CTX_add1_tls1_prf_seed"); + goto err; + } + } else { + if (EVP_PKEY_CTX_add1_tls1_prf_seed(pctx, + (unsigned char *)"se", 2) <= 0) { + TEST_error("EVP_PKEY_CTX_add1_tls1_prf_seed"); + goto err; + } + if (EVP_PKEY_CTX_add1_tls1_prf_seed(pctx, + (unsigned char *)"ed", 2) <= 0) { + TEST_error("EVP_PKEY_CTX_add1_tls1_prf_seed"); + goto err; + } } if (EVP_PKEY_derive(pctx, out, &outlen) <= 0) { TEST_error("EVP_PKEY_derive"); @@ -65,7 +78,7 @@ static int test_kdf_tls1_prf(void) return ret; } -static int test_kdf_hkdf(void) +static int test_kdf_hkdf(int index) { int ret = 0; EVP_PKEY_CTX *pctx; @@ -94,10 +107,23 @@ static int test_kdf_hkdf(void) TEST_error("EVP_PKEY_CTX_set1_hkdf_key"); goto err; } - if (EVP_PKEY_CTX_add1_hkdf_info(pctx, (const unsigned char *)"label", 5) + if (index == 0) { + if (EVP_PKEY_CTX_add1_hkdf_info(pctx, (const unsigned char *)"label", 5) <= 0) { - TEST_error("EVP_PKEY_CTX_set1_hkdf_info"); - goto err; + TEST_error("EVP_PKEY_CTX_add1_hkdf_info"); + goto err; + } + } else { + if (EVP_PKEY_CTX_add1_hkdf_info(pctx, (const unsigned char *)"lab", 3) + <= 0) { + TEST_error("EVP_PKEY_CTX_add1_hkdf_info"); + goto err; + } + if (EVP_PKEY_CTX_add1_hkdf_info(pctx, (const unsigned char *)"el", 2) + <= 0) { + TEST_error("EVP_PKEY_CTX_add1_hkdf_info"); + goto 
err; + } } if (EVP_PKEY_derive(pctx, out, &outlen) <= 0) { TEST_error("EVP_PKEY_derive"); @@ -195,8 +221,13 @@ static int test_kdf_scrypt(void) int setup_tests(void) { - ADD_TEST(test_kdf_tls1_prf); - ADD_TEST(test_kdf_hkdf); + int tests = 1; + + if (fips_provider_version_ge(NULL, 3, 3, 1)) + tests = 2; + + ADD_ALL_TESTS(test_kdf_tls1_prf, tests); + ADD_ALL_TESTS(test_kdf_hkdf, tests); #ifndef OPENSSL_NO_SCRYPT ADD_TEST(test_kdf_scrypt); #endif diff --git a/deps/openssl/openssl/test/prov_config_test.c b/deps/openssl/openssl/test/prov_config_test.c index b44ec78d8d24b4..2fd913deadf4f8 100644 --- a/deps/openssl/openssl/test/prov_config_test.c +++ b/deps/openssl/openssl/test/prov_config_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -7,12 +7,14 @@ * https://www.openssl.org/source/license.html */ +#include <sys/stat.h> #include <openssl/evp.h> #include <openssl/provider.h> #include "testutil.h" static char *configfile = NULL; static char *recurseconfigfile = NULL; +static char *pathedconfig = NULL; /* * Test to make sure there are no leaks or failures from loading the config @@ -24,15 +26,13 @@ static int test_double_config(void) int testresult = 0; EVP_MD *sha256 = NULL; - if (!TEST_ptr(configfile)) - return 0; if (!TEST_ptr(ctx)) return 0; if (!TEST_true(OSSL_LIB_CTX_load_config(ctx, configfile))) - return 0; + goto err; if (!TEST_true(OSSL_LIB_CTX_load_config(ctx, configfile))) - return 0; + goto err; /* Check we can actually fetch something */ sha256 = EVP_MD_fetch(ctx, "SHA2-256", NULL); @@ -52,9 +52,6 @@ static int test_recursive_config(void) int testresult = 0; unsigned long err; - if (!TEST_ptr(recurseconfigfile)) - goto err; - if (!TEST_ptr(ctx)) goto err; @@ -70,6 +67,54 @@ static int test_recursive_config(void) return testresult; } +#define P_TEST_PATH "/../test/p_test.so" +static int test_path_config(void) +{ + OSSL_LIB_CTX *ctx = NULL; + OSSL_PROVIDER *prov; + int testresult = 0; + struct stat sbuf; + char *module_path = getenv("OPENSSL_MODULES"); + char *full_path = NULL; + int rc; + + if (!TEST_ptr(module_path)) + return 0; + + full_path = OPENSSL_zalloc(strlen(module_path) + strlen(P_TEST_PATH) + 1); + if (!TEST_ptr(full_path)) + return 0; + + strcpy(full_path, module_path); + full_path = strcat(full_path, P_TEST_PATH); + TEST_info("full path is %s", full_path); + rc = stat(full_path, &sbuf); + OPENSSL_free(full_path); + if (rc == -1) + return TEST_skip("Skipping modulepath test as provider not present"); + + if (!TEST_ptr(pathedconfig)) + return 0; + + ctx = OSSL_LIB_CTX_new(); + if (!TEST_ptr(ctx)) + return 0; + + if (!TEST_true(OSSL_LIB_CTX_load_config(ctx, pathedconfig))) + goto err; + + /* attempt to manually load the test provider */ + if (!TEST_ptr(prov = OSSL_PROVIDER_load(ctx, "test"))) + goto err; + + OSSL_PROVIDER_unload(prov); + + testresult = 1; + err: + OSSL_LIB_CTX_free(ctx); + return testresult; +} + OPT_TEST_DECLARE_USAGE("configfile\n") int setup_tests(void) @@ -85,7 +130,11 @@ int setup_tests(void) if (!TEST_ptr(recurseconfigfile = test_get_argument(1))) return 0; + if (!TEST_ptr(pathedconfig = test_get_argument(2))) + return 0; + ADD_TEST(test_recursive_config); ADD_TEST(test_double_config); + ADD_TEST(test_path_config); return 1; } diff --git a/deps/openssl/openssl/test/provider_fallback_test.c 
b/deps/openssl/openssl/test/provider_fallback_test.c index 5902f57c85e723..26ba9ea1dcb06b 100644 --- a/deps/openssl/openssl/test/provider_fallback_test.c +++ b/deps/openssl/openssl/test/provider_fallback_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2020-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -39,9 +39,15 @@ static int test_explicit_provider(void) int ok; ok = TEST_ptr(ctx = OSSL_LIB_CTX_new()) - && TEST_ptr(prov = OSSL_PROVIDER_load(ctx, "default")) - && test_provider(ctx) - && TEST_true(OSSL_PROVIDER_unload(prov)); + && TEST_ptr(prov = OSSL_PROVIDER_load(ctx, "default")); + + if (ok) { + ok = test_provider(ctx); + if (ok) + ok = TEST_true(OSSL_PROVIDER_unload(prov)); + else + OSSL_PROVIDER_unload(prov); + } OSSL_LIB_CTX_free(ctx); return ok; diff --git a/deps/openssl/openssl/test/provider_internal_test.c b/deps/openssl/openssl/test/provider_internal_test.c index 1fe8fb0cc5c412..e42af73b1746ba 100644 --- a/deps/openssl/openssl/test/provider_internal_test.c +++ b/deps/openssl/openssl/test/provider_internal_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -22,7 +22,7 @@ static OSSL_PARAM greeting_request[] = { static int test_provider(OSSL_PROVIDER *prov, const char *expected_greeting) { - const char *greeting = NULL; + const char *greeting = "no greeting received"; int ret = 0; ret = diff --git a/deps/openssl/openssl/test/provider_test.c b/deps/openssl/openssl/test/provider_test.c index b2e0a5da716f91..762c2ee16eff1c 100644 --- a/deps/openssl/openssl/test/provider_test.c +++ b/deps/openssl/openssl/test/provider_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -197,6 +197,7 @@ static int test_builtin_provider_with_child(void) if (!TEST_true(OSSL_PROVIDER_add_builtin(libctx, name, PROVIDER_INIT_FUNCTION_NAME))) { + OSSL_PROVIDER_unload(legacy); OSSL_LIB_CTX_free(libctx); return 0; } diff --git a/deps/openssl/openssl/test/recipes/03-test_fipsinstall.t b/deps/openssl/openssl/test/recipes/03-test_fipsinstall.t index c39b2cee09ecc7..5f514e231b5978 100644 --- a/deps/openssl/openssl/test/recipes/03-test_fipsinstall.t +++ b/deps/openssl/openssl/test/recipes/03-test_fipsinstall.t @@ -1,5 +1,5 @@ #! /usr/bin/env perl -# Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. 
You can obtain a copy @@ -30,6 +30,9 @@ my $infile = bldtop_file('providers', platform->dso('fips')); my $fipskey = $ENV{FIPSKEY} // config('FIPSKEY') // '00'; my $provconf = srctop_file("test", "fips-and-base.cnf"); +run(test(["fips_version_test", "-config", $provconf, "<3.4.0"]), + capture => 1, statusvar => \my $indicatorpost); + # Read in a text $infile and replace the regular expression in $srch with the # value in $repl and output to a new file $outfile. sub replace_line_file_internal { @@ -182,7 +185,7 @@ ok(!run(app(['openssl', 'fipsinstall', '-out', 'fips.cnf', '-module', $infile, ok(!run(app(['openssl', 'fipsinstall', '-out', 'fips_fail.cnf', '-module', $infile, '-provider_name', 'fips', '-mac_name', 'HMAC', '-macopt', 'digest:SHA256', '-macopt', "hexkey:$fipskey", - '-section_name', 'fips_sect', '-corrupt_desc', 'SHA1'])), + '-section_name', 'fips_sect', '-corrupt_desc', 'SHA2'])), "fipsinstall fails when the digest result is corrupted"); # corrupt another digest @@ -269,24 +272,27 @@ ok(replace_parent_line_file('fips_no_module_mac.cnf', '-config', 'fips_parent_no_module_mac.cnf'])), "verify load config fail no module mac"); -ok(replace_parent_line_file('fips_no_install_mac.cnf', - 'fips_parent_no_install_mac.cnf') - && !run(app(['openssl', 'fipsinstall', - '-config', 'fips_parent_no_install_mac.cnf'])), - "verify load config fail no install mac"); - -ok(replace_parent_line_file('fips_bad_indicator.cnf', - 'fips_parent_bad_indicator.cnf') - && !run(app(['openssl', 'fipsinstall', - '-config', 'fips_parent_bad_indicator.cnf'])), - "verify load config fail bad indicator"); - -ok(replace_parent_line_file('fips_bad_install_mac.cnf', - 'fips_parent_bad_install_mac.cnf') - && !run(app(['openssl', 'fipsinstall', - '-config', 'fips_parent_bad_install_mac.cnf'])), - "verify load config fail bad install mac"); +SKIP: { + skip "Newer FIPS provider version does not support this feature", 3 + if !$indicatorpost; + + ok(replace_parent_line_file('fips_no_install_mac.cnf', + 'fips_parent_no_install_mac.cnf') + && !run(app(['openssl', 'fipsinstall', + '-config', 'fips_parent_no_install_mac.cnf'])), + "verify load config fail no install mac"); + ok(replace_parent_line_file('fips_bad_indicator.cnf', + 'fips_parent_bad_indicator.cnf') + && !run(app(['openssl', 'fipsinstall', + '-config', 'fips_parent_bad_indicator.cnf'])), + "verify load config fail bad indicator"); + ok(replace_parent_line_file('fips_bad_install_mac.cnf', + 'fips_parent_bad_install_mac.cnf') + && !run(app(['openssl', 'fipsinstall', + '-config', 'fips_parent_bad_install_mac.cnf'])), + "verify load config fail bad install mac"); +} ok(replace_parent_line_file('fips_bad_module_mac.cnf', 'fips_parent_bad_module_mac.cnf') diff --git a/deps/openssl/openssl/test/recipes/04-test_conf.t b/deps/openssl/openssl/test/recipes/04-test_conf.t index f987e43c8e94ea..574859e90e1ccf 100644 --- a/deps/openssl/openssl/test/recipes/04-test_conf.t +++ b/deps/openssl/openssl/test/recipes/04-test_conf.t @@ -1,5 +1,5 @@ #! /usr/bin/env perl -# Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2017-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. 
You can obtain a copy @@ -18,6 +18,7 @@ setup('test_conf'); my %input_result = ( 'dollarid_on.cnf' => 'dollarid_on.txt', 'dollarid_off.cnf' => 'dollarid_off.txt', + 'oversized_line.cnf' => 'oversized_line.txt', ); plan skip_all => 'This is unsupported for cross compiled configurations' diff --git a/deps/openssl/openssl/test/recipes/04-test_conf_data/oversized_line.cnf b/deps/openssl/openssl/test/recipes/04-test_conf_data/oversized_line.cnf new file mode 100644 index 00000000000000..08988a2e0f1d83 --- /dev/null +++ b/deps/openssl/openssl/test/recipes/04-test_conf_data/oversized_line.cnf @@ -0,0 +1,3 @@ +foo = a_line_longer_than_512_characters_\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"_end +bar = a_line_longer_than_512_characters__\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"\"_end +last = x diff --git a/deps/openssl/openssl/test/recipes/04-test_conf_data/oversized_line.txt b/deps/openssl/openssl/test/recipes/04-test_conf_data/oversized_line.txt new file mode 100644 index 00000000000000..c15b654300c765 --- /dev/null +++ b/deps/openssl/openssl/test/recipes/04-test_conf_data/oversized_line.txt @@ -0,0 +1,4 @@ +[ default ] +foo = 
a_line_longer_than_512_characters_""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""_end +bar = a_line_longer_than_512_characters__""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""""_end +last = x diff --git a/deps/openssl/openssl/test/recipes/15-test_dsaparam_data/invalid/p10240_q256_too_big.pem b/deps/openssl/openssl/test/recipes/15-test_dsaparam_data/invalid/p10240_q256_too_big.pem new file mode 100644 index 00000000000000..e85e2953b7a241 --- /dev/null +++ b/deps/openssl/openssl/test/recipes/15-test_dsaparam_data/invalid/p10240_q256_too_big.pem @@ -0,0 +1,57 @@ +-----BEGIN DSA PARAMETERS----- +MIIKLAKCBQEAym47LzPFZdbz16WvjczLKuzLtsP8yRk/exxL4bBthJhP1qOwctja +p1586SF7gDxCMn7yWVEYdfRbFefGoq0gj1XOE917XqlbnkmZhMgxut2KbNJo/xil +XNFUjGvKs3F413U9rAodC8f07cWHP1iTcWL+vPe6u2yilKWYYfnLWHQH+Z6aPrrF +x/R08LI6DZ6nEsIo+hxaQnEtx+iqNTJC6Q1RIjWDqxQkFVTkJ0Y7miRDXmRdneWk +oLrMZRpaXr5l5tSjEghh1pBgJcdyOv0lh4dlDy/alAiqE2Qlb667yHl6A9dDPlpW +dAntpffy4LwOxfbuEhISvKjjQoBwIvYE4TBPqL0Q6bC6HgQ4+tqd9b44pQjdIQjb +Xcjc6azheITSnPEex3OdKtKoQeRq01qCeLBpMXu1c+CTf4ApKArZvT3vZSg0hM1O +pR71bRZrEEegDj0LH2HCgI5W6H3blOS9A0kUTddCoQXr2lsVdiPtRbPKH1gcd9FQ +P8cGrvbakpTiC0dCczOMDaCteM1QNILlkM7ZoV6VghsKvDnFPxFsiIr5GgjasXP5 +hhbn3g7sDoq1LiTEo+IKQY28pBWx7etSOSRuXW/spnvCkivZla7lSEGljoy9QlQ2 +UZmsEQI9G3YyzgpxHvKZBK1CiZVTywdYKTZ4TYCxvqzhYhjv2bqbpjI12HRFLojB +koyEmMSp53lldCzp158PrIanqSp2rksMR8SmmCL3FwfAp2OjqFMEglG9DT8x0WaN +TLSkjGC6t2csMte7WyU1ekNoFDKfMjDSAz0+xIx21DEmZtYqFOg1DNPK1xYLS0pl +RSMRRkJVN2mk/G7/1oxlB8Wb9wgi3GKUqqCYT11SnBjzq0NdoJ3E4GMedp5Lx3AZ +4mFuRPUd4iV86tE0XDSHSFE7Y3ZkrOjD7Q/26/L53L/UH5z4HW6CHP5os7QERJjg +c1S3x87wXWo9QXbB9b2xmf+c+aWwAAr1cviw38tru58jF3/IGyduj9H8claKQqBG +cIOUF4aNe1hK2K3ArAOApUxr4KE+tCvrltRfiTmVFip0g9Jt1CPY3Zu7Bd4Z2ZkE +DtSztpwa49HrWF5E9xpquvBL2U8jQ68E7Xd8Wp4orI/TIChriamBmdkgRz3H2LvN +Ozb6+hsnEGrz3sp2RVAToSqA9ysa6nHZdfufPNtMEbQdO/k1ehmGRb0ljBRsO6b2 +rsG2eYuC8tg8eCrIkua0TGRI7g6a4K32AJdzaX6NsISaaIW+OYJuoDSscvD3oOg8 +PPEhU+zM7xJskTA+jxvPlikKx8V7MNHOCQECldJlUBwzJvqp40JvwfnDsF+8VYwd +UaiieR3pzMzyTjpReXRmZbnRPusRcsVzxb2OhB79wmuy4UPjjQBX+7eD0rs8xxvW +5a5q1Cjq4AvbwmmcA/wDrHDOjcbD/zodad2O1QtBWa/R4xyWea4zKsflgACE1zY9 +wW2br7+YQFekcrXkkkEzgxd6zxv8KVEDpXRZjmAM1cI5LvkoN64To4GedN8Qe/G7 +R9SZh9gnS17PTP64hK+aYqhFafMdu87q/+qLfxaSux727qE5hiW01u4nnWhACf9s +xuOozowKqxZxkolMIyZv6Lddwy1Zv5qjCyd0DvM/1skpXWkb9kfabYC+OhjsjVhs +0Ktfs6a5B3eixiw5x94hhIcTEcS4hmvhGUL72FiTca6ZeSERTKmNBy8CIQC9/ZUN +uU/V5JTcnYyUGHzm7+XcZBjyGBagBj9rCmW3SQKCBQAJ/k9rb39f1cO+/3XDEMjy +9bIEXSuS48g5RAc1UGd5nrrBQwuDxGWFyz0yvAY7LgyidZuJS21+MAp9EY7AOMmx +TDttifNaBJYt4GZ8of166PcqTKkHQwq5uBpxeSDv/ZE8YbYfaCtLTcUC8KlO+l36 
+gjJHSkdkflSsGy1yObSNDQDfVAAwQs//TjDMnuEtvlNXZllsTvFFBceXVETn10K2 +ZMmdSIJNfLnjReUKEN6PfeGqv7F4xoyGwUybEfRE4u5RmXrqCODaIjY3SNMrOq8B +R3Ata/cCozsM1jIdIW2z+OybDJH+BYsYm2nkSZQjZS6javTYClLrntEKG/hAQwL8 +F16YLOQXpHhgiAaWnTZzANtLppB2+5qCVy5ElzKongOwT8JTjTFXOaRnqe/ngm9W +SSbrxfDaoWUOyK9XD8Cydzpv3n4Y8nWNGayi7/yAFCU36Ri040ufgv/TZLuKacnl ++3ga3ZUpRlSigzx0kb1+KjTSWeQ8vE/psdWjvBukVEbzdUauMLyRLo/6znSVvvPX +UGhviThE5uhrsUg+wEPFINriSHfF7JDKVhDcJnLBdaXvfN52pkF/naLBF5Rt3Gvq +fjCxjx0Sy9Lag1hDN4dor7dzuO7wmwOS01DJW1PtNLuuH0Bbqh1kYSaQkmyXBZWX +qo8K3nkoDM0niOtJJubOhTNrGmSaZpNXkK3Mcy9rBbdvEs5O0Jmqaax/eOdU0Yot +B3lX+3ddOseT2ZEFjzObqTtkWuFBeBxuYNcRTsu3qMdIBsEb8URQdsTtjoIja2fK +hreVgjK36GW70KXEl8V/vq5qjQulmqkBEjmilcDuiREKqQuyeagUOnhQaBplqVco +4xznh5DMBMRbpGb5lHxKv4cPNi+uNAJ5i98zWUM1JRt6aXnRCuWcll1z8fRZ+5kD +vK9FaZU3VRMK/eknEG49cGr8OuJ6ZRSaC+tKwV1y+amkSZpKPWnk2bUnQI3ApJv3 +k1e1EToeECpMUkLMDgNbpKBoz4nqMEvAAlYgw9xKNbLlQlahqTVEAmaJHh4yDMDy +i7IZ9Wrn47IGoR7s3cvhDHUpRPeW4nsmgzj+tf5EAxemI61STZJTTWo0iaPGJxct +9nhOOhw1I38Mvm4vkAbFH7YJ0B6QrjjYL2MbOTp5JiIh4vdOeWwNo9/y4ffyaN5+ +ADpxuuIAmcbdr6GPOhkOFFixRJa0B2eP1i032HESlLs8RB9oYtdTXdXQotnIgJGd +Y8tSKOa1zjzeLHn3AVpRZTUW++/BxmApV3GKIeG8fsUjg/df0QRrBcdC/1uccdaG +KKlAOwlywVn5jUlwHkTmDiTM9w5AqVVGHZ2b+4ZgQW8jnPKN0SrKf6U555D+zp7E +x4uXoE8ojN9y8m8UKf0cTLnujH2XgZorjPfuMOt5VZEhQFMS2QaljSeni5CJJ8gk +XtztNqfBlAtWR4V5iAHeQOfIB2YaOy8GESda89tyKraKeaez41VblpTVHTeq9IIF +YB4cQA2PfuNaGVRGLMAgT3Dvl+mxxxeJyxnGAiUcETU/jJJt9QombiuszBlYGQ5d +ELOSm/eQSRARV9zNSt5jaQlMSjMBqenIEM09BzYqa7jDwqoztFxNdO8bcuQPuKwa +4z3bBZ1yYm63WFdNbQqqGEwc0OYmqg1raJ0zltgHyjFyw8IGu4g/wETs+nVQcH7D +vKuje86bePD6kD/LH3wmkA== +-----END DSA PARAMETERS----- diff --git a/deps/openssl/openssl/test/recipes/25-test_eai_data.t b/deps/openssl/openssl/test/recipes/25-test_eai_data.t index 522982ddfb8025..63548d060293ba 100644 --- a/deps/openssl/openssl/test/recipes/25-test_eai_data.t +++ b/deps/openssl/openssl/test/recipes/25-test_eai_data.t @@ -1,5 +1,5 @@ #! /usr/bin/env perl -# Copyright 2019-2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. 
You can obtain a copy @@ -21,16 +21,18 @@ setup("test_eai_data"); #./util/wrap.pl apps/openssl verify -nameopt utf8 -no_check_time -CAfile test/recipes/25-test_eai_data/utf8_chain.pem test/recipes/25-test_eai_data/ascii_leaf.pem #./util/wrap.pl apps/openssl verify -nameopt utf8 -no_check_time -CAfile test/recipes/25-test_eai_data/ascii_chain.pem test/recipes/25-test_eai_data/utf8_leaf.pem -plan tests => 12; +plan tests => 16; require_ok(srctop_file('test','recipes','tconversion.pl')); my $folder = "test/recipes/25-test_eai_data"; my $ascii_pem = srctop_file($folder, "ascii_leaf.pem"); my $utf8_pem = srctop_file($folder, "utf8_leaf.pem"); +my $kdc_pem = srctop_file($folder, "kdc-cert.pem"); my $ascii_chain_pem = srctop_file($folder, "ascii_chain.pem"); my $utf8_chain_pem = srctop_file($folder, "utf8_chain.pem"); +my $kdc_chain_pem = srctop_file($folder, "kdc-root-cert.pem"); my $out; my $outcnt = 0; @@ -56,10 +58,18 @@ SKIP: { ok(run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-CAfile", $ascii_chain_pem, $ascii_pem]))); ok(run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-CAfile", $utf8_chain_pem, $utf8_pem]))); +ok(run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-CAfile", $kdc_chain_pem, $kdc_pem]))); ok(!run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-CAfile", $ascii_chain_pem, $utf8_pem]))); ok(!run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-CAfile", $utf8_chain_pem, $ascii_pem]))); +# Check an otherName does not get misparsed as a DNS name (should trigger ASAN errors if violated). +ok(run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-verify_hostname", 'mx1.example.com', "-CAfile", $kdc_chain_pem, $kdc_pem]))); +# Check an otherName does not get misparsed as an email address (should trigger ASAN errors if violated). +ok(run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-verify_email", 'joe@example.com', "-CAfile", $kdc_chain_pem, $kdc_pem]))); +# We expect SmtpUTF8Mailbox to be a UTF8String, not an IA5String.
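+# (kdc-cert.pem, generated by kdc.sh below, deliberately encodes this SAN as an IA5String, so the verification must fail.)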
+ok(!run(app(["openssl", "verify", "-nameopt", "utf8", "-no_check_time", "-verify_email", 'moe@example.com', "-CAfile", $kdc_chain_pem, $kdc_pem]))); + #Check that we get the expected failure return code with({ exit_checker => sub { return shift == 2; } }, sub { diff --git a/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc-cert.pem b/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc-cert.pem new file mode 100644 index 00000000000000..e8a2c6f55d4598 --- /dev/null +++ b/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc-cert.pem @@ -0,0 +1,21 @@ +-----BEGIN CERTIFICATE----- +MIIDbDCCAlSgAwIBAgIBAjANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDARSb290 +MCAXDTI0MDYyMDA2MTQxNVoYDzIxMjQwNjIwMDYxNDE1WjAXMRUwEwYDVQQDDAxU +RVNULkVYQU1QTEUwggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAwggEKAoIBAQC6wfP+ +6go79dkpo/dGLMlPZ7Gw/Q6gUYrCWZWUEgEeRVHCrqOlgUEyA+PcWas/XDPUxXry +BQlJHLvlqamAQn8gs4QPBARFYWKNiTVGyaRkgNA1N5gqyZdrP9UE+ZJmdqxRAAe8 +vvpGZWSgevPhLUiSCFYDiD0Rtji2Hm3rGUrReQFBQDEw2pNGwz9zIaxUs08kQZcx +Yzyiplz5Oau+R/6sAgUwDlrD9xOlUxx/tA/MSDIfkK8qioU11uUZtO5VjkNQy/bT +7zQMmXxWgm2MIgOs1u4YN7YGOtgqHE9v9iPHHfgrkbQDtVDGQsa8AQEhkUDSCtW9 +3VFAKx6dGNXYzFwfAgMBAAGjgcgwgcUwHQYDVR0OBBYEFFR5tZycW19DmtbL4Zqj +te1c2vZLMAkGA1UdIwQCMAAwCQYDVR0TBAIwADCBjQYDVR0RBIGFMIGCoD8GBisG +AQUCAqA1MDOgDhsMVEVTVC5FWEFNUExFoSEwH6ADAgEBoRgwFhsGa3JidGd0GwxU +RVNULkVYQU1QTEWgHQYIKwYBBQUHCAmgERYPbW9lQGV4YW1wbGUuY29tgQ9qb2VA +ZXhhbXBsZS5jb22CD214MS5leGFtcGxlLmNvbTANBgkqhkiG9w0BAQsFAAOCAQEA +T0xzVtVpRtaOzIhgzw7XQUdzWD5UEGSJJ1cBCOmKUWwDLTAouCYLFB4TbEE7MMUb +iuMy60bjmVtvfJIXorGUgSadRe5RWJ5DamJWvPA0Q9x7blnEcXqEF+9Td+ypevgU +UYHFmg83OYwxOsFXZ5cRuXMk3WCsDHQIBi6D1L6oDDZ2pfArs5mqm3thQKVlqyl1 +El3XRYEdqAz/5eCOFNfwxF0ALxjxVr/Z50StUZU8I7Zfev6+kHhyrR7dqzYJImv9 +0fTCOBEMjIETDsrA70OxAMu4V16nrWZdJdvzblS2qrt97Omkj+2kiPAJFB76RpwI +oDQ9fKfUOAmUFth2/R/eGA== +-----END CERTIFICATE----- diff --git a/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc-root-cert.pem b/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc-root-cert.pem new file mode 100644 index 00000000000000..a74c96bf31469f --- /dev/null +++ b/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc-root-cert.pem @@ -0,0 +1,16 @@ +-----BEGIN CERTIFICATE----- +MIICnDCCAYQCCQCBswYcrlZSHjANBgkqhkiG9w0BAQsFADAPMQ0wCwYDVQQDDARS +b290MCAXDTI0MDYyMDA2MTQxNVoYDzIxMjQwNjIwMDYxNDE1WjAPMQ0wCwYDVQQD +DARSb290MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAqRj8S4kBbIUj +61kZfi6nE35Q38U140+qt4uAiwAhKumfVHlBM0zQ98WFt5zMHIBQwIb3yjc2zj+0 +qzUnQfwm1r/RfcMmBPEti9Ge+aEMSsds2gMXziOFM8wd2aAFPy7UVE0XpEWofsRK +MGi61MKVdPSbGIxBwY9VW38/7D/wf1HtJe7y0xpuecR7GB2XAs+qST59NjuF+7wS +dLM8Hb3TATgeYbXXWsRJgwz+SPzExg5WmLnU+7y4brZ32dHtdSmkRVSgSlaIf7Xj +3Tc6Zi7I+W/JYk7hy1zUexVdWCak4PHcoWrXe0gNNN/t8VfLfMExt5z/HIylXnU7 +pGUyqZlTGQIDAQABMA0GCSqGSIb3DQEBCwUAA4IBAQAHpLF1UCRy7b6Hk0rLokxI +lgwiH9BU9mktigAGASvkbllpt+YbUbWnuYAvpHBGiP1qZtfX2r96UrSJaGO9BEzT +Gp9ThnSjoj4Srul0+s/NArU22irFLmDzbalgevAmm9gMGkdqkiIm/mXbwrPj0ncl +KGicevXryVpvaP62eZ8cc3C4p97frMmXxRX8sTdQpD/gRI7prdEILRSKveqT+AEW +7rFGM5AOevb4U8ddop8A3D/kX0wcCAIBF6jCNk3uEJ57jVcagL04kPnVfdRiedTS +vfq1DRNcD29d1H/9u0fHdSn1/+8Ep3X+afQ3C6//5NvOEaXcIGO4QSwkprQydfv8 +-----END CERTIFICATE----- diff --git a/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc.sh b/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc.sh new file mode 100755 index 00000000000000..7a8dbc719fb71e --- /dev/null +++ b/deps/openssl/openssl/test/recipes/25-test_eai_data/kdc.sh @@ -0,0 +1,41 @@ +#! 
/usr/bin/env bash + +# Create a root CA, signing a leaf cert with a KDC principal otherName SAN, and +# also a non-UTF8 smtpUtf8Mailbox SAN followed by an rfc822Name SAN and a DNS +# name SAN. In the vulnerable EAI code, the KDC principal `otherName` should +# trigger ASAN errors in DNS name checks, while the non-UTF8 `smtpUtf8Mailbox` +# should likewise lead to ASAN issues with email name checks. + +rm -f root-key.pem root-cert.pem +openssl req -nodes -new -newkey rsa:2048 -keyout kdc-root-key.pem \ + -x509 -subj /CN=Root -days 36524 -out kdc-root-cert.pem + +exts=$( + printf "%s\n%s\n%s\n%s = " \ + "subjectKeyIdentifier = hash" \ + "authorityKeyIdentifier = keyid" \ + "basicConstraints = CA:false" \ + "subjectAltName" + printf "%s, " "otherName:1.3.6.1.5.2.2;SEQUENCE:kdc_princ_name" + printf "%s, " "otherName:1.3.6.1.5.5.7.8.9;IA5:moe@example.com" + printf "%s, " "email:joe@example.com" + printf "%s\n" "DNS:mx1.example.com" + printf "[kdc_princ_name]\n" + printf "realm = EXP:0, GeneralString:TEST.EXAMPLE\n" + printf "principal_name = EXP:1, SEQUENCE:kdc_principal_seq\n" + printf "[kdc_principal_seq]\n" + printf "name_type = EXP:0, INTEGER:1\n" + printf "name_string = EXP:1, SEQUENCE:kdc_principal_components\n" + printf "[kdc_principal_components]\n" + printf "princ1 = GeneralString:krbtgt\n" + printf "princ2 = GeneralString:TEST.EXAMPLE\n" + ) + +printf "%s\n" "$exts" + +openssl req -nodes -new -newkey rsa:2048 -keyout kdc-key.pem \ + -subj "/CN=TEST.EXAMPLE" | + openssl x509 -req -out kdc-cert.pem \ + -CA "kdc-root-cert.pem" -CAkey "kdc-root-key.pem" \ + -set_serial 2 -days 36524 \ + -extfile <(printf "%s\n" "$exts") diff --git a/deps/openssl/openssl/test/recipes/25-test_req.t b/deps/openssl/openssl/test/recipes/25-test_req.t index fe02d29c634f2f..932635f4b2c182 100644 --- a/deps/openssl/openssl/test/recipes/25-test_req.t +++ b/deps/openssl/openssl/test/recipes/25-test_req.t @@ -15,7 +15,7 @@ use OpenSSL::Test qw/:DEFAULT srctop_file/; setup("test_req"); -plan tests => 49; +plan tests => 50; require_ok(srctop_file('test', 'recipes', 'tconversion.pl')); @@ -53,6 +53,7 @@ ok(!run(app([@addext_args, "-addext", $val, "-addext", $val2]))); ok(!run(app([@addext_args, "-addext", $val, "-addext", $val3]))); ok(!run(app([@addext_args, "-addext", $val2, "-addext", $val3]))); ok(run(app([@addext_args, "-addext", "SXNetID=1:one, 2:two, 3:three"]))); +ok(run(app([@addext_args, "-addext", "subjectAltName=dirName:dirname_sec"]))); # If a CSR is provided with neither of -key or -CA/-CAkey, this should fail. ok(!run(app(["openssl", "req", "-x509", diff --git a/deps/openssl/openssl/test/recipes/30-test_evp_byname.t b/deps/openssl/openssl/test/recipes/30-test_evp_byname.t new file mode 100644 index 00000000000000..d06e874fe927b3 --- /dev/null +++ b/deps/openssl/openssl/test/recipes/30-test_evp_byname.t @@ -0,0 +1,16 @@ +#! /usr/bin/env perl +# Copyright 2024 The OpenSSL Project Authors. All Rights Reserved. +# +# Licensed under the Apache License 2.0 (the "License"). You may not use +# this file except in compliance with the License. 
You can obtain a copy +# in the file LICENSE in the source distribution or at +# https://www.openssl.org/source/license.html + +use strict; +use OpenSSL::Test; +use OpenSSL::Test::Simple; +use OpenSSL::Test::Utils; + +setup("test_evp_byname"); + +simple_test("test_evp_byname", "evp_byname_test"); diff --git a/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_dsa.txt b/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_dsa.txt index debd62bca84cbc..39f9a01343bf74 100644 --- a/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_dsa.txt +++ b/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_dsa.txt @@ -1,5 +1,5 @@ # -# Copyright 2001-2023 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -270,6 +270,7 @@ Title = FIPS Tests (using different key sizes and digests) # Test sign with a 2048 bit key with N == 160 is not allowed in fips mode Availablein = fips +FIPSversion = <3.4.0 DigestSign = SHA256 Key = DSA-2048-160 Input = "Hello" @@ -324,6 +325,7 @@ Title = Fips Negative Tests (using different key sizes and digests) # Test sign with a 1024 bit key is not allowed in fips mode Availablein = fips +FIPSversion = <3.4.0 DigestSign = SHA256 Securitycheck = 1 Key = DSA-1024-FIPS186-2 @@ -340,6 +342,7 @@ Result = DIGESTSIGNINIT_ERROR # Test sign with a 3072 bit key with N == 224 is not allowed in fips mode Availablein = fips +FIPSversion = <3.4.0 DigestSign = SHA256 Securitycheck = 1 Key = DSA-3072-224 @@ -348,6 +351,7 @@ Result = DIGESTSIGNINIT_ERROR # Test sign with a 4096 bit key is not allowed in fips mode Availablein = fips +FIPSversion = <3.4.0 DigestSign = SHA256 Securitycheck = 1 Key = DSA-4096-256 diff --git a/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_ecdsa.txt b/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_ecdsa.txt index ec3c032aba8d8f..1f9ce93cd1661f 100644 --- a/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_ecdsa.txt +++ b/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_ecdsa.txt @@ -1,5 +1,5 @@ # -# Copyright 2001-2022 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -216,6 +216,7 @@ Result = DIGESTSIGNINIT_ERROR # Test that SHA1 is not allowed in fips mode for signing Availablein = fips +FIPSversion = <3.4.0 Sign = P-256 Securitycheck = 1 Ctrl = digest:SHA1 diff --git a/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_rsa_common.txt b/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_rsa_common.txt index 24ec6a4f770521..5f3b396a675309 100644 --- a/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_rsa_common.txt +++ b/deps/openssl/openssl/test/recipes/30-test_evp_data/evppkey_rsa_common.txt @@ -1,5 +1,5 @@ # -# Copyright 2001-2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2001-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. 
You can obtain a copy @@ -1344,6 +1344,7 @@ Output = 80382819f51b197c42f9fc02a85198683d918059afc013ae155992442563dd289700829 # Signing with SHA1 is not allowed in fips mode Availablein = fips +FIPSversion = <3.4.0 DigestSign = SHA1 Securitycheck = 1 Key = RSA-2048 diff --git a/deps/openssl/openssl/test/recipes/30-test_prov_config.t b/deps/openssl/openssl/test/recipes/30-test_prov_config.t index 7f6350fd84e116..1ef8736209c6f7 100644 --- a/deps/openssl/openssl/test/recipes/30-test_prov_config.t +++ b/deps/openssl/openssl/test/recipes/30-test_prov_config.t @@ -1,5 +1,5 @@ #! /usr/bin/env perl -# Copyright 2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2021-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -23,13 +23,15 @@ my $no_fips = disabled('fips') || ($ENV{NO_FIPS} // 0); plan tests => 2; ok(run(test(["prov_config_test", srctop_file("test", "default.cnf"), - srctop_file("test", "recursive.cnf")])), + srctop_file("test", "recursive.cnf"), + srctop_file("test", "pathed.cnf")])), "running prov_config_test default.cnf"); SKIP: { skip "Skipping FIPS test in this build", 1 if $no_fips; ok(run(test(["prov_config_test", srctop_file("test", "fips.cnf"), - srctop_file("test", "recursive.cnf")])), + srctop_file("test", "recursive.cnf"), + srctop_file("test", "pathed.cnf")])), "running prov_config_test fips.cnf"); } diff --git a/deps/openssl/openssl/test/recipes/70-test_npn.t b/deps/openssl/openssl/test/recipes/70-test_npn.t new file mode 100644 index 00000000000000..f82e71af6aca14 --- /dev/null +++ b/deps/openssl/openssl/test/recipes/70-test_npn.t @@ -0,0 +1,73 @@ +#! /usr/bin/env perl +# Copyright 2024 The OpenSSL Project Authors. All Rights Reserved. +# +# Licensed under the Apache License 2.0 (the "License"). You may not use +# this file except in compliance with the License. You can obtain a copy +# in the file LICENSE in the source distribution or at +# https://www.openssl.org/source/license.html + +use strict; +use OpenSSL::Test qw/:DEFAULT cmdstr srctop_file/; +use OpenSSL::Test::Utils; + +use TLSProxy::Proxy; + +my $test_name = "test_npn"; +setup($test_name); + +plan skip_all => "TLSProxy isn't usable on $^O" + if $^O =~ /^(VMS)$/; + +plan skip_all => "$test_name needs the dynamic engine feature enabled" + if disabled("engine") || disabled("dynamic-engine"); + +plan skip_all => "$test_name needs the sock feature enabled" + if disabled("sock"); + +plan skip_all => "$test_name needs NPN enabled" + if disabled("nextprotoneg"); + +plan skip_all => "$test_name needs TLSv1.2 enabled" + if disabled("tls1_2"); + +my $proxy = TLSProxy::Proxy->new( + undef, + cmdstr(app(["openssl"]), display => 1), + srctop_file("apps", "server.pem"), + (!$ENV{HARNESS_ACTIVE} || $ENV{HARNESS_VERBOSE}) +); + +$proxy->start() or plan skip_all => "Unable to start up Proxy for tests"; +plan tests => 1; + +my $npnseen = 0; + +# Test 1: Check sending an empty NextProto message from the client works. This is +# valid as per the spec, but OpenSSL does not allow you to send it. 
+# Therefore we must be prepared to receive such a message but we cannot +# generate it except via TLSProxy +$proxy->clear(); +$proxy->filter(\&npn_filter); +$proxy->clientflags("-nextprotoneg foo -no_tls1_3"); +$proxy->serverflags("-nextprotoneg foo"); +$proxy->start(); +ok($npnseen && TLSProxy::Message->success(), "Empty NPN message"); + +sub npn_filter +{ + my $proxy = shift; + my $message; + + # The NextProto message always appears in flight 2 + return if $proxy->flight != 2; + + foreach my $message (@{$proxy->message_list}) { + if ($message->mt == TLSProxy::Message::MT_NEXT_PROTO) { + # Our TLSproxy NextProto message support doesn't support parsing of + # the message. If we repack it just creates an empty NextProto + # message - which is exactly the scenario we want to test here. + $message->repack(); + $npnseen = 1; + } + } +} diff --git a/deps/openssl/openssl/test/recipes/80-test_pkcs12.t b/deps/openssl/openssl/test/recipes/80-test_pkcs12.t index 4c5bb5744b8c59..de26cbdca4dc71 100644 --- a/deps/openssl/openssl/test/recipes/80-test_pkcs12.t +++ b/deps/openssl/openssl/test/recipes/80-test_pkcs12.t @@ -54,7 +54,7 @@ if (eval { require Win32::API; 1; }) { } $ENV{OPENSSL_WIN32_UTF8}=1; -plan tests => 17; +plan tests => 20; # Test different PKCS#12 formats ok(run(test(["pkcs12_format_test"])), "test pkcs12 formats"); @@ -162,11 +162,23 @@ with({ exit_checker => sub { return shift == 1; } }, "-nomacver"])), "test bad pkcs12 file 1 (nomacver)"); + ok(run(app(["openssl", "pkcs12", "-in", $bad1, "-password", "pass:", + "-info"])), + "test bad pkcs12 file 1 (info)"); + ok(run(app(["openssl", "pkcs12", "-in", $bad2, "-password", "pass:"])), "test bad pkcs12 file 2"); + ok(run(app(["openssl", "pkcs12", "-in", $bad2, "-password", "pass:", + "-info"])), + "test bad pkcs12 file 2 (info)"); + ok(run(app(["openssl", "pkcs12", "-in", $bad3, "-password", "pass:"])), "test bad pkcs12 file 3"); + + ok(run(app(["openssl", "pkcs12", "-in", $bad3, "-password", "pass:", + "-info"])), + "test bad pkcs12 file 3 (info)"); }); SetConsoleOutputCP($savedcp) if (defined($savedcp)); diff --git a/deps/openssl/openssl/test/recipes/90-test_shlibload.t b/deps/openssl/openssl/test/recipes/90-test_shlibload.t index 8f691dee38e823..67afff607e0456 100644 --- a/deps/openssl/openssl/test/recipes/90-test_shlibload.t +++ b/deps/openssl/openssl/test/recipes/90-test_shlibload.t @@ -1,5 +1,5 @@ #! /usr/bin/env perl -# Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -23,6 +23,7 @@ plan skip_all => "Test is disabled on AIX" if config('target') =~ m|^aix|; plan skip_all => "Test is disabled on NonStop" if config('target') =~ m|^nonstop|; plan skip_all => "Test only supported in a dso build" if disabled("dso"); plan skip_all => "Test is disabled in an address sanitizer build" unless disabled("asan"); +plan skip_all => "Test is disabled in no-atexit build" if disabled("atexit"); plan tests => 10; diff --git a/deps/openssl/openssl/test/sm2_internal_test.c b/deps/openssl/openssl/test/sm2_internal_test.c index 4899d5e21313c1..bd0bf0efa74d5b 100644 --- a/deps/openssl/openssl/test/sm2_internal_test.c +++ b/deps/openssl/openssl/test/sm2_internal_test.c @@ -1,5 +1,5 @@ /* - * Copyright 2017-2021 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2017-2024 The OpenSSL Project Authors. 
All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -305,7 +305,8 @@ static int test_sm2_sign(const EC_GROUP *group, const char *message, const char *k_hex, const char *r_hex, - const char *s_hex) + const char *s_hex, + int omit_pubkey) { const size_t msg_len = strlen(message); int ok = 0; @@ -327,11 +328,13 @@ static int test_sm2_sign(const EC_GROUP *group, || !TEST_true(EC_KEY_set_private_key(key, priv))) goto done; - pt = EC_POINT_new(group); - if (!TEST_ptr(pt) - || !TEST_true(EC_POINT_mul(group, pt, priv, NULL, NULL, NULL)) - || !TEST_true(EC_KEY_set_public_key(key, pt))) - goto done; + if (omit_pubkey == 0) { + pt = EC_POINT_new(group); + if (!TEST_ptr(pt) + || !TEST_true(EC_POINT_mul(group, pt, priv, NULL, NULL, NULL)) + || !TEST_true(EC_KEY_set_public_key(key, pt))) + goto done; + } start_fake_rand(k_hex); sig = ossl_sm2_do_sign(key, EVP_sm3(), (const uint8_t *)userid, @@ -392,7 +395,25 @@ static int sm2_sig_test(void) "006CB28D99385C175C94F94E934817663FC176D925DD72B727260DBAAE1FB2F96F" "007c47811054c6f99613a578eb8453706ccb96384fe7df5c171671e760bfa8be3a", "40F1EC59F793D9F49E09DCEF49130D4194F79FB1EED2CAA55BACDB49C4E755D1", - "6FC6DAC32C5D5CF10C77DFB20F7C2EB667A457872FB09EC56327A67EC7DEEBE7"))) + "6FC6DAC32C5D5CF10C77DFB20F7C2EB667A457872FB09EC56327A67EC7DEEBE7", 0))) + goto done; + + /* Make sure we fail if we omit the public portion of the key */ + if (!TEST_false(test_sm2_sign( + test_group, + /* the default ID specified in GM/T 0009-2012 (Sec. 10).*/ + SM2_DEFAULT_USERID, + /* privkey */ + "3945208F7B2144B13F36E38AC6D39F95889393692860B51A42FB81EF4DF7C5B8", + /* plaintext message */ + "message digest", + /* ephemeral nonce k */ + "59276E27D506861A16680F3AD9C02DCCEF3CC1FA3CDBE4CE6D54B80DEAC1BC21", + /* expected signature, */ + /* signature R, 0x20 bytes */ + "F5A03B0648D2C4630EEAC513E1BB81A15944DA3827D5B74143AC7EACEEE720B3", + /* signature S, 0x20 bytes */ + "B1B6AA29DF212FD8763182BC0D421CA1BB9038FD1F7F42D4840B69C485BBC1AA", 1))) goto done; testresult = 1; diff --git a/deps/openssl/openssl/test/ssl-tests/08-npn.cnf b/deps/openssl/openssl/test/ssl-tests/08-npn.cnf index f38b3f6975ce06..1931d02de4bacb 100644 --- a/deps/openssl/openssl/test/ssl-tests/08-npn.cnf +++ b/deps/openssl/openssl/test/ssl-tests/08-npn.cnf @@ -1,6 +1,6 @@ # Generated with generate_ssl_tests.pl -num_tests = 20 +num_tests = 22 test-0 = 0-npn-simple test-1 = 1-npn-client-finds-match @@ -8,20 +8,22 @@ test-2 = 2-npn-client-honours-server-pref test-3 = 3-npn-client-first-pref-on-mismatch test-4 = 4-npn-no-server-support test-5 = 5-npn-no-client-support -test-6 = 6-npn-with-sni-no-context-switch -test-7 = 7-npn-with-sni-context-switch -test-8 = 8-npn-selected-sni-server-supports-npn -test-9 = 9-npn-selected-sni-server-does-not-support-npn -test-10 = 10-alpn-preferred-over-npn -test-11 = 11-sni-npn-preferred-over-alpn -test-12 = 12-npn-simple-resumption -test-13 = 13-npn-server-switch-resumption -test-14 = 14-npn-client-switch-resumption -test-15 = 15-npn-client-first-pref-on-mismatch-resumption -test-16 = 16-npn-no-server-support-resumption -test-17 = 17-npn-no-client-support-resumption -test-18 = 18-alpn-preferred-over-npn-resumption -test-19 = 19-npn-used-if-alpn-not-supported-resumption +test-6 = 6-npn-empty-client-list +test-7 = 7-npn-empty-server-list +test-8 = 8-npn-with-sni-no-context-switch +test-9 = 9-npn-with-sni-context-switch +test-10 = 10-npn-selected-sni-server-supports-npn +test-11 = 
11-npn-selected-sni-server-does-not-support-npn +test-12 = 12-alpn-preferred-over-npn +test-13 = 13-sni-npn-preferred-over-alpn +test-14 = 14-npn-simple-resumption +test-15 = 15-npn-server-switch-resumption +test-16 = 16-npn-client-switch-resumption +test-17 = 17-npn-client-first-pref-on-mismatch-resumption +test-18 = 18-npn-no-server-support-resumption +test-19 = 19-npn-no-client-support-resumption +test-20 = 20-alpn-preferred-over-npn-resumption +test-21 = 21-npn-used-if-alpn-not-supported-resumption # =========================================================== [0-npn-simple] @@ -206,253 +208,318 @@ NPNProtocols = foo # =========================================================== -[6-npn-with-sni-no-context-switch] -ssl_conf = 6-npn-with-sni-no-context-switch-ssl +[6-npn-empty-client-list] +ssl_conf = 6-npn-empty-client-list-ssl -[6-npn-with-sni-no-context-switch-ssl] -server = 6-npn-with-sni-no-context-switch-server -client = 6-npn-with-sni-no-context-switch-client -server2 = 6-npn-with-sni-no-context-switch-server2 +[6-npn-empty-client-list-ssl] +server = 6-npn-empty-client-list-server +client = 6-npn-empty-client-list-client -[6-npn-with-sni-no-context-switch-server] +[6-npn-empty-client-list-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[6-npn-with-sni-no-context-switch-server2] +[6-npn-empty-client-list-client] +CipherString = DEFAULT +MaxProtocol = TLSv1.2 +VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem +VerifyMode = Peer + +[test-6] +ExpectedClientAlert = HandshakeFailure +ExpectedResult = ClientFail +server = 6-npn-empty-client-list-server-extra +client = 6-npn-empty-client-list-client-extra + +[6-npn-empty-client-list-server-extra] +NPNProtocols = foo + +[6-npn-empty-client-list-client-extra] +NPNProtocols = + + +# =========================================================== + +[7-npn-empty-server-list] +ssl_conf = 7-npn-empty-server-list-ssl + +[7-npn-empty-server-list-ssl] +server = 7-npn-empty-server-list-server +client = 7-npn-empty-server-list-client + +[7-npn-empty-server-list-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[6-npn-with-sni-no-context-switch-client] +[7-npn-empty-server-list-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-6] +[test-7] +ExpectedNPNProtocol = foo +server = 7-npn-empty-server-list-server-extra +client = 7-npn-empty-server-list-client-extra + +[7-npn-empty-server-list-server-extra] +NPNProtocols = + +[7-npn-empty-server-list-client-extra] +NPNProtocols = foo + + +# =========================================================== + +[8-npn-with-sni-no-context-switch] +ssl_conf = 8-npn-with-sni-no-context-switch-ssl + +[8-npn-with-sni-no-context-switch-ssl] +server = 8-npn-with-sni-no-context-switch-server +client = 8-npn-with-sni-no-context-switch-client +server2 = 8-npn-with-sni-no-context-switch-server2 + +[8-npn-with-sni-no-context-switch-server] +Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem +CipherString = DEFAULT +PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem + +[8-npn-with-sni-no-context-switch-server2] +Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem +CipherString = DEFAULT +PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem + +[8-npn-with-sni-no-context-switch-client] +CipherString = DEFAULT +MaxProtocol = TLSv1.2 +VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem +VerifyMode 
= Peer + +[test-8] ExpectedNPNProtocol = foo ExpectedServerName = server1 -server = 6-npn-with-sni-no-context-switch-server-extra -server2 = 6-npn-with-sni-no-context-switch-server2-extra -client = 6-npn-with-sni-no-context-switch-client-extra +server = 8-npn-with-sni-no-context-switch-server-extra +server2 = 8-npn-with-sni-no-context-switch-server2-extra +client = 8-npn-with-sni-no-context-switch-client-extra -[6-npn-with-sni-no-context-switch-server-extra] +[8-npn-with-sni-no-context-switch-server-extra] NPNProtocols = foo ServerNameCallback = IgnoreMismatch -[6-npn-with-sni-no-context-switch-server2-extra] +[8-npn-with-sni-no-context-switch-server2-extra] NPNProtocols = bar -[6-npn-with-sni-no-context-switch-client-extra] +[8-npn-with-sni-no-context-switch-client-extra] NPNProtocols = foo,bar ServerName = server1 # =========================================================== -[7-npn-with-sni-context-switch] -ssl_conf = 7-npn-with-sni-context-switch-ssl +[9-npn-with-sni-context-switch] +ssl_conf = 9-npn-with-sni-context-switch-ssl -[7-npn-with-sni-context-switch-ssl] -server = 7-npn-with-sni-context-switch-server -client = 7-npn-with-sni-context-switch-client -server2 = 7-npn-with-sni-context-switch-server2 +[9-npn-with-sni-context-switch-ssl] +server = 9-npn-with-sni-context-switch-server +client = 9-npn-with-sni-context-switch-client +server2 = 9-npn-with-sni-context-switch-server2 -[7-npn-with-sni-context-switch-server] +[9-npn-with-sni-context-switch-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[7-npn-with-sni-context-switch-server2] +[9-npn-with-sni-context-switch-server2] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[7-npn-with-sni-context-switch-client] +[9-npn-with-sni-context-switch-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-7] +[test-9] ExpectedNPNProtocol = bar ExpectedServerName = server2 -server = 7-npn-with-sni-context-switch-server-extra -server2 = 7-npn-with-sni-context-switch-server2-extra -client = 7-npn-with-sni-context-switch-client-extra +server = 9-npn-with-sni-context-switch-server-extra +server2 = 9-npn-with-sni-context-switch-server2-extra +client = 9-npn-with-sni-context-switch-client-extra -[7-npn-with-sni-context-switch-server-extra] +[9-npn-with-sni-context-switch-server-extra] NPNProtocols = foo ServerNameCallback = IgnoreMismatch -[7-npn-with-sni-context-switch-server2-extra] +[9-npn-with-sni-context-switch-server2-extra] NPNProtocols = bar -[7-npn-with-sni-context-switch-client-extra] +[9-npn-with-sni-context-switch-client-extra] NPNProtocols = foo,bar ServerName = server2 # =========================================================== -[8-npn-selected-sni-server-supports-npn] -ssl_conf = 8-npn-selected-sni-server-supports-npn-ssl +[10-npn-selected-sni-server-supports-npn] +ssl_conf = 10-npn-selected-sni-server-supports-npn-ssl -[8-npn-selected-sni-server-supports-npn-ssl] -server = 8-npn-selected-sni-server-supports-npn-server -client = 8-npn-selected-sni-server-supports-npn-client -server2 = 8-npn-selected-sni-server-supports-npn-server2 +[10-npn-selected-sni-server-supports-npn-ssl] +server = 10-npn-selected-sni-server-supports-npn-server +client = 10-npn-selected-sni-server-supports-npn-client +server2 = 10-npn-selected-sni-server-supports-npn-server2 -[8-npn-selected-sni-server-supports-npn-server] 
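The empty-list cases earlier in this file (tests 6 and 7) pin down how a client-side NPN selection callback is expected to behave under the hardened SSL_select_next_proto() semantics these tests were added alongside: an empty server list still lets the client fall back to its own first protocol (test 7 negotiates "foo"), while an empty client list leaves nothing to select and must abort the handshake (test 6 expects a HandshakeFailure alert). A minimal sketch of such a callback follows; the names npn_select_cb and client_protos, and the protocol list itself, are illustrative and not part of this patch.

#include <openssl/ssl.h>

/* Length-prefixed NPN protocol list: "foo", "bar" (illustrative) */
static const unsigned char client_protos[] = "\x03foo\x03bar";

static int npn_select_cb(SSL *ssl, unsigned char **out, unsigned char *outlen,
                         const unsigned char *in, unsigned int inlen, void *arg)
{
    (void)ssl;
    (void)arg;

    /*
     * Pick a protocol from the server's advertised list (in/inlen).
     * For NPN, OPENSSL_NPN_NO_OVERLAP is not necessarily fatal: *out is
     * then left pointing at our own first protocol as an opportunistic
     * fallback. With the hardened helper, an empty or malformed client
     * list instead sets *out to NULL, and the only safe reaction is to
     * abort rather than dereference it.
     */
    SSL_select_next_proto(out, outlen, in, inlen, client_protos,
                          sizeof(client_protos) - 1);
    if (*out == NULL)
        return SSL_TLSEXT_ERR_ALERT_FATAL;
    return SSL_TLSEXT_ERR_OK;
}

/*
 * Registered on the client context, e.g.:
 *     SSL_CTX_set_next_proto_select_cb(cctx, npn_select_cb, NULL);
 */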
+[10-npn-selected-sni-server-supports-npn-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[8-npn-selected-sni-server-supports-npn-server2] +[10-npn-selected-sni-server-supports-npn-server2] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[8-npn-selected-sni-server-supports-npn-client] +[10-npn-selected-sni-server-supports-npn-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-8] +[test-10] ExpectedNPNProtocol = bar ExpectedServerName = server2 -server = 8-npn-selected-sni-server-supports-npn-server-extra -server2 = 8-npn-selected-sni-server-supports-npn-server2-extra -client = 8-npn-selected-sni-server-supports-npn-client-extra +server = 10-npn-selected-sni-server-supports-npn-server-extra +server2 = 10-npn-selected-sni-server-supports-npn-server2-extra +client = 10-npn-selected-sni-server-supports-npn-client-extra -[8-npn-selected-sni-server-supports-npn-server-extra] +[10-npn-selected-sni-server-supports-npn-server-extra] ServerNameCallback = IgnoreMismatch -[8-npn-selected-sni-server-supports-npn-server2-extra] +[10-npn-selected-sni-server-supports-npn-server2-extra] NPNProtocols = bar -[8-npn-selected-sni-server-supports-npn-client-extra] +[10-npn-selected-sni-server-supports-npn-client-extra] NPNProtocols = foo,bar ServerName = server2 # =========================================================== -[9-npn-selected-sni-server-does-not-support-npn] -ssl_conf = 9-npn-selected-sni-server-does-not-support-npn-ssl +[11-npn-selected-sni-server-does-not-support-npn] +ssl_conf = 11-npn-selected-sni-server-does-not-support-npn-ssl -[9-npn-selected-sni-server-does-not-support-npn-ssl] -server = 9-npn-selected-sni-server-does-not-support-npn-server -client = 9-npn-selected-sni-server-does-not-support-npn-client -server2 = 9-npn-selected-sni-server-does-not-support-npn-server2 +[11-npn-selected-sni-server-does-not-support-npn-ssl] +server = 11-npn-selected-sni-server-does-not-support-npn-server +client = 11-npn-selected-sni-server-does-not-support-npn-client +server2 = 11-npn-selected-sni-server-does-not-support-npn-server2 -[9-npn-selected-sni-server-does-not-support-npn-server] +[11-npn-selected-sni-server-does-not-support-npn-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[9-npn-selected-sni-server-does-not-support-npn-server2] +[11-npn-selected-sni-server-does-not-support-npn-server2] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[9-npn-selected-sni-server-does-not-support-npn-client] +[11-npn-selected-sni-server-does-not-support-npn-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-9] +[test-11] ExpectedServerName = server2 -server = 9-npn-selected-sni-server-does-not-support-npn-server-extra -client = 9-npn-selected-sni-server-does-not-support-npn-client-extra +server = 11-npn-selected-sni-server-does-not-support-npn-server-extra +client = 11-npn-selected-sni-server-does-not-support-npn-client-extra -[9-npn-selected-sni-server-does-not-support-npn-server-extra] +[11-npn-selected-sni-server-does-not-support-npn-server-extra] NPNProtocols = bar ServerNameCallback = IgnoreMismatch 
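The npn-with-sni cases above all hinge on the server's servername callback optionally switching the connection over to a second SSL_CTX, after which the NPN advertisement comes from the new context (or disappears, as in the test just above, when that context sets no NPNProtocols). A rough equivalent of what the harness's ServerNameCallback = IgnoreMismatch setting does, sketched with a hypothetical server2_ctx argument:

#include <string.h>
#include <openssl/ssl.h>

static int servername_cb(SSL *s, int *al, void *arg)
{
    SSL_CTX *server2_ctx = arg;  /* second context, e.g. with NPNProtocols = bar */
    const char *name = SSL_get_servername(s, TLSEXT_NAMETYPE_host_name);

    (void)al;

    /* On a matching SNI name, swap contexts; NPN/ALPN settings follow along */
    if (name != NULL && strcmp(name, "server2") == 0)
        SSL_set_SSL_CTX(s, server2_ctx);

    /* Unknown or absent names are tolerated rather than rejected */
    return SSL_TLSEXT_ERR_OK;
}

/*
 * Wired up on the default server context, e.g.:
 *     SSL_CTX_set_tlsext_servername_callback(sctx, servername_cb);
 *     SSL_CTX_set_tlsext_servername_arg(sctx, server2_ctx);
 */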
-[9-npn-selected-sni-server-does-not-support-npn-client-extra] +[11-npn-selected-sni-server-does-not-support-npn-client-extra] NPNProtocols = foo,bar ServerName = server2 # =========================================================== -[10-alpn-preferred-over-npn] -ssl_conf = 10-alpn-preferred-over-npn-ssl +[12-alpn-preferred-over-npn] +ssl_conf = 12-alpn-preferred-over-npn-ssl -[10-alpn-preferred-over-npn-ssl] -server = 10-alpn-preferred-over-npn-server -client = 10-alpn-preferred-over-npn-client +[12-alpn-preferred-over-npn-ssl] +server = 12-alpn-preferred-over-npn-server +client = 12-alpn-preferred-over-npn-client -[10-alpn-preferred-over-npn-server] +[12-alpn-preferred-over-npn-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[10-alpn-preferred-over-npn-client] +[12-alpn-preferred-over-npn-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-10] +[test-12] ExpectedALPNProtocol = foo -server = 10-alpn-preferred-over-npn-server-extra -client = 10-alpn-preferred-over-npn-client-extra +server = 12-alpn-preferred-over-npn-server-extra +client = 12-alpn-preferred-over-npn-client-extra -[10-alpn-preferred-over-npn-server-extra] +[12-alpn-preferred-over-npn-server-extra] ALPNProtocols = foo NPNProtocols = bar -[10-alpn-preferred-over-npn-client-extra] +[12-alpn-preferred-over-npn-client-extra] ALPNProtocols = foo NPNProtocols = bar # =========================================================== -[11-sni-npn-preferred-over-alpn] -ssl_conf = 11-sni-npn-preferred-over-alpn-ssl +[13-sni-npn-preferred-over-alpn] +ssl_conf = 13-sni-npn-preferred-over-alpn-ssl -[11-sni-npn-preferred-over-alpn-ssl] -server = 11-sni-npn-preferred-over-alpn-server -client = 11-sni-npn-preferred-over-alpn-client -server2 = 11-sni-npn-preferred-over-alpn-server2 +[13-sni-npn-preferred-over-alpn-ssl] +server = 13-sni-npn-preferred-over-alpn-server +client = 13-sni-npn-preferred-over-alpn-client +server2 = 13-sni-npn-preferred-over-alpn-server2 -[11-sni-npn-preferred-over-alpn-server] +[13-sni-npn-preferred-over-alpn-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[11-sni-npn-preferred-over-alpn-server2] +[13-sni-npn-preferred-over-alpn-server2] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[11-sni-npn-preferred-over-alpn-client] +[13-sni-npn-preferred-over-alpn-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-11] +[test-13] ExpectedNPNProtocol = bar ExpectedServerName = server2 -server = 11-sni-npn-preferred-over-alpn-server-extra -server2 = 11-sni-npn-preferred-over-alpn-server2-extra -client = 11-sni-npn-preferred-over-alpn-client-extra +server = 13-sni-npn-preferred-over-alpn-server-extra +server2 = 13-sni-npn-preferred-over-alpn-server2-extra +client = 13-sni-npn-preferred-over-alpn-client-extra -[11-sni-npn-preferred-over-alpn-server-extra] +[13-sni-npn-preferred-over-alpn-server-extra] ALPNProtocols = foo ServerNameCallback = IgnoreMismatch -[11-sni-npn-preferred-over-alpn-server2-extra] +[13-sni-npn-preferred-over-alpn-server2-extra] NPNProtocols = bar -[11-sni-npn-preferred-over-alpn-client-extra] +[13-sni-npn-preferred-over-alpn-client-extra] ALPNProtocols = foo NPNProtocols = bar ServerName = server2 @@ -460,356 
+527,356 @@ ServerName = server2 # =========================================================== -[12-npn-simple-resumption] -ssl_conf = 12-npn-simple-resumption-ssl +[14-npn-simple-resumption] +ssl_conf = 14-npn-simple-resumption-ssl -[12-npn-simple-resumption-ssl] -server = 12-npn-simple-resumption-server -client = 12-npn-simple-resumption-client -resume-server = 12-npn-simple-resumption-server -resume-client = 12-npn-simple-resumption-client +[14-npn-simple-resumption-ssl] +server = 14-npn-simple-resumption-server +client = 14-npn-simple-resumption-client +resume-server = 14-npn-simple-resumption-server +resume-client = 14-npn-simple-resumption-client -[12-npn-simple-resumption-server] +[14-npn-simple-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[12-npn-simple-resumption-client] +[14-npn-simple-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-12] +[test-14] ExpectedNPNProtocol = foo HandshakeMode = Resume ResumptionExpected = Yes -server = 12-npn-simple-resumption-server-extra -resume-server = 12-npn-simple-resumption-server-extra -client = 12-npn-simple-resumption-client-extra -resume-client = 12-npn-simple-resumption-client-extra +server = 14-npn-simple-resumption-server-extra +resume-server = 14-npn-simple-resumption-server-extra +client = 14-npn-simple-resumption-client-extra +resume-client = 14-npn-simple-resumption-client-extra -[12-npn-simple-resumption-server-extra] +[14-npn-simple-resumption-server-extra] NPNProtocols = foo -[12-npn-simple-resumption-client-extra] +[14-npn-simple-resumption-client-extra] NPNProtocols = foo # =========================================================== -[13-npn-server-switch-resumption] -ssl_conf = 13-npn-server-switch-resumption-ssl +[15-npn-server-switch-resumption] +ssl_conf = 15-npn-server-switch-resumption-ssl -[13-npn-server-switch-resumption-ssl] -server = 13-npn-server-switch-resumption-server -client = 13-npn-server-switch-resumption-client -resume-server = 13-npn-server-switch-resumption-resume-server -resume-client = 13-npn-server-switch-resumption-client +[15-npn-server-switch-resumption-ssl] +server = 15-npn-server-switch-resumption-server +client = 15-npn-server-switch-resumption-client +resume-server = 15-npn-server-switch-resumption-resume-server +resume-client = 15-npn-server-switch-resumption-client -[13-npn-server-switch-resumption-server] +[15-npn-server-switch-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[13-npn-server-switch-resumption-resume-server] +[15-npn-server-switch-resumption-resume-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[13-npn-server-switch-resumption-client] +[15-npn-server-switch-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-13] +[test-15] ExpectedNPNProtocol = baz HandshakeMode = Resume ResumptionExpected = Yes -server = 13-npn-server-switch-resumption-server-extra -resume-server = 13-npn-server-switch-resumption-resume-server-extra -client = 13-npn-server-switch-resumption-client-extra -resume-client = 13-npn-server-switch-resumption-client-extra +server = 15-npn-server-switch-resumption-server-extra +resume-server = 
15-npn-server-switch-resumption-resume-server-extra +client = 15-npn-server-switch-resumption-client-extra +resume-client = 15-npn-server-switch-resumption-client-extra -[13-npn-server-switch-resumption-server-extra] +[15-npn-server-switch-resumption-server-extra] NPNProtocols = bar,foo -[13-npn-server-switch-resumption-resume-server-extra] +[15-npn-server-switch-resumption-resume-server-extra] NPNProtocols = baz,foo -[13-npn-server-switch-resumption-client-extra] +[15-npn-server-switch-resumption-client-extra] NPNProtocols = foo,bar,baz # =========================================================== -[14-npn-client-switch-resumption] -ssl_conf = 14-npn-client-switch-resumption-ssl +[16-npn-client-switch-resumption] +ssl_conf = 16-npn-client-switch-resumption-ssl -[14-npn-client-switch-resumption-ssl] -server = 14-npn-client-switch-resumption-server -client = 14-npn-client-switch-resumption-client -resume-server = 14-npn-client-switch-resumption-server -resume-client = 14-npn-client-switch-resumption-resume-client +[16-npn-client-switch-resumption-ssl] +server = 16-npn-client-switch-resumption-server +client = 16-npn-client-switch-resumption-client +resume-server = 16-npn-client-switch-resumption-server +resume-client = 16-npn-client-switch-resumption-resume-client -[14-npn-client-switch-resumption-server] +[16-npn-client-switch-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[14-npn-client-switch-resumption-client] +[16-npn-client-switch-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[14-npn-client-switch-resumption-resume-client] +[16-npn-client-switch-resumption-resume-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-14] +[test-16] ExpectedNPNProtocol = bar HandshakeMode = Resume ResumptionExpected = Yes -server = 14-npn-client-switch-resumption-server-extra -resume-server = 14-npn-client-switch-resumption-server-extra -client = 14-npn-client-switch-resumption-client-extra -resume-client = 14-npn-client-switch-resumption-resume-client-extra +server = 16-npn-client-switch-resumption-server-extra +resume-server = 16-npn-client-switch-resumption-server-extra +client = 16-npn-client-switch-resumption-client-extra +resume-client = 16-npn-client-switch-resumption-resume-client-extra -[14-npn-client-switch-resumption-server-extra] +[16-npn-client-switch-resumption-server-extra] NPNProtocols = foo,bar,baz -[14-npn-client-switch-resumption-client-extra] +[16-npn-client-switch-resumption-client-extra] NPNProtocols = foo,baz -[14-npn-client-switch-resumption-resume-client-extra] +[16-npn-client-switch-resumption-resume-client-extra] NPNProtocols = bar,baz # =========================================================== -[15-npn-client-first-pref-on-mismatch-resumption] -ssl_conf = 15-npn-client-first-pref-on-mismatch-resumption-ssl +[17-npn-client-first-pref-on-mismatch-resumption] +ssl_conf = 17-npn-client-first-pref-on-mismatch-resumption-ssl -[15-npn-client-first-pref-on-mismatch-resumption-ssl] -server = 15-npn-client-first-pref-on-mismatch-resumption-server -client = 15-npn-client-first-pref-on-mismatch-resumption-client -resume-server = 15-npn-client-first-pref-on-mismatch-resumption-resume-server -resume-client = 15-npn-client-first-pref-on-mismatch-resumption-client +[17-npn-client-first-pref-on-mismatch-resumption-ssl] +server = 
17-npn-client-first-pref-on-mismatch-resumption-server +client = 17-npn-client-first-pref-on-mismatch-resumption-client +resume-server = 17-npn-client-first-pref-on-mismatch-resumption-resume-server +resume-client = 17-npn-client-first-pref-on-mismatch-resumption-client -[15-npn-client-first-pref-on-mismatch-resumption-server] +[17-npn-client-first-pref-on-mismatch-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[15-npn-client-first-pref-on-mismatch-resumption-resume-server] +[17-npn-client-first-pref-on-mismatch-resumption-resume-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[15-npn-client-first-pref-on-mismatch-resumption-client] +[17-npn-client-first-pref-on-mismatch-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-15] +[test-17] ExpectedNPNProtocol = foo HandshakeMode = Resume ResumptionExpected = Yes -server = 15-npn-client-first-pref-on-mismatch-resumption-server-extra -resume-server = 15-npn-client-first-pref-on-mismatch-resumption-resume-server-extra -client = 15-npn-client-first-pref-on-mismatch-resumption-client-extra -resume-client = 15-npn-client-first-pref-on-mismatch-resumption-client-extra +server = 17-npn-client-first-pref-on-mismatch-resumption-server-extra +resume-server = 17-npn-client-first-pref-on-mismatch-resumption-resume-server-extra +client = 17-npn-client-first-pref-on-mismatch-resumption-client-extra +resume-client = 17-npn-client-first-pref-on-mismatch-resumption-client-extra -[15-npn-client-first-pref-on-mismatch-resumption-server-extra] +[17-npn-client-first-pref-on-mismatch-resumption-server-extra] NPNProtocols = bar -[15-npn-client-first-pref-on-mismatch-resumption-resume-server-extra] +[17-npn-client-first-pref-on-mismatch-resumption-resume-server-extra] NPNProtocols = baz -[15-npn-client-first-pref-on-mismatch-resumption-client-extra] +[17-npn-client-first-pref-on-mismatch-resumption-client-extra] NPNProtocols = foo,bar # =========================================================== -[16-npn-no-server-support-resumption] -ssl_conf = 16-npn-no-server-support-resumption-ssl +[18-npn-no-server-support-resumption] +ssl_conf = 18-npn-no-server-support-resumption-ssl -[16-npn-no-server-support-resumption-ssl] -server = 16-npn-no-server-support-resumption-server -client = 16-npn-no-server-support-resumption-client -resume-server = 16-npn-no-server-support-resumption-resume-server -resume-client = 16-npn-no-server-support-resumption-client +[18-npn-no-server-support-resumption-ssl] +server = 18-npn-no-server-support-resumption-server +client = 18-npn-no-server-support-resumption-client +resume-server = 18-npn-no-server-support-resumption-resume-server +resume-client = 18-npn-no-server-support-resumption-client -[16-npn-no-server-support-resumption-server] +[18-npn-no-server-support-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[16-npn-no-server-support-resumption-resume-server] +[18-npn-no-server-support-resumption-resume-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[16-npn-no-server-support-resumption-client] +[18-npn-no-server-support-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 
VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-16] +[test-18] HandshakeMode = Resume ResumptionExpected = Yes -server = 16-npn-no-server-support-resumption-server-extra -client = 16-npn-no-server-support-resumption-client-extra -resume-client = 16-npn-no-server-support-resumption-client-extra +server = 18-npn-no-server-support-resumption-server-extra +client = 18-npn-no-server-support-resumption-client-extra +resume-client = 18-npn-no-server-support-resumption-client-extra -[16-npn-no-server-support-resumption-server-extra] +[18-npn-no-server-support-resumption-server-extra] NPNProtocols = foo -[16-npn-no-server-support-resumption-client-extra] +[18-npn-no-server-support-resumption-client-extra] NPNProtocols = foo # =========================================================== -[17-npn-no-client-support-resumption] -ssl_conf = 17-npn-no-client-support-resumption-ssl +[19-npn-no-client-support-resumption] +ssl_conf = 19-npn-no-client-support-resumption-ssl -[17-npn-no-client-support-resumption-ssl] -server = 17-npn-no-client-support-resumption-server -client = 17-npn-no-client-support-resumption-client -resume-server = 17-npn-no-client-support-resumption-server -resume-client = 17-npn-no-client-support-resumption-resume-client +[19-npn-no-client-support-resumption-ssl] +server = 19-npn-no-client-support-resumption-server +client = 19-npn-no-client-support-resumption-client +resume-server = 19-npn-no-client-support-resumption-server +resume-client = 19-npn-no-client-support-resumption-resume-client -[17-npn-no-client-support-resumption-server] +[19-npn-no-client-support-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[17-npn-no-client-support-resumption-client] +[19-npn-no-client-support-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[17-npn-no-client-support-resumption-resume-client] +[19-npn-no-client-support-resumption-resume-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-17] +[test-19] HandshakeMode = Resume ResumptionExpected = Yes -server = 17-npn-no-client-support-resumption-server-extra -resume-server = 17-npn-no-client-support-resumption-server-extra -client = 17-npn-no-client-support-resumption-client-extra +server = 19-npn-no-client-support-resumption-server-extra +resume-server = 19-npn-no-client-support-resumption-server-extra +client = 19-npn-no-client-support-resumption-client-extra -[17-npn-no-client-support-resumption-server-extra] +[19-npn-no-client-support-resumption-server-extra] NPNProtocols = foo -[17-npn-no-client-support-resumption-client-extra] +[19-npn-no-client-support-resumption-client-extra] NPNProtocols = foo # =========================================================== -[18-alpn-preferred-over-npn-resumption] -ssl_conf = 18-alpn-preferred-over-npn-resumption-ssl +[20-alpn-preferred-over-npn-resumption] +ssl_conf = 20-alpn-preferred-over-npn-resumption-ssl -[18-alpn-preferred-over-npn-resumption-ssl] -server = 18-alpn-preferred-over-npn-resumption-server -client = 18-alpn-preferred-over-npn-resumption-client -resume-server = 18-alpn-preferred-over-npn-resumption-resume-server -resume-client = 18-alpn-preferred-over-npn-resumption-client +[20-alpn-preferred-over-npn-resumption-ssl] +server = 20-alpn-preferred-over-npn-resumption-server +client = 
20-alpn-preferred-over-npn-resumption-client +resume-server = 20-alpn-preferred-over-npn-resumption-resume-server +resume-client = 20-alpn-preferred-over-npn-resumption-client -[18-alpn-preferred-over-npn-resumption-server] +[20-alpn-preferred-over-npn-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[18-alpn-preferred-over-npn-resumption-resume-server] +[20-alpn-preferred-over-npn-resumption-resume-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[18-alpn-preferred-over-npn-resumption-client] +[20-alpn-preferred-over-npn-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-18] +[test-20] ExpectedALPNProtocol = foo HandshakeMode = Resume ResumptionExpected = Yes -server = 18-alpn-preferred-over-npn-resumption-server-extra -resume-server = 18-alpn-preferred-over-npn-resumption-resume-server-extra -client = 18-alpn-preferred-over-npn-resumption-client-extra -resume-client = 18-alpn-preferred-over-npn-resumption-client-extra +server = 20-alpn-preferred-over-npn-resumption-server-extra +resume-server = 20-alpn-preferred-over-npn-resumption-resume-server-extra +client = 20-alpn-preferred-over-npn-resumption-client-extra +resume-client = 20-alpn-preferred-over-npn-resumption-client-extra -[18-alpn-preferred-over-npn-resumption-server-extra] +[20-alpn-preferred-over-npn-resumption-server-extra] NPNProtocols = bar -[18-alpn-preferred-over-npn-resumption-resume-server-extra] +[20-alpn-preferred-over-npn-resumption-resume-server-extra] ALPNProtocols = foo NPNProtocols = baz -[18-alpn-preferred-over-npn-resumption-client-extra] +[20-alpn-preferred-over-npn-resumption-client-extra] ALPNProtocols = foo NPNProtocols = bar,baz # =========================================================== -[19-npn-used-if-alpn-not-supported-resumption] -ssl_conf = 19-npn-used-if-alpn-not-supported-resumption-ssl +[21-npn-used-if-alpn-not-supported-resumption] +ssl_conf = 21-npn-used-if-alpn-not-supported-resumption-ssl -[19-npn-used-if-alpn-not-supported-resumption-ssl] -server = 19-npn-used-if-alpn-not-supported-resumption-server -client = 19-npn-used-if-alpn-not-supported-resumption-client -resume-server = 19-npn-used-if-alpn-not-supported-resumption-resume-server -resume-client = 19-npn-used-if-alpn-not-supported-resumption-client +[21-npn-used-if-alpn-not-supported-resumption-ssl] +server = 21-npn-used-if-alpn-not-supported-resumption-server +client = 21-npn-used-if-alpn-not-supported-resumption-client +resume-server = 21-npn-used-if-alpn-not-supported-resumption-resume-server +resume-client = 21-npn-used-if-alpn-not-supported-resumption-client -[19-npn-used-if-alpn-not-supported-resumption-server] +[21-npn-used-if-alpn-not-supported-resumption-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[19-npn-used-if-alpn-not-supported-resumption-resume-server] +[21-npn-used-if-alpn-not-supported-resumption-resume-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[19-npn-used-if-alpn-not-supported-resumption-client] +[21-npn-used-if-alpn-not-supported-resumption-client] CipherString = DEFAULT MaxProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-19] 
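The alpn-preferred-over-npn cases above encode the precedence rule: when the server has an ALPN selection callback and it succeeds, ALPN wins, and NPN is only consulted if ALPN is not in play. On the server side that decision lives in the ALPN callback. A sketch under assumed names (alpn_select_cb, server_protos) follows; note that, unlike NPN, an ALPN mismatch is fatal and surfaces as a no_application_protocol alert, which is exactly what the alpn-empty-server-list case later in this diff expects.

#include <openssl/ssl.h>

static const unsigned char server_protos[] = "\x03foo"; /* illustrative */

static int alpn_select_cb(SSL *ssl, const unsigned char **out,
                          unsigned char *outlen, const unsigned char *in,
                          unsigned int inlen, void *arg)
{
    unsigned char *sel;

    (void)ssl;
    (void)arg;

    /* in/inlen is the client's ALPN list; select in server-preference order */
    if (SSL_select_next_proto(&sel, outlen, server_protos,
                              sizeof(server_protos) - 1, in, inlen)
            != OPENSSL_NPN_NEGOTIATED)
        return SSL_TLSEXT_ERR_ALERT_FATAL; /* -> no_application_protocol */

    *out = sel;
    return SSL_TLSEXT_ERR_OK;
}

/*
 * Registered with, e.g.:
 *     SSL_CTX_set_alpn_select_cb(sctx, alpn_select_cb, NULL);
 * The opportunistic no-overlap fallback that NPN tolerates must not be
 * used here; for ALPN the callback either agrees on a protocol or fails.
 */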
+[test-21] ExpectedNPNProtocol = baz HandshakeMode = Resume ResumptionExpected = Yes -server = 19-npn-used-if-alpn-not-supported-resumption-server-extra -resume-server = 19-npn-used-if-alpn-not-supported-resumption-resume-server-extra -client = 19-npn-used-if-alpn-not-supported-resumption-client-extra -resume-client = 19-npn-used-if-alpn-not-supported-resumption-client-extra +server = 21-npn-used-if-alpn-not-supported-resumption-server-extra +resume-server = 21-npn-used-if-alpn-not-supported-resumption-resume-server-extra +client = 21-npn-used-if-alpn-not-supported-resumption-client-extra +resume-client = 21-npn-used-if-alpn-not-supported-resumption-client-extra -[19-npn-used-if-alpn-not-supported-resumption-server-extra] +[21-npn-used-if-alpn-not-supported-resumption-server-extra] ALPNProtocols = foo NPNProtocols = bar -[19-npn-used-if-alpn-not-supported-resumption-resume-server-extra] +[21-npn-used-if-alpn-not-supported-resumption-resume-server-extra] NPNProtocols = baz -[19-npn-used-if-alpn-not-supported-resumption-client-extra] +[21-npn-used-if-alpn-not-supported-resumption-client-extra] ALPNProtocols = foo NPNProtocols = bar,baz diff --git a/deps/openssl/openssl/test/ssl-tests/08-npn.cnf.in b/deps/openssl/openssl/test/ssl-tests/08-npn.cnf.in index 30783e45eb5931..0caed2100ed488 100644 --- a/deps/openssl/openssl/test/ssl-tests/08-npn.cnf.in +++ b/deps/openssl/openssl/test/ssl-tests/08-npn.cnf.in @@ -1,5 +1,5 @@ # -*- mode: perl; -*- -# Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -110,6 +110,41 @@ our @tests = ( "ExpectedNPNProtocol" => undef, }, }, + { + name => "npn-empty-client-list", + server => { + extra => { + "NPNProtocols" => "foo", + }, + }, + client => { + extra => { + "NPNProtocols" => "", + }, + "MaxProtocol" => "TLSv1.2" + }, + test => { + "ExpectedResult" => "ClientFail", + "ExpectedClientAlert" => "HandshakeFailure" + }, + }, + { + name => "npn-empty-server-list", + server => { + extra => { + "NPNProtocols" => "", + }, + }, + client => { + extra => { + "NPNProtocols" => "foo", + }, + "MaxProtocol" => "TLSv1.2" + }, + test => { + "ExpectedNPNProtocol" => "foo" + }, + }, { name => "npn-with-sni-no-context-switch", server => { diff --git a/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf b/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf index e7e6cb95348b72..dd668739ab9a06 100644 --- a/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf +++ b/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf @@ -1,6 +1,6 @@ # Generated with generate_ssl_tests.pl -num_tests = 16 +num_tests = 18 test-0 = 0-alpn-simple test-1 = 1-alpn-server-finds-match @@ -18,6 +18,8 @@ test-12 = 12-alpn-client-switch-resumption test-13 = 13-alpn-alert-on-mismatch-resumption test-14 = 14-alpn-no-server-support-resumption test-15 = 15-alpn-no-client-support-resumption +test-16 = 16-alpn-empty-client-list +test-17 = 17-alpn-empty-server-list # =========================================================== [0-alpn-simple] @@ -617,3 +619,65 @@ ALPNProtocols = foo ALPNProtocols = foo +# =========================================================== + +[16-alpn-empty-client-list] +ssl_conf = 16-alpn-empty-client-list-ssl + +[16-alpn-empty-client-list-ssl] +server = 16-alpn-empty-client-list-server +client = 16-alpn-empty-client-list-client + +[16-alpn-empty-client-list-server] 
+Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem +CipherString = DEFAULT +PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem + +[16-alpn-empty-client-list-client] +CipherString = DEFAULT +VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem +VerifyMode = Peer + +[test-16] +server = 16-alpn-empty-client-list-server-extra +client = 16-alpn-empty-client-list-client-extra + +[16-alpn-empty-client-list-server-extra] +ALPNProtocols = foo + +[16-alpn-empty-client-list-client-extra] +ALPNProtocols = + + +# =========================================================== + +[17-alpn-empty-server-list] +ssl_conf = 17-alpn-empty-server-list-ssl + +[17-alpn-empty-server-list-ssl] +server = 17-alpn-empty-server-list-server +client = 17-alpn-empty-server-list-client + +[17-alpn-empty-server-list-server] +Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem +CipherString = DEFAULT +PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem + +[17-alpn-empty-server-list-client] +CipherString = DEFAULT +VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem +VerifyMode = Peer + +[test-17] +ExpectedResult = ServerFail +ExpectedServerAlert = NoApplicationProtocol +server = 17-alpn-empty-server-list-server-extra +client = 17-alpn-empty-server-list-client-extra + +[17-alpn-empty-server-list-server-extra] +ALPNProtocols = + +[17-alpn-empty-server-list-client-extra] +ALPNProtocols = foo + + diff --git a/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf.in b/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf.in index 81330756c62ce6..73e9cbabb0538a 100644 --- a/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf.in +++ b/deps/openssl/openssl/test/ssl-tests/09-alpn.cnf.in @@ -1,5 +1,5 @@ # -*- mode: perl; -*- -# Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. 
You can obtain a copy @@ -322,4 +322,37 @@ our @tests = ( "ExpectedALPNProtocol" => undef, }, }, + { + name => "alpn-empty-client-list", + server => { + extra => { + "ALPNProtocols" => "foo", + }, + }, + client => { + extra => { + "ALPNProtocols" => "", + }, + }, + test => { + "ExpectedALPNProtocol" => undef, + }, + }, + { + name => "alpn-empty-server-list", + server => { + extra => { + "ALPNProtocols" => "", + }, + }, + client => { + extra => { + "ALPNProtocols" => "foo", + }, + }, + test => { + "ExpectedResult" => "ServerFail", + "ExpectedServerAlert" => "NoApplicationProtocol", + }, + }, ); diff --git a/deps/openssl/openssl/test/ssl-tests/14-curves.cnf.in b/deps/openssl/openssl/test/ssl-tests/14-curves.cnf.in index 1e003bace0b7a4..33201df281f70f 100644 --- a/deps/openssl/openssl/test/ssl-tests/14-curves.cnf.in +++ b/deps/openssl/openssl/test/ssl-tests/14-curves.cnf.in @@ -12,8 +12,11 @@ use OpenSSL::Test::Utils qw(anydisabled); our $fips_mode; -my @curves = ("prime256v1", "secp384r1", "secp521r1", "X25519", - "X448"); +my @curves = ("prime256v1", "secp384r1", "secp521r1"); + +my @curves_no_fips = ("X25519", "X448"); + +push @curves, @curves_no_fips if !$fips_mode; #Curves *only* suitable for use in TLSv1.3 my @curves_tls_1_3 = ("ffdhe2048", "ffdhe3072", "ffdhe4096", "ffdhe6144", diff --git a/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf b/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf index 79dcd4c8f4e2ed..6888d538ba354d 100644 --- a/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf +++ b/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf @@ -19,12 +19,12 @@ test-13 = 13-Suite B P-256 Hash Algorithm Selection test-14 = 14-Suite B P-384 Hash Algorithm Selection test-15 = 15-Ed25519 CipherString and Signature Algorithm Selection test-16 = 16-Ed448 CipherString and Signature Algorithm Selection -test-17 = 17-Ed25519 CipherString and Curves Selection -test-18 = 18-Ed448 CipherString and Curves Selection -test-19 = 19-TLS 1.2 Ed25519 Client Auth -test-20 = 20-TLS 1.2 Ed448 Client Auth -test-21 = 21-ECDSA Signature Algorithm Selection SHA1 -test-22 = 22-ECDSA with brainpool +test-17 = 17-TLS 1.2 Ed25519 Client Auth +test-18 = 18-TLS 1.2 Ed448 Client Auth +test-19 = 19-ECDSA Signature Algorithm Selection SHA1 +test-20 = 20-ECDSA with brainpool +test-21 = 21-Ed25519 CipherString and Curves Selection +test-22 = 22-Ed448 CipherString and Curves Selection test-23 = 23-RSA-PSS Certificate CipherString Selection test-24 = 24-RSA-PSS Certificate Legacy Signature Algorithm Selection test-25 = 25-RSA-PSS Certificate Unified Signature Algorithm Selection @@ -602,91 +602,21 @@ ExpectedServerSignType = Ed448 # =========================================================== -[17-Ed25519 CipherString and Curves Selection] -ssl_conf = 17-Ed25519 CipherString and Curves Selection-ssl +[17-TLS 1.2 Ed25519 Client Auth] +ssl_conf = 17-TLS 1.2 Ed25519 Client Auth-ssl -[17-Ed25519 CipherString and Curves Selection-ssl] -server = 17-Ed25519 CipherString and Curves Selection-server -client = 17-Ed25519 CipherString and Curves Selection-client +[17-TLS 1.2 Ed25519 Client Auth-ssl] +server = 17-TLS 1.2 Ed25519 Client Auth-server +client = 17-TLS 1.2 Ed25519 Client Auth-client -[17-Ed25519 CipherString and Curves Selection-server] -Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem -CipherString = DEFAULT -ECDSA.Certificate = ${ENV::TEST_CERTS_DIR}/server-ecdsa-cert.pem -ECDSA.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ecdsa-key.pem -Ed25519.Certificate = 
${ENV::TEST_CERTS_DIR}/server-ed25519-cert.pem -Ed25519.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed25519-key.pem -Ed448.Certificate = ${ENV::TEST_CERTS_DIR}/server-ed448-cert.pem -Ed448.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed448-key.pem -MaxProtocol = TLSv1.2 -PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem - -[17-Ed25519 CipherString and Curves Selection-client] -CipherString = aECDSA -Curves = X25519 -MaxProtocol = TLSv1.2 -SignatureAlgorithms = ECDSA+SHA256:ed25519 -VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem -VerifyMode = Peer - -[test-17] -ExpectedResult = Success -ExpectedServerCertType = Ed25519 -ExpectedServerSignType = Ed25519 - - -# =========================================================== - -[18-Ed448 CipherString and Curves Selection] -ssl_conf = 18-Ed448 CipherString and Curves Selection-ssl - -[18-Ed448 CipherString and Curves Selection-ssl] -server = 18-Ed448 CipherString and Curves Selection-server -client = 18-Ed448 CipherString and Curves Selection-client - -[18-Ed448 CipherString and Curves Selection-server] -Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem -CipherString = DEFAULT -ECDSA.Certificate = ${ENV::TEST_CERTS_DIR}/server-ecdsa-cert.pem -ECDSA.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ecdsa-key.pem -Ed25519.Certificate = ${ENV::TEST_CERTS_DIR}/server-ed25519-cert.pem -Ed25519.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed25519-key.pem -Ed448.Certificate = ${ENV::TEST_CERTS_DIR}/server-ed448-cert.pem -Ed448.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed448-key.pem -MaxProtocol = TLSv1.2 -PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem - -[18-Ed448 CipherString and Curves Selection-client] -CipherString = aECDSA -Curves = X448 -MaxProtocol = TLSv1.2 -SignatureAlgorithms = ECDSA+SHA256:ed448 -VerifyCAFile = ${ENV::TEST_CERTS_DIR}/root-ed448-cert.pem -VerifyMode = Peer - -[test-18] -ExpectedResult = Success -ExpectedServerCertType = Ed448 -ExpectedServerSignType = Ed448 - - -# =========================================================== - -[19-TLS 1.2 Ed25519 Client Auth] -ssl_conf = 19-TLS 1.2 Ed25519 Client Auth-ssl - -[19-TLS 1.2 Ed25519 Client Auth-ssl] -server = 19-TLS 1.2 Ed25519 Client Auth-server -client = 19-TLS 1.2 Ed25519 Client Auth-client - -[19-TLS 1.2 Ed25519 Client Auth-server] +[17-TLS 1.2 Ed25519 Client Auth-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem VerifyCAFile = ${ENV::TEST_CERTS_DIR}/root-cert.pem VerifyMode = Require -[19-TLS 1.2 Ed25519 Client Auth-client] +[17-TLS 1.2 Ed25519 Client Auth-client] CipherString = DEFAULT Ed25519.Certificate = ${ENV::TEST_CERTS_DIR}/client-ed25519-cert.pem Ed25519.PrivateKey = ${ENV::TEST_CERTS_DIR}/client-ed25519-key.pem @@ -695,7 +625,7 @@ MinProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-19] +[test-17] ExpectedClientCertType = Ed25519 ExpectedClientSignType = Ed25519 ExpectedResult = Success @@ -703,21 +633,21 @@ ExpectedResult = Success # =========================================================== -[20-TLS 1.2 Ed448 Client Auth] -ssl_conf = 20-TLS 1.2 Ed448 Client Auth-ssl +[18-TLS 1.2 Ed448 Client Auth] +ssl_conf = 18-TLS 1.2 Ed448 Client Auth-ssl -[20-TLS 1.2 Ed448 Client Auth-ssl] -server = 20-TLS 1.2 Ed448 Client Auth-server -client = 20-TLS 1.2 Ed448 Client Auth-client +[18-TLS 1.2 Ed448 Client Auth-ssl] +server = 18-TLS 1.2 Ed448 Client Auth-server +client = 18-TLS 1.2 Ed448 Client Auth-client -[20-TLS 1.2 Ed448 Client Auth-server] +[18-TLS 1.2 
Ed448 Client Auth-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem VerifyCAFile = ${ENV::TEST_CERTS_DIR}/root-cert.pem VerifyMode = Require -[20-TLS 1.2 Ed448 Client Auth-client] +[18-TLS 1.2 Ed448 Client Auth-client] CipherString = DEFAULT Ed448.Certificate = ${ENV::TEST_CERTS_DIR}/client-ed448-cert.pem Ed448.PrivateKey = ${ENV::TEST_CERTS_DIR}/client-ed448-key.pem @@ -726,7 +656,7 @@ MinProtocol = TLSv1.2 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-20] +[test-18] ExpectedClientCertType = Ed448 ExpectedClientSignType = Ed448 ExpectedResult = Success @@ -734,14 +664,14 @@ ExpectedResult = Success # =========================================================== -[21-ECDSA Signature Algorithm Selection SHA1] -ssl_conf = 21-ECDSA Signature Algorithm Selection SHA1-ssl +[19-ECDSA Signature Algorithm Selection SHA1] +ssl_conf = 19-ECDSA Signature Algorithm Selection SHA1-ssl -[21-ECDSA Signature Algorithm Selection SHA1-ssl] -server = 21-ECDSA Signature Algorithm Selection SHA1-server -client = 21-ECDSA Signature Algorithm Selection SHA1-client +[19-ECDSA Signature Algorithm Selection SHA1-ssl] +server = 19-ECDSA Signature Algorithm Selection SHA1-server +client = 19-ECDSA Signature Algorithm Selection SHA1-client -[21-ECDSA Signature Algorithm Selection SHA1-server] +[19-ECDSA Signature Algorithm Selection SHA1-server] Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem CipherString = DEFAULT:@SECLEVEL=0 ECDSA.Certificate = ${ENV::TEST_CERTS_DIR}/server-ecdsa-cert.pem @@ -753,13 +683,13 @@ Ed448.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed448-key.pem MaxProtocol = TLSv1.2 PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem -[21-ECDSA Signature Algorithm Selection SHA1-client] +[19-ECDSA Signature Algorithm Selection SHA1-client] CipherString = DEFAULT:@SECLEVEL=0 SignatureAlgorithms = ECDSA+SHA1 VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-21] +[test-19] ExpectedResult = Success ExpectedServerCertType = P-256 ExpectedServerSignHash = SHA1 @@ -768,20 +698,20 @@ ExpectedServerSignType = EC # =========================================================== -[22-ECDSA with brainpool] -ssl_conf = 22-ECDSA with brainpool-ssl +[20-ECDSA with brainpool] +ssl_conf = 20-ECDSA with brainpool-ssl -[22-ECDSA with brainpool-ssl] -server = 22-ECDSA with brainpool-server -client = 22-ECDSA with brainpool-client +[20-ECDSA with brainpool-ssl] +server = 20-ECDSA with brainpool-server +client = 20-ECDSA with brainpool-client -[22-ECDSA with brainpool-server] +[20-ECDSA with brainpool-server] Certificate = ${ENV::TEST_CERTS_DIR}/server-ecdsa-brainpoolP256r1-cert.pem CipherString = DEFAULT Groups = brainpoolP256r1 PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ecdsa-brainpoolP256r1-key.pem -[22-ECDSA with brainpool-client] +[20-ECDSA with brainpool-client] CipherString = aECDSA Groups = brainpoolP256r1 MaxProtocol = TLSv1.2 @@ -789,13 +719,83 @@ RequestCAFile = ${ENV::TEST_CERTS_DIR}/root-cert.pem VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem VerifyMode = Peer -[test-22] +[test-20] ExpectedResult = Success ExpectedServerCANames = empty ExpectedServerCertType = brainpoolP256r1 ExpectedServerSignType = EC +# =========================================================== + +[21-Ed25519 CipherString and Curves Selection] +ssl_conf = 21-Ed25519 CipherString and Curves Selection-ssl + +[21-Ed25519 CipherString and Curves Selection-ssl] +server = 21-Ed25519 CipherString and Curves 
Selection-server +client = 21-Ed25519 CipherString and Curves Selection-client + +[21-Ed25519 CipherString and Curves Selection-server] +Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem +CipherString = DEFAULT +ECDSA.Certificate = ${ENV::TEST_CERTS_DIR}/server-ecdsa-cert.pem +ECDSA.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ecdsa-key.pem +Ed25519.Certificate = ${ENV::TEST_CERTS_DIR}/server-ed25519-cert.pem +Ed25519.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed25519-key.pem +Ed448.Certificate = ${ENV::TEST_CERTS_DIR}/server-ed448-cert.pem +Ed448.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed448-key.pem +MaxProtocol = TLSv1.2 +PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem + +[21-Ed25519 CipherString and Curves Selection-client] +CipherString = aECDSA +Curves = X25519 +MaxProtocol = TLSv1.2 +SignatureAlgorithms = ECDSA+SHA256:ed25519 +VerifyCAFile = ${ENV::TEST_CERTS_DIR}/rootcert.pem +VerifyMode = Peer + +[test-21] +ExpectedResult = Success +ExpectedServerCertType = Ed25519 +ExpectedServerSignType = Ed25519 + + +# =========================================================== + +[22-Ed448 CipherString and Curves Selection] +ssl_conf = 22-Ed448 CipherString and Curves Selection-ssl + +[22-Ed448 CipherString and Curves Selection-ssl] +server = 22-Ed448 CipherString and Curves Selection-server +client = 22-Ed448 CipherString and Curves Selection-client + +[22-Ed448 CipherString and Curves Selection-server] +Certificate = ${ENV::TEST_CERTS_DIR}/servercert.pem +CipherString = DEFAULT +ECDSA.Certificate = ${ENV::TEST_CERTS_DIR}/server-ecdsa-cert.pem +ECDSA.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ecdsa-key.pem +Ed25519.Certificate = ${ENV::TEST_CERTS_DIR}/server-ed25519-cert.pem +Ed25519.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed25519-key.pem +Ed448.Certificate = ${ENV::TEST_CERTS_DIR}/server-ed448-cert.pem +Ed448.PrivateKey = ${ENV::TEST_CERTS_DIR}/server-ed448-key.pem +MaxProtocol = TLSv1.2 +PrivateKey = ${ENV::TEST_CERTS_DIR}/serverkey.pem + +[22-Ed448 CipherString and Curves Selection-client] +CipherString = aECDSA +Curves = X448 +MaxProtocol = TLSv1.2 +SignatureAlgorithms = ECDSA+SHA256:ed448 +VerifyCAFile = ${ENV::TEST_CERTS_DIR}/root-ed448-cert.pem +VerifyMode = Peer + +[test-22] +ExpectedResult = Success +ExpectedServerCertType = Ed448 +ExpectedServerSignType = Ed448 + + # =========================================================== [23-RSA-PSS Certificate CipherString Selection] diff --git a/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf.in b/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf.in index 30cde592c6d0e9..435932c4c1810c 100644 --- a/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf.in +++ b/deps/openssl/openssl/test/ssl-tests/20-cert-select.cnf.in @@ -328,41 +328,6 @@ our @tests = ( "ExpectedResult" => "Success" }, }, - { - name => "Ed25519 CipherString and Curves Selection", - server => $server, - client => { - "CipherString" => "aECDSA", - "MaxProtocol" => "TLSv1.2", - "SignatureAlgorithms" => "ECDSA+SHA256:ed25519", - # Excluding P-256 from the supported curves list means server - # certificate should be Ed25519 and not P-256 - "Curves" => "X25519" - }, - test => { - "ExpectedServerCertType" =>, "Ed25519", - "ExpectedServerSignType" =>, "Ed25519", - "ExpectedResult" => "Success" - }, - }, - { - name => "Ed448 CipherString and Curves Selection", - server => $server, - client => { - "CipherString" => "aECDSA", - "MaxProtocol" => "TLSv1.2", - "SignatureAlgorithms" => "ECDSA+SHA256:ed448", - "VerifyCAFile" => test_pem("root-ed448-cert.pem"), - # 
Excluding P-256 from the supported curves list means server - # certificate should be Ed25519 and not P-256 - "Curves" => "X448" - }, - test => { - "ExpectedServerCertType" =>, "Ed448", - "ExpectedServerSignType" =>, "Ed448", - "ExpectedResult" => "Success" - }, - }, { name => "TLS 1.2 Ed25519 Client Auth", server => { @@ -446,6 +411,41 @@ my @tests_non_fips = ( "ExpectedResult" => "Success" }, }, + { + name => "Ed25519 CipherString and Curves Selection", + server => $server, + client => { + "CipherString" => "aECDSA", + "MaxProtocol" => "TLSv1.2", + "SignatureAlgorithms" => "ECDSA+SHA256:ed25519", + # Excluding P-256 from the supported curves list means server + # certificate should be Ed25519 and not P-256 + "Curves" => "X25519" + }, + test => { + "ExpectedServerCertType" =>, "Ed25519", + "ExpectedServerSignType" =>, "Ed25519", + "ExpectedResult" => "Success" + }, + }, + { + name => "Ed448 CipherString and Curves Selection", + server => $server, + client => { + "CipherString" => "aECDSA", + "MaxProtocol" => "TLSv1.2", + "SignatureAlgorithms" => "ECDSA+SHA256:ed448", + "VerifyCAFile" => test_pem("root-ed448-cert.pem"), + # Excluding P-256 from the supported curves list means server + # certificate should be Ed25519 and not P-256 + "Curves" => "X448" + }, + test => { + "ExpectedServerCertType" =>, "Ed448", + "ExpectedServerSignType" =>, "Ed448", + "ExpectedResult" => "Success" + }, + }, ); my @tests_pss = ( diff --git a/deps/openssl/openssl/test/ssl-tests/28-seclevel.cnf.in b/deps/openssl/openssl/test/ssl-tests/28-seclevel.cnf.in index 945f4599d10ef8..3b97ac68eb3a71 100644 --- a/deps/openssl/openssl/test/ssl-tests/28-seclevel.cnf.in +++ b/deps/openssl/openssl/test/ssl-tests/28-seclevel.cnf.in @@ -1,5 +1,5 @@ # -*- mode: perl; -*- -# Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -56,7 +56,10 @@ our @tests_ec = ( client => { "CipherString" => "DEFAULT:\@SECLEVEL=5", "VerifyCAFile" => test_pem("root-ed448-cert.pem") }, test => { "ExpectedResult" => "ServerFail" }, - }, + } +); + +our @tests_ec_non_fips = ( { name => "SECLEVEL 3 with P-384 key, X25519 ECDHE", server => { "CipherString" => "DEFAULT:\@SECLEVEL=3", @@ -81,5 +84,6 @@ our @tests_tls1_2 = ( }, ); +push @tests_ec, @tests_ec_non_fips unless $fips_mode; push @tests, @tests_ec unless disabled("ec"); push @tests, @tests_tls1_2 unless disabled("tls1_2") || disabled("ec"); diff --git a/deps/openssl/openssl/test/sslapitest.c b/deps/openssl/openssl/test/sslapitest.c index 20b5ac194663b3..b0544d4942f7ea 100644 --- a/deps/openssl/openssl/test/sslapitest.c +++ b/deps/openssl/openssl/test/sslapitest.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -2402,7 +2402,6 @@ static int test_session_wo_ca_names(void) #endif } - #ifndef OSSL_NO_USABLE_TLS1_3 static SSL_SESSION *sesscache[6]; static int do_cache; @@ -3490,6 +3489,25 @@ static int setupearly_data_test(SSL_CTX **cctx, SSL_CTX **sctx, SSL **clientssl, return 1; } +static int check_early_data_timeout(time_t timer) +{ + int res = 0; + + /* + * Early data is time sensitive. 
We have an approx 8 second allowance + * between writing the early data and reading it. If we exceed that time + * then this test will fail. This can sometimes (rarely) occur in normal CI + * operation. We can try and detect this and just ignore the result of this + * test if it has taken too long. We assume anything over 7 seconds is too + * long + */ + timer = time(NULL) - timer; + if (timer >= 7) + res = TEST_skip("Test took too long, ignoring result"); + + return res; +} + static int test_early_data_read_write(int idx) { SSL_CTX *cctx = NULL, *sctx = NULL; @@ -3499,6 +3517,7 @@ static int test_early_data_read_write(int idx) unsigned char buf[20], data[1024]; size_t readbytes, written, eoedlen, rawread, rawwritten; BIO *rbio; + time_t timer; if (!TEST_true(setupearly_data_test(&cctx, &sctx, &clientssl, &serverssl, &sess, idx, @@ -3506,13 +3525,20 @@ static int test_early_data_read_write(int idx) goto end; /* Write and read some early data */ + timer = time(NULL); if (!TEST_true(SSL_write_early_data(clientssl, MSG1, strlen(MSG1), &written)) - || !TEST_size_t_eq(written, strlen(MSG1)) - || !TEST_int_eq(SSL_read_early_data(serverssl, buf, - sizeof(buf), &readbytes), - SSL_READ_EARLY_DATA_SUCCESS) - || !TEST_mem_eq(MSG1, readbytes, buf, strlen(MSG1)) + || !TEST_size_t_eq(written, strlen(MSG1))) + goto end; + + if (!TEST_int_eq(SSL_read_early_data(serverssl, buf, sizeof(buf), + &readbytes), + SSL_READ_EARLY_DATA_SUCCESS)) { + testresult = check_early_data_timeout(timer); + goto end; + } + + if (!TEST_mem_eq(MSG1, readbytes, buf, strlen(MSG1)) || !TEST_int_eq(SSL_get_early_data_status(serverssl), SSL_EARLY_DATA_ACCEPTED)) goto end; @@ -3729,6 +3755,7 @@ static int test_early_data_replay_int(int idx, int usecb, int confopt) SSL_SESSION *sess = NULL; size_t readbytes, written; unsigned char buf[20]; + time_t timer; allow_ed_cb_called = 0; @@ -3783,6 +3810,7 @@ static int test_early_data_replay_int(int idx, int usecb, int confopt) goto end; /* Write and read some early data */ + timer = time(NULL); if (!TEST_true(SSL_write_early_data(clientssl, MSG1, strlen(MSG1), &written)) || !TEST_size_t_eq(written, strlen(MSG1))) @@ -3803,8 +3831,11 @@ static int test_early_data_replay_int(int idx, int usecb, int confopt) /* In this case the callback decides to accept the early data */ if (!TEST_int_eq(SSL_read_early_data(serverssl, buf, sizeof(buf), &readbytes), - SSL_READ_EARLY_DATA_SUCCESS) - || !TEST_mem_eq(MSG1, strlen(MSG1), buf, readbytes) + SSL_READ_EARLY_DATA_SUCCESS)) { + testresult = check_early_data_timeout(timer); + goto end; + } + if (!TEST_mem_eq(MSG1, strlen(MSG1), buf, readbytes) /* * Server will have sent its flight so client can now send * end of early data and complete its half of the handshake @@ -3907,7 +3938,7 @@ static int early_data_skip_helper(int testtype, int cipher, int idx) if (!TEST_true(SSL_set1_groups_list(serverssl, "ffdhe3072"))) goto end; #else - if (!TEST_true(SSL_set1_groups_list(serverssl, "P-256"))) + if (!TEST_true(SSL_set1_groups_list(serverssl, "P-384"))) goto end; #endif } else if (idx == 2) { @@ -4321,13 +4352,19 @@ static int test_early_data_psk(int idx) || !TEST_int_eq(ERR_GET_REASON(ERR_get_error()), err)) goto end; } else { + time_t timer = time(NULL); + if (!TEST_true(SSL_write_early_data(clientssl, MSG1, strlen(MSG1), &written))) goto end; if (!TEST_int_eq(SSL_read_early_data(serverssl, buf, sizeof(buf), - &readbytes), readearlyres) - || (readearlyres == SSL_READ_EARLY_DATA_SUCCESS + &readbytes), readearlyres)) { + testresult = 
check_early_data_timeout(timer); + goto end; + } + + if ((readearlyres == SSL_READ_EARLY_DATA_SUCCESS && !TEST_mem_eq(buf, readbytes, MSG1, strlen(MSG1))) || !TEST_int_eq(SSL_get_early_data_status(serverssl), edstatus) || !TEST_int_eq(SSL_connect(clientssl), connectres)) @@ -4365,6 +4402,7 @@ static int test_early_data_psk_with_all_ciphers(int idx) unsigned char buf[20]; size_t readbytes, written; const SSL_CIPHER *cipher; + time_t timer; const char *cipher_str[] = { TLS1_3_RFC_AES_128_GCM_SHA256, TLS1_3_RFC_AES_256_GCM_SHA384, @@ -4416,14 +4454,19 @@ static int test_early_data_psk_with_all_ciphers(int idx) goto end; SSL_set_connect_state(clientssl); + timer = time(NULL); if (!TEST_true(SSL_write_early_data(clientssl, MSG1, strlen(MSG1), &written))) goto end; if (!TEST_int_eq(SSL_read_early_data(serverssl, buf, sizeof(buf), &readbytes), - SSL_READ_EARLY_DATA_SUCCESS) - || !TEST_mem_eq(buf, readbytes, MSG1, strlen(MSG1)) + SSL_READ_EARLY_DATA_SUCCESS)) { + testresult = check_early_data_timeout(timer); + goto end; + } + + if (!TEST_mem_eq(buf, readbytes, MSG1, strlen(MSG1)) || !TEST_int_eq(SSL_get_early_data_status(serverssl), SSL_EARLY_DATA_ACCEPTED) || !TEST_int_eq(SSL_connect(clientssl), 1) @@ -4864,10 +4907,14 @@ static int test_key_exchange(int idx) kexch_name0 = "secp521r1"; break; case 4: + if (is_fips) + return TEST_skip("X25519 might not be supported by fips provider."); kexch_alg = NID_X25519; kexch_name0 = "x25519"; break; case 5: + if (is_fips) + return TEST_skip("X448 might not be supported by fips provider."); kexch_alg = NID_X448; kexch_name0 = "x448"; break; @@ -5082,6 +5129,9 @@ static int test_negotiated_group(int idx) else expectednid = kexch_alg; + if (is_fips && (kexch_alg == NID_X25519 || kexch_alg == NID_X448)) + return TEST_skip("X25519 and X448 might not be available in fips provider."); + if (!istls13) max_version = TLS1_2_VERSION; @@ -5503,7 +5553,7 @@ static int test_tls13_psk(int idx) if (!TEST_true(SSL_set1_groups_list(serverssl, "ffdhe3072"))) goto end; #else - if (!TEST_true(SSL_set1_groups_list(serverssl, "P-256"))) + if (!TEST_true(SSL_set1_groups_list(serverssl, "P-384"))) goto end; #endif @@ -7467,6 +7517,7 @@ static int test_info_callback(int tst) SSL_SESSION *sess = NULL; size_t written, readbytes; unsigned char buf[80]; + time_t timer; /* early_data tests */ if (!TEST_true(setupearly_data_test(&cctx, &sctx, &clientssl, @@ -7481,13 +7532,20 @@ static int test_info_callback(int tst) sslapi_info_callback); /* Write and read some early data and then complete the connection */ + timer = time(NULL); if (!TEST_true(SSL_write_early_data(clientssl, MSG1, strlen(MSG1), &written)) - || !TEST_size_t_eq(written, strlen(MSG1)) - || !TEST_int_eq(SSL_read_early_data(serverssl, buf, - sizeof(buf), &readbytes), - SSL_READ_EARLY_DATA_SUCCESS) - || !TEST_mem_eq(MSG1, readbytes, buf, strlen(MSG1)) + || !TEST_size_t_eq(written, strlen(MSG1))) + goto end; + + if (!TEST_int_eq(SSL_read_early_data(serverssl, buf, + sizeof(buf), &readbytes), + SSL_READ_EARLY_DATA_SUCCESS)) { + testresult = check_early_data_timeout(timer); + goto end; + } + + if (!TEST_mem_eq(MSG1, readbytes, buf, strlen(MSG1)) || !TEST_int_eq(SSL_get_early_data_status(serverssl), SSL_EARLY_DATA_ACCEPTED) || !TEST_true(create_ssl_connection(serverssl, clientssl, @@ -8954,6 +9012,126 @@ static int test_session_timeout(int test) return testresult; } +/* + * Test that a session cache overflow works as expected + * Test 0: TLSv1.3, timeout on new session later than old session + * Test 1: TLSv1.2, timeout on new 
session later than old session + * Test 2: TLSv1.3, timeout on new session earlier than old session + * Test 3: TLSv1.2, timeout on new session earlier than old session + */ +#if !defined(OSSL_NO_USABLE_TLS1_3) || !defined(OPENSSL_NO_TLS1_2) +static int test_session_cache_overflow(int idx) +{ + SSL_CTX *sctx = NULL, *cctx = NULL; + SSL *serverssl = NULL, *clientssl = NULL; + int testresult = 0; + SSL_SESSION *sess = NULL; + +#ifdef OSSL_NO_USABLE_TLS1_3 + /* If no TLSv1.3 available then do nothing in this case */ + if (idx % 2 == 0) + return TEST_skip("No TLSv1.3 available"); +#endif +#ifdef OPENSSL_NO_TLS1_2 + /* If no TLSv1.2 available then do nothing in this case */ + if (idx % 2 == 1) + return TEST_skip("No TLSv1.2 available"); +#endif + + if (!TEST_true(create_ssl_ctx_pair(libctx, TLS_server_method(), + TLS_client_method(), TLS1_VERSION, + (idx % 2 == 0) ? TLS1_3_VERSION + : TLS1_2_VERSION, + &sctx, &cctx, cert, privkey)) + || !TEST_true(SSL_CTX_set_options(sctx, SSL_OP_NO_TICKET))) + goto end; + + SSL_CTX_sess_set_get_cb(sctx, get_session_cb); + get_sess_val = NULL; + + SSL_CTX_sess_set_cache_size(sctx, 1); + + if (!TEST_true(create_ssl_objects(sctx, cctx, &serverssl, &clientssl, + NULL, NULL))) + goto end; + + if (!TEST_true(create_ssl_connection(serverssl, clientssl, SSL_ERROR_NONE))) + goto end; + + if (idx > 1) { + sess = SSL_get_session(serverssl); + if (!TEST_ptr(sess)) + goto end; + + /* + * Cause this session to have a longer timeout than the next session to + * be added. + */ + if (!TEST_true(SSL_SESSION_set_timeout(sess, LONG_MAX / 2))) { + sess = NULL; + goto end; + } + sess = NULL; + } + + SSL_shutdown(serverssl); + SSL_shutdown(clientssl); + SSL_free(serverssl); + SSL_free(clientssl); + serverssl = clientssl = NULL; + + /* + * Session cache size is 1 and we already populated the cache with a session + * so the next connection should cause an overflow. + */ + + if (!TEST_true(create_ssl_objects(sctx, cctx, &serverssl, &clientssl, + NULL, NULL))) + goto end; + + if (!TEST_true(create_ssl_connection(serverssl, clientssl, SSL_ERROR_NONE))) + goto end; + + /* + * The session we just negotiated may have been already removed from the + * internal cache - but we will return it anyway from our external cache. + */ + get_sess_val = SSL_get_session(serverssl); + if (!TEST_ptr(get_sess_val)) + goto end; + sess = SSL_get1_session(clientssl); + if (!TEST_ptr(sess)) + goto end; + + SSL_shutdown(serverssl); + SSL_shutdown(clientssl); + SSL_free(serverssl); + SSL_free(clientssl); + serverssl = clientssl = NULL; + + if (!TEST_true(create_ssl_objects(sctx, cctx, &serverssl, &clientssl, + NULL, NULL))) + goto end; + + if (!TEST_true(SSL_set_session(clientssl, sess))) + goto end; + + if (!TEST_true(create_ssl_connection(serverssl, clientssl, SSL_ERROR_NONE))) + goto end; + + testresult = 1; + + end: + SSL_free(serverssl); + SSL_free(clientssl); + SSL_CTX_free(sctx); + SSL_CTX_free(cctx); + SSL_SESSION_free(sess); + + return testresult; +} +#endif /* !defined(OSSL_NO_USABLE_TLS1_3) || !defined(OPENSSL_NO_TLS1_2) */ + /* * Test 0: Client sets servername and server acknowledges it (TLSv1.2) * Test 1: Client sets servername and server does not acknowledge it (TLSv1.2) @@ -9269,20 +9447,11 @@ static int test_pluggable_group(int idx) OSSL_PROVIDER *tlsprov = OSSL_PROVIDER_load(libctx, "tls-provider"); /* Check that we are not impacted by a provider without any groups */ OSSL_PROVIDER *legacyprov = OSSL_PROVIDER_load(libctx, "legacy"); - const char *group_name = idx == 0 ? 
"xorgroup" : "xorkemgroup"; + const char *group_name = idx == 0 ? "xorkemgroup" : "xorgroup"; if (!TEST_ptr(tlsprov)) goto end; - if (legacyprov == NULL) { - /* - * In this case we assume we've been built with "no-legacy" and skip - * this test (there is no OPENSSL_NO_LEGACY) - */ - testresult = 1; - goto end; - } - if (!TEST_true(create_ssl_ctx_pair(libctx, TLS_server_method(), TLS_client_method(), TLS1_3_VERSION, @@ -9292,7 +9461,9 @@ static int test_pluggable_group(int idx) NULL, NULL))) goto end; - if (!TEST_true(SSL_set1_groups_list(serverssl, group_name)) + /* ensure GROUPLIST_INCREMENT (=40) logic triggers: */ + if (!TEST_true(SSL_set1_groups_list(serverssl, "xorgroup:xorkemgroup:dummy1:dummy2:dummy3:dummy4:dummy5:dummy6:dummy7:dummy8:dummy9:dummy10:dummy11:dummy12:dummy13:dummy14:dummy15:dummy16:dummy17:dummy18:dummy19:dummy20:dummy21:dummy22:dummy23:dummy24:dummy25:dummy26:dummy27:dummy28:dummy29:dummy30:dummy31:dummy32:dummy33:dummy34:dummy35:dummy36:dummy37:dummy38:dummy39:dummy40:dummy41:dummy42:dummy43")) + /* removing a single algorithm from the list makes the test pass */ || !TEST_true(SSL_set1_groups_list(clientssl, group_name))) goto end; @@ -10128,27 +10299,6 @@ static int test_load_dhfile(void) } #if !defined(OPENSSL_NO_TLS1_2) && !defined(OPENSSL_NO_DYNAMIC_ENGINE) - -static ENGINE *load_dasync(void) -{ - ENGINE *e; - - if (!TEST_ptr(e = ENGINE_by_id("dasync"))) - return NULL; - - if (!TEST_true(ENGINE_init(e))) { - ENGINE_free(e); - return NULL; - } - - if (!TEST_true(ENGINE_register_ciphers(e))) { - ENGINE_free(e); - return NULL; - } - - return e; -} - /* * Test TLSv1.2 with a pipeline capable cipher. TLSv1.3 and DTLS do not * support this yet. The only pipeline capable cipher that we have is in the @@ -10443,6 +10593,177 @@ static int test_handshake_retry(int idx) return testresult; } +struct resume_servername_cb_data { + int i; + SSL_CTX *cctx; + SSL_CTX *sctx; + SSL_SESSION *sess; + int recurse; +}; + +/* + * Servername callback. We use it here to run another complete handshake using + * the same session - and mark the session as not_resuamble at the end + */ +static int resume_servername_cb(SSL *s, int *ad, void *arg) +{ + struct resume_servername_cb_data *cbdata = arg; + SSL *serverssl = NULL, *clientssl = NULL; + int ret = SSL_TLSEXT_ERR_ALERT_FATAL; + + if (cbdata->recurse) + return SSL_TLSEXT_ERR_ALERT_FATAL; + + if ((cbdata->i % 3) != 1) + return SSL_TLSEXT_ERR_OK; + + cbdata->recurse = 1; + + if (!TEST_true(create_ssl_objects(cbdata->sctx, cbdata->cctx, &serverssl, + &clientssl, NULL, NULL)) + || !TEST_true(SSL_set_session(clientssl, cbdata->sess))) + goto end; + + ERR_set_mark(); + /* + * We expect this to fail - because the servername cb will fail. This will + * mark the session as not_resumable. 
+ */ + if (!TEST_false(create_ssl_connection(serverssl, clientssl, SSL_ERROR_NONE))) { + ERR_clear_last_mark(); + goto end; + } + ERR_pop_to_mark(); + + ret = SSL_TLSEXT_ERR_OK; + end: + SSL_free(serverssl); + SSL_free(clientssl); + cbdata->recurse = 0; + return ret; +} + +/* + * Test multiple resumptions and cache size handling + * Test 0: TLSv1.3 (max_early_data set) + * Test 1: TLSv1.3 (SSL_OP_NO_TICKET set) + * Test 2: TLSv1.3 (max_early_data and SSL_OP_NO_TICKET set) + * Test 3: TLSv1.3 (SSL_OP_NO_TICKET, simultaneous resumes) + * Test 4: TLSv1.2 + */ +static int test_multi_resume(int idx) +{ + SSL_CTX *sctx = NULL, *cctx = NULL; + SSL *serverssl = NULL, *clientssl = NULL; + SSL_SESSION *sess = NULL; + int max_version = TLS1_3_VERSION; + int i, testresult = 0; + struct resume_servername_cb_data cbdata; + +#if defined(OPENSSL_NO_TLS1_2) + if (idx == 4) + return TEST_skip("TLSv1.2 is disabled in this build"); +#else + if (idx == 4) + max_version = TLS1_2_VERSION; +#endif +#if defined(OSSL_NO_USABLE_TLS1_3) + if (idx != 4) + return TEST_skip("No usable TLSv1.3 in this build"); +#endif + + if (!TEST_true(create_ssl_ctx_pair(libctx, TLS_server_method(), + TLS_client_method(), TLS1_VERSION, + max_version, &sctx, &cctx, cert, + privkey))) + goto end; + + /* + * TLSv1.3 only uses a session cache if either max_early_data > 0 (used for + * replay protection), or if SSL_OP_NO_TICKET is in use + */ + if (idx == 0 || idx == 2) { + if (!TEST_true(SSL_CTX_set_max_early_data(sctx, 1024))) + goto end; + } + if (idx == 1 || idx == 2 || idx == 3) + SSL_CTX_set_options(sctx, SSL_OP_NO_TICKET); + + SSL_CTX_sess_set_cache_size(sctx, 5); + + if (idx == 3) { + SSL_CTX_set_tlsext_servername_callback(sctx, resume_servername_cb); + SSL_CTX_set_tlsext_servername_arg(sctx, &cbdata); + cbdata.cctx = cctx; + cbdata.sctx = sctx; + cbdata.recurse = 0; + } + + for (i = 0; i < 30; i++) { + if (!TEST_true(create_ssl_objects(sctx, cctx, &serverssl, &clientssl, + NULL, NULL)) + || !TEST_true(SSL_set_session(clientssl, sess))) + goto end; + + /* + * Check simultaneous resumes. We pause the connection part way through + * the handshake by (mis)using the servername_cb. The pause occurs after + * session resumption has already occurred, but before any session + * tickets have been issued. While paused we run another complete + * handshake resuming the same session. + */ + if (idx == 3) { + cbdata.i = i; + cbdata.sess = sess; + } + + /* + * Recreate a bug where dynamically changing the max_early_data value + * can cause sessions in the session cache which cannot be deleted. 
+ */ + if ((idx == 0 || idx == 2) && (i % 3) == 2) + SSL_set_max_early_data(serverssl, 0); + + if (!TEST_true(create_ssl_connection(serverssl, clientssl, SSL_ERROR_NONE))) + goto end; + + if (sess == NULL || (idx == 0 && (i % 3) == 2)) { + if (!TEST_false(SSL_session_reused(clientssl))) + goto end; + } else { + if (!TEST_true(SSL_session_reused(clientssl))) + goto end; + } + SSL_SESSION_free(sess); + + /* Do a full handshake, followed by two resumptions */ + if ((i % 3) == 2) { + sess = NULL; + } else { + if (!TEST_ptr((sess = SSL_get1_session(clientssl)))) + goto end; + } + + SSL_shutdown(clientssl); + SSL_shutdown(serverssl); + SSL_free(serverssl); + SSL_free(clientssl); + serverssl = clientssl = NULL; + } + + /* We should never exceed the session cache size limit */ + if (!TEST_long_le(SSL_CTX_sess_number(sctx), 5)) + goto end; + + testresult = 1; + end: + SSL_free(serverssl); + SSL_free(clientssl); + SSL_CTX_free(sctx); + SSL_CTX_free(cctx); + SSL_SESSION_free(sess); + return testresult; +} #ifndef OPENSSL_NO_QUIC static int test_quic_set_encryption_secrets(SSL *ssl, OSSL_ENCRYPTION_LEVEL level, @@ -10802,6 +11123,367 @@ static int test_quic_early_data(int tst) # endif /* OSSL_NO_USABLE_TLS1_3 */ #endif /* OPENSSL_NO_QUIC */ +static struct next_proto_st { + int serverlen; + unsigned char server[40]; + int clientlen; + unsigned char client[40]; + int expected_ret; + size_t selectedlen; + unsigned char selected[40]; +} next_proto_tests[] = { + { + 4, { 3, 'a', 'b', 'c' }, + 4, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NEGOTIATED, + 3, { 'a', 'b', 'c' } + }, + { + 7, { 3, 'a', 'b', 'c', 2, 'a', 'b' }, + 4, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NEGOTIATED, + 3, { 'a', 'b', 'c' } + }, + { + 7, { 2, 'a', 'b', 3, 'a', 'b', 'c', }, + 4, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NEGOTIATED, + 3, { 'a', 'b', 'c' } + }, + { + 4, { 3, 'a', 'b', 'c' }, + 7, { 3, 'a', 'b', 'c', 2, 'a', 'b', }, + OPENSSL_NPN_NEGOTIATED, + 3, { 'a', 'b', 'c' } + }, + { + 4, { 3, 'a', 'b', 'c' }, + 7, { 2, 'a', 'b', 3, 'a', 'b', 'c'}, + OPENSSL_NPN_NEGOTIATED, + 3, { 'a', 'b', 'c' } + }, + { + 7, { 2, 'b', 'c', 3, 'a', 'b', 'c' }, + 7, { 2, 'a', 'b', 3, 'a', 'b', 'c'}, + OPENSSL_NPN_NEGOTIATED, + 3, { 'a', 'b', 'c' } + }, + { + 10, { 2, 'b', 'c', 3, 'a', 'b', 'c', 2, 'a', 'b' }, + 7, { 2, 'a', 'b', 3, 'a', 'b', 'c'}, + OPENSSL_NPN_NEGOTIATED, + 3, { 'a', 'b', 'c' } + }, + { + 4, { 3, 'b', 'c', 'd' }, + 4, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NO_OVERLAP, + 3, { 'a', 'b', 'c' } + }, + { + 0, { 0 }, + 4, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NO_OVERLAP, + 3, { 'a', 'b', 'c' } + }, + { + -1, { 0 }, + 4, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NO_OVERLAP, + 3, { 'a', 'b', 'c' } + }, + { + 4, { 3, 'a', 'b', 'c' }, + 0, { 0 }, + OPENSSL_NPN_NO_OVERLAP, + 0, { 0 } + }, + { + 4, { 3, 'a', 'b', 'c' }, + -1, { 0 }, + OPENSSL_NPN_NO_OVERLAP, + 0, { 0 } + }, + { + 3, { 3, 'a', 'b', 'c' }, + 4, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NO_OVERLAP, + 3, { 'a', 'b', 'c' } + }, + { + 4, { 3, 'a', 'b', 'c' }, + 3, { 3, 'a', 'b', 'c' }, + OPENSSL_NPN_NO_OVERLAP, + 0, { 0 } + } +}; + +static int test_select_next_proto(int idx) +{ + struct next_proto_st *np = &next_proto_tests[idx]; + int ret = 0; + unsigned char *out, *client, *server; + unsigned char outlen; + unsigned int clientlen, serverlen; + + if (np->clientlen == -1) { + client = NULL; + clientlen = 0; + } else { + client = np->client; + clientlen = (unsigned int)np->clientlen; + } + if (np->serverlen == -1) { + server = NULL; + serverlen = 0; + } else { + server = np->server; + serverlen = (unsigned 
int)np->serverlen; + } + + if (!TEST_int_eq(SSL_select_next_proto(&out, &outlen, server, serverlen, + client, clientlen), + np->expected_ret)) + goto err; + + if (np->selectedlen == 0) { + if (!TEST_ptr_null(out) || !TEST_uchar_eq(outlen, 0)) + goto err; + } else { + if (!TEST_mem_eq(out, outlen, np->selected, np->selectedlen)) + goto err; + } + + ret = 1; + err: + return ret; +} + +static const unsigned char fooprot[] = {3, 'f', 'o', 'o' }; +static const unsigned char barprot[] = {3, 'b', 'a', 'r' }; + +#if !defined(OPENSSL_NO_TLS1_2) && !defined(OPENSSL_NO_NEXTPROTONEG) +static int npn_advert_cb(SSL *ssl, const unsigned char **out, + unsigned int *outlen, void *arg) +{ + int *idx = (int *)arg; + + switch (*idx) { + default: + case 0: + *out = fooprot; + *outlen = sizeof(fooprot); + return SSL_TLSEXT_ERR_OK; + + case 1: + *outlen = 0; + return SSL_TLSEXT_ERR_OK; + + case 2: + return SSL_TLSEXT_ERR_NOACK; + } +} + +static int npn_select_cb(SSL *s, unsigned char **out, unsigned char *outlen, + const unsigned char *in, unsigned int inlen, void *arg) +{ + int *idx = (int *)arg; + + switch (*idx) { + case 0: + case 1: + *out = (unsigned char *)(fooprot + 1); + *outlen = *fooprot; + return SSL_TLSEXT_ERR_OK; + + case 3: + *out = (unsigned char *)(barprot + 1); + *outlen = *barprot; + return SSL_TLSEXT_ERR_OK; + + case 4: + *outlen = 0; + return SSL_TLSEXT_ERR_OK; + + default: + case 2: + return SSL_TLSEXT_ERR_ALERT_FATAL; + } +} + +/* + * Test the NPN callbacks + * Test 0: advert = foo, select = foo + * Test 1: advert = , select = foo + * Test 2: no advert + * Test 3: advert = foo, select = bar + * Test 4: advert = foo, select = (should fail) + */ +static int test_npn(int idx) +{ + SSL_CTX *sctx = NULL, *cctx = NULL; + SSL *serverssl = NULL, *clientssl = NULL; + int testresult = 0; + + if (!TEST_true(create_ssl_ctx_pair(libctx, TLS_server_method(), + TLS_client_method(), 0, TLS1_2_VERSION, + &sctx, &cctx, cert, privkey))) + goto end; + + SSL_CTX_set_next_protos_advertised_cb(sctx, npn_advert_cb, &idx); + SSL_CTX_set_next_proto_select_cb(cctx, npn_select_cb, &idx); + + if (!TEST_true(create_ssl_objects(sctx, cctx, &serverssl, &clientssl, NULL, + NULL))) + goto end; + + if (idx == 4) { + /* We don't allow empty selection of NPN, so this should fail */ + if (!TEST_false(create_ssl_connection(serverssl, clientssl, + SSL_ERROR_NONE))) + goto end; + } else { + const unsigned char *prot; + unsigned int protlen; + + if (!TEST_true(create_ssl_connection(serverssl, clientssl, + SSL_ERROR_NONE))) + goto end; + + SSL_get0_next_proto_negotiated(serverssl, &prot, &protlen); + switch (idx) { + case 0: + case 1: + if (!TEST_mem_eq(prot, protlen, fooprot + 1, *fooprot)) + goto end; + break; + case 2: + if (!TEST_uint_eq(protlen, 0)) + goto end; + break; + case 3: + if (!TEST_mem_eq(prot, protlen, barprot + 1, *barprot)) + goto end; + break; + default: + TEST_error("Should not get here"); + goto end; + } + } + + testresult = 1; + end: + SSL_free(serverssl); + SSL_free(clientssl); + SSL_CTX_free(sctx); + SSL_CTX_free(cctx); + + return testresult; +} +#endif /* !defined(OPENSSL_NO_TLS1_2) && !defined(OPENSSL_NO_NEXTPROTONEG) */ + +static int alpn_select_cb2(SSL *ssl, const unsigned char **out, + unsigned char *outlen, const unsigned char *in, + unsigned int inlen, void *arg) +{ + int *idx = (int *)arg; + + switch (*idx) { + case 0: + *out = (unsigned char *)(fooprot + 1); + *outlen = *fooprot; + return SSL_TLSEXT_ERR_OK; + + case 2: + *out = (unsigned char *)(barprot + 1); + *outlen = *barprot; + return 
SSL_TLSEXT_ERR_OK; + case 3: + *outlen = 0; + return SSL_TLSEXT_ERR_OK; + + default: + case 1: + return SSL_TLSEXT_ERR_ALERT_FATAL; + } + return 0; +} + +/* + * Test the ALPN callbacks + * Test 0: client = foo, select = foo + * Test 1: client = , select = none + * Test 2: client = foo, select = bar (should fail) + * Test 3: client = foo, select = (should fail) + */ +static int test_alpn(int idx) +{ + SSL_CTX *sctx = NULL, *cctx = NULL; + SSL *serverssl = NULL, *clientssl = NULL; + int testresult = 0; + const unsigned char *prots = fooprot; + unsigned int protslen = sizeof(fooprot); + + if (!TEST_true(create_ssl_ctx_pair(libctx, TLS_server_method(), + TLS_client_method(), 0, 0, + &sctx, &cctx, cert, privkey))) + goto end; + + SSL_CTX_set_alpn_select_cb(sctx, alpn_select_cb2, &idx); + + if (!TEST_true(create_ssl_objects(sctx, cctx, &serverssl, &clientssl, NULL, + NULL))) + goto end; + + if (idx == 1) { + prots = NULL; + protslen = 0; + } + + /* SSL_set_alpn_protos returns 0 for success! */ + if (!TEST_false(SSL_set_alpn_protos(clientssl, prots, protslen))) + goto end; + + if (idx == 2 || idx == 3) { + /* We don't allow empty selection of ALPN, so this should fail */ + if (!TEST_false(create_ssl_connection(serverssl, clientssl, + SSL_ERROR_NONE))) + goto end; + } else { + const unsigned char *prot; + unsigned int protlen; + + if (!TEST_true(create_ssl_connection(serverssl, clientssl, + SSL_ERROR_NONE))) + goto end; + + SSL_get0_alpn_selected(clientssl, &prot, &protlen); + switch (idx) { + case 0: + if (!TEST_mem_eq(prot, protlen, fooprot + 1, *fooprot)) + goto end; + break; + case 1: + if (!TEST_uint_eq(protlen, 0)) + goto end; + break; + default: + TEST_error("Should not get here"); + goto end; + } + } + + testresult = 1; + end: + SSL_free(serverssl); + SSL_free(clientssl); + SSL_CTX_free(sctx); + SSL_CTX_free(cctx); + + return testresult; +} + OPT_TEST_DECLARE_USAGE("certfile privkeyfile srpvfile tmpfile provider config dhfile\n") int setup_tests(void) @@ -11066,6 +11748,9 @@ int setup_tests(void) ADD_TEST(test_set_verify_cert_store_ssl_ctx); ADD_TEST(test_set_verify_cert_store_ssl); ADD_ALL_TESTS(test_session_timeout, 1); +#if !defined(OSSL_NO_USABLE_TLS1_3) || !defined(OPENSSL_NO_TLS1_2) + ADD_ALL_TESTS(test_session_cache_overflow, 4); +#endif ADD_TEST(test_load_dhfile); #if !defined(OPENSSL_NO_TLS1_2) && !defined(OSSL_NO_USABLE_TLS1_3) ADD_ALL_TESTS(test_serverinfo_custom, 4); @@ -11074,6 +11759,12 @@ int setup_tests(void) ADD_ALL_TESTS(test_pipelining, 7); #endif ADD_ALL_TESTS(test_handshake_retry, 16); + ADD_ALL_TESTS(test_multi_resume, 5); + ADD_ALL_TESTS(test_select_next_proto, OSSL_NELEM(next_proto_tests)); +#if !defined(OPENSSL_NO_TLS1_2) && !defined(OPENSSL_NO_NEXTPROTONEG) + ADD_ALL_TESTS(test_npn, 5); +#endif + ADD_ALL_TESTS(test_alpn, 4); #ifndef OPENSSL_NO_QUIC ADD_ALL_TESTS(test_quic_api, 9); # ifndef OSSL_NO_USABLE_TLS1_3 diff --git a/deps/openssl/openssl/test/sslbuffertest.c b/deps/openssl/openssl/test/sslbuffertest.c index 3c3e69d61da80e..f313151f686f96 100644 --- a/deps/openssl/openssl/test/sslbuffertest.c +++ b/deps/openssl/openssl/test/sslbuffertest.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2020 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"); * you may not use this file except in compliance with the License. @@ -8,10 +8,19 @@ * or in the file LICENSE in the source distribution.
*/ + +/* + * We need access to the deprecated low level Engine APIs for legacy purposes + * when the deprecated calls are not hidden + */ +#ifndef OPENSSL_NO_DEPRECATED_3_0 +# define OPENSSL_SUPPRESS_DEPRECATED +#endif + #include <string.h> #include <openssl/ssl.h> #include <openssl/err.h> #include <openssl/crypto.h> +#include <openssl/engine.h> #include "internal/packet.h" @@ -150,6 +159,166 @@ static int test_func(int test) return result; } +/* + * Test that attempting to free the buffers at points where they cannot be freed + * works as expected + * Test 0: Attempt to free buffers after a full record has been processed, but + * the application has only performed a partial read + * Test 1: Attempt to free buffers after only a partial record header has been + * received + * Test 2: Attempt to free buffers after a full record header but no record body + * Test 3: Attempt to free buffers after a full record header and partial record + * body + * Test 4-7: We repeat tests 0-3 but including data from a second pipelined + * record + */ +static int test_free_buffers(int test) +{ + int result = 0; + SSL *serverssl = NULL, *clientssl = NULL; + const char testdata[] = "Test data"; + char buf[120]; + size_t written, readbytes; + int i, pipeline = test > 3; + ENGINE *e = NULL; + + if (pipeline) { + e = load_dasync(); + if (e == NULL) + goto end; + test -= 4; + } + + if (!TEST_true(create_ssl_objects(serverctx, clientctx, &serverssl, + &clientssl, NULL, NULL))) + goto end; + + if (pipeline) { + if (!TEST_true(SSL_set_cipher_list(serverssl, "AES128-SHA")) + || !TEST_true(SSL_set_max_proto_version(serverssl, + TLS1_2_VERSION)) + || !TEST_true(SSL_set_max_pipelines(serverssl, 2))) + goto end; + } + + if (!TEST_true(create_ssl_connection(serverssl, clientssl, + SSL_ERROR_NONE))) + goto end; + + /* + * For the non-pipeline case we write one record. For pipelining we write + * two records. + */ + for (i = 0; i <= pipeline; i++) { + if (!TEST_true(SSL_write_ex(clientssl, testdata, strlen(testdata), + &written))) + goto end; + } + + if (test == 0) { + size_t readlen = 1; + + /* + * Deliberately only read the first byte - so the remaining bytes are + * still buffered. In the pipelining case we read as far as the first + * byte from the second record. + */ + if (pipeline) + readlen += strlen(testdata); + + if (!TEST_true(SSL_read_ex(serverssl, buf, readlen, &readbytes)) + || !TEST_size_t_eq(readlen, readbytes)) + goto end; + } else { + BIO *tmp; + size_t partial_len; + + /* Remove all the data that is pending for read by the server */ + tmp = SSL_get_rbio(serverssl); + if (!TEST_true(BIO_read_ex(tmp, buf, sizeof(buf), &readbytes)) + || !TEST_size_t_lt(readbytes, sizeof(buf)) + || !TEST_size_t_gt(readbytes, SSL3_RT_HEADER_LENGTH)) + goto end; + + switch(test) { + case 1: + partial_len = SSL3_RT_HEADER_LENGTH - 1; + break; + case 2: + partial_len = SSL3_RT_HEADER_LENGTH; + break; + case 3: + partial_len = readbytes - 1; + break; + default: + TEST_error("Invalid test index"); + goto end; + } + + if (pipeline) { + /* We happen to know the first record is 57 bytes long */ + const size_t first_rec_len = 57; + + if (test != 3) + partial_len += first_rec_len; + + /* + * Sanity check. If we got the record len right then this should + * never fail. + */ + if (!TEST_int_eq(buf[first_rec_len], SSL3_RT_APPLICATION_DATA)) + goto end; + } + + /* + * Put back just the partial record (plus the whole initial record in + * the pipelining case) + */ + if (!TEST_true(BIO_write_ex(tmp, buf, partial_len, &written))) + goto end; + + if (pipeline) { + /* + * Attempt a read.
This should pass but only return data from the + * first record. Only a partial record is available for the second + * record. + */ + if (!TEST_true(SSL_read_ex(serverssl, buf, sizeof(buf), + &readbytes)) + || !TEST_size_t_eq(readbytes, strlen(testdata))) + goto end; + } else { + /* + * Attempt a read. This should fail because only a partial record is + * available. + */ + if (!TEST_false(SSL_read_ex(serverssl, buf, sizeof(buf), + &readbytes))) + goto end; + } + } + + /* + * Attempting to free the buffers at this point should fail because they are + * still in use + */ + if (!TEST_false(SSL_free_buffers(serverssl))) + goto end; + + result = 1; + end: + SSL_free(clientssl); + SSL_free(serverssl); +#ifndef OPENSSL_NO_DYNAMIC_ENGINE + if (e != NULL) { + ENGINE_unregister_ciphers(e); + ENGINE_finish(e); + ENGINE_free(e); + } +#endif + return result; +} + OPT_TEST_DECLARE_USAGE("certfile privkeyfile\n") int setup_tests(void) @@ -173,6 +342,11 @@ int setup_tests(void) } ADD_ALL_TESTS(test_func, 9); +#if !defined(OPENSSL_NO_TLS1_2) && !defined(OPENSSL_NO_DYNAMIC_ENGINE) + ADD_ALL_TESTS(test_free_buffers, 8); +#else + ADD_ALL_TESTS(test_free_buffers, 4); +#endif return 1; } diff --git a/deps/openssl/openssl/test/test.cnf b/deps/openssl/openssl/test/test.cnf index 8b2f92ad8e241d..8f68982a9fa1fc 100644 --- a/deps/openssl/openssl/test/test.cnf +++ b/deps/openssl/openssl/test/test.cnf @@ -72,3 +72,9 @@ commonName = CN field commonName_value = Eric Young emailAddress = email field emailAddress_value = eay@mincom.oz.au + +[ dirname_sec ] +C = UK +O = My Organization +OU = My Unit +CN = My Name diff --git a/deps/openssl/openssl/test/tls-provider.c b/deps/openssl/openssl/test/tls-provider.c index 5c44b6812e8168..7375792c312552 100644 --- a/deps/openssl/openssl/test/tls-provider.c +++ b/deps/openssl/openssl/test/tls-provider.c @@ -1,5 +1,5 @@ /* - * Copyright 2019-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2019-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -185,6 +185,8 @@ static int tls_prov_get_capabilities(void *provctx, const char *capability, } /* Register our 2 groups */ + OPENSSL_assert(xor_group.group_id >= 65024 + && xor_group.group_id < 65279 - NUM_DUMMY_GROUPS); ret = cb(xor_group_params, arg); ret &= cb(xor_kemgroup_params, arg); @@ -196,6 +198,7 @@ static int tls_prov_get_capabilities(void *provctx, const char *capability, for (i = 0; i < NUM_DUMMY_GROUPS; i++) { OSSL_PARAM dummygroup[OSSL_NELEM(xor_group_params)]; + unsigned int dummygroup_id; memcpy(dummygroup, xor_group_params, sizeof(xor_group_params)); @@ -210,6 +213,9 @@ static int tls_prov_get_capabilities(void *provctx, const char *capability, } dummygroup[0].data = dummy_group_names[i]; dummygroup[0].data_size = strlen(dummy_group_names[i]) + 1; + /* assign unique group IDs also to dummy groups for registration */ + dummygroup_id = 65279 - NUM_DUMMY_GROUPS + i; + dummygroup[3].data = (unsigned char*)&dummygroup_id; ret &= cb(dummygroup, arg); } @@ -817,9 +823,10 @@ unsigned int randomize_tls_group_id(OSSL_LIB_CTX *libctx) return 0; /* * Ensure group_id is within the IANA Reserved for private use range - * (65024-65279) + * (65024-65279). + * Carve out NUM_DUMMY_GROUPS ids for properly registering those. 
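+ * (Illustrative arithmetic, matching the assertion above: the randomized
+ * ids then fall in [65024, 65279 - NUM_DUMMY_GROUPS), while each dummy
+ * group is registered as 65279 - NUM_DUMMY_GROUPS + i, so the randomized
+ * and reserved ranges cannot collide.)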
*/ - group_id %= 65279 - 65024; + group_id %= 65279 - NUM_DUMMY_GROUPS - 65024; group_id += 65024; /* Ensure we did not already issue this group_id */ diff --git a/deps/openssl/openssl/test/v3ext.c b/deps/openssl/openssl/test/v3ext.c index 88034db271559d..9305a3010bf8ec 100644 --- a/deps/openssl/openssl/test/v3ext.c +++ b/deps/openssl/openssl/test/v3ext.c @@ -1,5 +1,5 @@ /* - * Copyright 2016-2022 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. * * Licensed under the Apache License 2.0 (the "License"). You may not use * this file except in compliance with the License. You can obtain a copy @@ -269,17 +269,20 @@ static int test_addr_fam_len(void) goto end; if (!ASN1_OCTET_STRING_set(f1->addressFamily, key, keylen)) goto end; + + /* Push and transfer memory ownership to stack */ if (!sk_IPAddressFamily_push(addr, f1)) goto end; + f1 = NULL; /* Shouldn't be able to canonize this as the len is > 3 */ if (!TEST_false(X509v3_addr_canonize(addr))) goto end; - /* Create a well formed IPAddressFamily */ - f1 = sk_IPAddressFamily_pop(addr); - IPAddressFamily_free(f1); + /* Pop and free the new stack element */ + IPAddressFamily_free(sk_IPAddressFamily_pop(addr)); + /* Create a well-formed IPAddressFamily */ key[0] = (afi >> 8) & 0xFF; key[1] = afi & 0xFF; key[2] = 0x1; @@ -297,8 +300,11 @@ static int test_addr_fam_len(void) /* Mark this as inheritance so we skip some of the is_canonize checks */ f1->ipAddressChoice->type = IPAddressChoice_inherit; + + /* Push and transfer memory ownership to stack */ if (!sk_IPAddressFamily_push(addr, f1)) goto end; + f1 = NULL; /* Should be able to canonize now */ if (!TEST_true(X509v3_addr_canonize(addr))) @@ -306,7 +312,10 @@ static int test_addr_fam_len(void) testresult = 1; end: + /* Free stack and any memory owned by detached element */ + IPAddressFamily_free(f1); sk_IPAddressFamily_pop_free(addr, IPAddressFamily_free); + ASN1_OCTET_STRING_free(ip1); ASN1_OCTET_STRING_free(ip2); return testresult; diff --git a/deps/openssl/openssl/util/check-format-commit.sh b/deps/openssl/openssl/util/check-format-commit.sh new file mode 100755 index 00000000000000..7e712dc48cf655 --- /dev/null +++ b/deps/openssl/openssl/util/check-format-commit.sh @@ -0,0 +1,171 @@ +#!/bin/bash +# Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. +# +# Licensed under the Apache License 2.0 (the "License"). +# You may not use this file except in compliance with the License. +# You can obtain a copy in the file LICENSE in the source distribution +# or at https://www.openssl.org/source/license.html +# +# This script is a wrapper around check-format.pl. It accepts a commit sha +# value as input, and uses it to identify the files and ranges that were +# changed in that commit, filtering check-format.pl output only to lines that +# fall into the commit's change ranges. +# + + +# List of Regexes to use when running check-format.pl.
+# Style checks don't apply to any of these +EXCLUDED_FILE_REGEX=("\.pod" \ + "\.pl" \ + "\.pm" \ + "\.t" \ + "\.yml" \ + "\.sh") + +# Exit code for the script +EXIT_CODE=0 + +# Global vars + +# TEMPDIR is used to hold any files this script creates +# And is cleaned on EXIT with a trap function +TEMPDIR=$(mktemp -d /tmp/checkformat.XXXXXX) + +# TOPDIR always points to the root of the git tree we are working in +# used to locate the check-format.pl script +TOPDIR=$(git rev-parse --show-toplevel) + + +# cleanup handler function, returns us to the root of the git tree +# and erases our temp directory +cleanup() { + rm -rf $TEMPDIR + cd $TOPDIR +} + +trap cleanup EXIT + +# Get the canonical sha256 sum for the commit we are checking +# This lets us pass in symbolic ref names like master/etc and +# resolve them to sha256 sums easily +COMMIT=$(git rev-parse $1) + +# Fail gracefully if git rev-parse doesn't produce a valid +# commit +if [ $? -ne 0 ] +then + echo "$1 is not a valid revision" + exit 1 +fi + +# Create an iterable list of files to check for a +# given commit. It produces output of the format +# <commit> <file> <start line>,<line count> +touch $TEMPDIR/ranges.txt +git show $COMMIT | awk -v mycmt=$COMMIT ' + BEGIN {myfile=""} + /\+{3}/ { + gsub(/b\//,"",$2); + myfile=$2 + } + /@@/ { + gsub(/\+/,"",$3); + printf mycmt " " myfile " " $3 "\n" + }' >> $TEMPDIR/ranges.txt || true + +# filter out anything that matches on a filter regex +for i in ${EXCLUDED_FILE_REGEX[@]} +do + touch $TEMPDIR/ranges.filter + grep -v "$i" $TEMPDIR/ranges.txt >> $TEMPDIR/ranges.filter || true + REMAINING_FILES=$(wc -l $TEMPDIR/ranges.filter | awk '{print $1}') + if [ $REMAINING_FILES -eq 0 ] + then + echo "This commit has no files that require checking" + exit 0 + fi + mv $TEMPDIR/ranges.filter $TEMPDIR/ranges.txt +done + +# check out the files from the commit level. +# For each file name in ranges, we show that file at the commit +# level we are checking, and redirect it to the same path, relative +# to $TEMPDIR/check-format. This gives us the full file to run +# check-format.pl on with line numbers matching the ranges in the +# $TEMPDIR/ranges.txt file +for j in $(grep $COMMIT $TEMPDIR/ranges.txt | awk '{print $2}') +do + FDIR=$(dirname $j) + mkdir -p $TEMPDIR/check-format/$FDIR + git show $COMMIT:$j > $TEMPDIR/check-format/$j +done + +# Now for each file in $TEMPDIR/check-format run check-format.pl +# Note that we use the %P formatter in the find utility. This strips +# off the $TEMPDIR/check-format path prefix, leaving $j with the +# path to the file relative to the root of the source dir, so that +# output from check-format.pl looks correct, relative to the root +# of the git tree. +for j in $(find $TEMPDIR/check-format -type f -printf "%P\n") +do + range_start=() + range_end=() + + # Get the ranges for this file. Create 2 arrays. range_start contains + # the start lines for valid ranges from the commit. The range_end array + # contains the corresponding end line (note, since diff output gives us + # a line count for a change, the range_end[k] entry is actually + # range_start[k]+line count) + for k in $(grep $COMMIT $TEMPDIR/ranges.txt | grep $j | awk '{print $3}') + do + RANGE=$k + RSTART=$(echo $RANGE | awk -F',' '{print $1}') + RLEN=$(echo $RANGE | awk -F',' '{print $2}') + let REND=$RSTART+$RLEN + range_start+=($RSTART) + range_end+=($REND) + done + + # Go to our checked out tree + cd $TEMPDIR/check-format + + # Actually run check-format.pl on the file, capturing the output + # in a temporary file.
Note the format of check-format.pl output is + # <filename>:<line number>:<description>:<line content> + $TOPDIR/util/check-format.pl $j > $TEMPDIR/format-results.txt + + # Now we filter the check-format.pl output based on the changed lines + # captured in the range_start/end arrays + let maxidx=${#range_start[@]}-1 + for k in $(seq 0 1 $maxidx) + do + RSTART=${range_start[$k]} + REND=${range_end[$k]} + + # field 2 of check-format.pl output is the offending line number + # Check here if any line in that output falls between any of the + # start/end ranges defined in the range_start/range_end array. + # If it does fall in that range, print the entire line to stdout + # If anything is printed, have awk exit with a non-zero exit code + awk -v rstart=$RSTART -v rend=$REND -F':' ' + BEGIN {rc=0} + /:/ { + if (($2 >= rstart) && ($2 <= rend)) { + print $0; + rc=1 + } + } + END {exit rc;} + ' $TEMPDIR/format-results.txt + + # If awk exited with a non-zero code, this script will also exit + # with a non-zero code + if [ $? -ne 0 ] + then + EXIT_CODE=1 + fi + done +done + +# Exit with the recorded exit code above +exit $EXIT_CODE diff --git a/deps/openssl/openssl/util/check-format-test-negatives.c b/deps/openssl/openssl/util/check-format-test-negatives.c index b6c42a00a0750e..f6b1bfb31920ff 100644 --- a/deps/openssl/openssl/util/check-format-test-negatives.c +++ b/deps/openssl/openssl/util/check-format-test-negatives.c @@ -1,5 +1,5 @@ /* - * Copyright 2007-2023 The OpenSSL Project Authors. All Rights Reserved. + * Copyright 2007-2024 The OpenSSL Project Authors. All Rights Reserved. * Copyright Siemens AG 2015-2022 * * Licensed under the Apache License 2.0 (the "License"). You may not use @@ -335,9 +335,8 @@ size_t UTIL_url_encode(const char *source, int f() { c; - if (1) { + if (1) c; - } c; if (1) if (2) diff --git a/deps/openssl/openssl/util/check-format.pl b/deps/openssl/openssl/util/check-format.pl index e1a91bcc58150d..ef2c1920e7220a 100755 --- a/deps/openssl/openssl/util/check-format.pl +++ b/deps/openssl/openssl/util/check-format.pl @@ -1,6 +1,6 @@ #! /usr/bin/env perl # -# Copyright 2020-2023 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2020-2024 The OpenSSL Project Authors. All Rights Reserved. # Copyright Siemens AG 2019-2022 # # Licensed under the Apache License 2.0 (the "License").
@@ -167,7 +167,7 @@ my $line_body_start; # number of line where last function body started, or 0 my $line_function_start; # number of line where last function definition started, used for $line_body_start my $last_function_header; # header containing name of last function defined, used if $line_body_start != 0 -my $line_opening_brace; # number of previous line with opening brace after do/while/for, optionally for if/else +my $line_opening_brace; # number of previous line with opening brace after if/do/while/for, optionally for 'else' my $keyword_opening_brace; # name of previous keyword, used if $line_opening_brace != 0 my $block_indent; # currently required normal indentation at block/statement level @@ -972,9 +972,12 @@ sub check_nested_nonblock_indents { # check for code block containing a single line/statement if ($line_before2 > 0 && !$outermost_level && # within function body $in_typedecl == 0 && @nested_indents == 0 && # neither within type declaration nor inside stmt/expr - m/^[\s@]*\}/) { # leading closing brace '}', any preceding blinded comment must not be matched + m/^[\s@]*\}\s*(\w*)/) { # leading closing brace '}', any preceding blinded comment must not be matched # TODO extend detection from single-line to potentially multi-line statement + my $next_word = $1; if ($line_opening_brace > 0 && + ($keyword_opening_brace ne "if" || + $extended_1_stmt || $next_word ne "else") && ($line_opening_brace == $line_before2 || $line_opening_brace == $line_before) && $contents_before =~ m/;/) { # there is at least one terminator ';', so there is some stmt @@ -1132,9 +1135,9 @@ sub check_nested_nonblock_indents { $line_body_start = $contents =~ m/LONG BODY/ ? 0 : $line if $line_function_start != 0; } } else { - $line_opening_brace = $line if $keyword_opening_brace =~ m/do|while|for/; + $line_opening_brace = $line if $keyword_opening_brace =~ m/if|do|while|for/; # using, not assigning, $keyword_opening_brace here because it could be on an earlier line - $line_opening_brace = $line if $keyword_opening_brace =~ m/if|else/ && $extended_1_stmt && + $line_opening_brace = $line if $keyword_opening_brace eq "else" && $extended_1_stmt && # TODO prevent false positives for if/else where braces around single-statement branches # should be avoided but only if all branches have just single statements # The following helps detecting the exception when handling multiple 'if ... else' branches: diff --git a/deps/openssl/openssl/util/perl/OpenSSL/Test/Utils.pm b/deps/openssl/openssl/util/perl/OpenSSL/Test/Utils.pm index dcff6a5c996795..34eafc4659a5df 100644 --- a/deps/openssl/openssl/util/perl/OpenSSL/Test/Utils.pm +++ b/deps/openssl/openssl/util/perl/OpenSSL/Test/Utils.pm @@ -1,4 +1,4 @@ -# Copyright 2016-2019 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -72,6 +72,8 @@ Returns an item from the %config hash in \$TOP/configdata.pm. =item B<have_IPv6> Return true if IPv4 / IPv6 is possible to use on the current system. +Additionally, B<have_IPv6> also checks how OpenSSL was configured, +i.e. if IPv6 was explicitly disabled with -DOPENSSL_USE_IPv6=0. =back @@ -80,6 +82,7 @@ Return true if IPv4 / IPv6 is possible to use on the current system.
our %available_protocols; our %disabled; our %config; +our %target; my $configdata_loaded = 0; sub load_configdata { @@ -91,6 +94,7 @@ sub load_configdata { %available_protocols = %configdata::available_protocols; %disabled = %configdata::disabled; %config = %configdata::config; + %target = %configdata::target; }; $configdata_loaded = 1; } @@ -221,6 +225,18 @@ sub have_IPv4 { } sub have_IPv6 { + if ($have_IPv6 < 0) { + load_configdata() unless $configdata_loaded; + # If OpenSSL is configured with IPv6 explicitly disabled, no IPv6 + # related tests should be performed. In other words, pretend IPv6 + # isn't present. + $have_IPv6 = 0 + if grep { $_ eq 'OPENSSL_USE_IPV6=0' } @{$config{CPPDEFINES}}; + # Similarly, if a config target has explicitly disabled IPv6, no + # IPv6 related tests should be performed. + $have_IPv6 = 0 + if grep { $_ eq 'OPENSSL_USE_IPV6=0' } @{$target{defines}}; + } if ($have_IPv6 < 0) { $have_IPv6 = check_IP("::1"); } diff --git a/deps/openssl/openssl/util/perl/TLSProxy/Message.pm b/deps/openssl/openssl/util/perl/TLSProxy/Message.pm index 2c1bdb3837e259..193aa2554f1971 100644 --- a/deps/openssl/openssl/util/perl/TLSProxy/Message.pm +++ b/deps/openssl/openssl/util/perl/TLSProxy/Message.pm @@ -1,4 +1,4 @@ -# Copyright 2016-2021 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -379,6 +379,15 @@ sub create_message [@message_frag_lens] ); $message->parse(); + } elsif ($mt == MT_NEXT_PROTO) { + $message = TLSProxy::NextProto->new( + $server, + $data, + [@message_rec_list], + $startoffset, + [@message_frag_lens] + ); + $message->parse(); } else { #Unknown message type $message = TLSProxy::Message->new( diff --git a/deps/openssl/openssl/util/perl/TLSProxy/NextProto.pm b/deps/openssl/openssl/util/perl/TLSProxy/NextProto.pm new file mode 100644 index 00000000000000..0e1834754667b1 --- /dev/null +++ b/deps/openssl/openssl/util/perl/TLSProxy/NextProto.pm @@ -0,0 +1,54 @@ +# Copyright 2024 The OpenSSL Project Authors. All Rights Reserved. +# +# Licensed under the Apache License 2.0 (the "License"). You may not use +# this file except in compliance with the License. You can obtain a copy +# in the file LICENSE in the source distribution or at +# https://www.openssl.org/source/license.html + +use strict; + +package TLSProxy::NextProto; + +use vars '@ISA'; +push @ISA, 'TLSProxy::Message'; + +sub new +{ + my $class = shift; + my ($server, + $data, + $records, + $startoffset, + $message_frag_lens) = @_; + + my $self = $class->SUPER::new( + $server, + TLSProxy::Message::MT_NEXT_PROTO, + $data, + $records, + $startoffset, + $message_frag_lens); + + return $self; +} + +sub parse +{ + # We don't support parsing at the moment +} + +# This is supposed to reconstruct the on-the-wire message data following changes. 
+# For now though since we don't support parsing we just create an empty NextProto +# message - this capability is used in test_npn +sub set_message_contents +{ + my $self = shift; + my $data; + + $data = pack("C32", 0x00, 0x1e, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00); + $self->data($data); +} +1; diff --git a/deps/openssl/openssl/util/perl/TLSProxy/Proxy.pm b/deps/openssl/openssl/util/perl/TLSProxy/Proxy.pm index 3de10eccb94eff..7ad7c939ad5270 100644 --- a/deps/openssl/openssl/util/perl/TLSProxy/Proxy.pm +++ b/deps/openssl/openssl/util/perl/TLSProxy/Proxy.pm @@ -1,4 +1,4 @@ -# Copyright 2016-2018 The OpenSSL Project Authors. All Rights Reserved. +# Copyright 2016-2024 The OpenSSL Project Authors. All Rights Reserved. # # Licensed under the Apache License 2.0 (the "License"). You may not use # this file except in compliance with the License. You can obtain a copy @@ -23,6 +23,7 @@ use TLSProxy::CertificateRequest; use TLSProxy::CertificateVerify; use TLSProxy::ServerKeyExchange; use TLSProxy::NewSessionTicket; +use TLSProxy::NextProto; my $have_IPv6; my $IP_factory; diff --git a/deps/undici/src/.gitignore b/deps/undici/src/.gitignore index 60aa663c8380f9..7cba7df889f509 100644 --- a/deps/undici/src/.gitignore +++ b/deps/undici/src/.gitignore @@ -84,3 +84,6 @@ undici-fetch.js .npmrc .tap + +# File generated by /test/request-timeout.js +test/request-timeout.10mb.bin diff --git a/deps/undici/src/.npmignore b/deps/undici/src/.npmignore index 879c6669f039c5..461d334ef36c51 100644 --- a/deps/undici/src/.npmignore +++ b/deps/undici/src/.npmignore @@ -11,3 +11,6 @@ lib/llhttp/llhttp.wasm !index.d.ts !docs/docs/**/* !scripts/strip-comments.js + +# File generated by /test/request-timeout.js +test/request-timeout.10mb.bin diff --git a/deps/undici/src/docs/docs/api/Dispatcher.md b/deps/undici/src/docs/docs/api/Dispatcher.md index ecc3cfd61f73e0..574030bf686be8 100644 --- a/deps/undici/src/docs/docs/api/Dispatcher.md +++ b/deps/undici/src/docs/docs/api/Dispatcher.md @@ -984,6 +984,203 @@ client.dispatch( ); ``` +##### `Response Error Interceptor` + +**Introduction** + +The Response Error Interceptor is designed to handle HTTP response errors efficiently. It intercepts responses and throws detailed errors for responses with status codes indicating failure (4xx, 5xx). This interceptor enhances error handling by providing structured error information, including response headers, data, and status codes. + +**ResponseError Class** + +The `ResponseError` class extends the `UndiciError` class and encapsulates detailed error information. It captures the response status code, headers, and data, providing a structured way to handle errors. + +**Definition** + +```js +class ResponseError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message); + this.name = 'ResponseError'; + this.message = message || 'Response error'; + this.code = 'UND_ERR_RESPONSE'; + this.statusCode = code; + this.data = data; + this.headers = headers; + } +} +``` + +**Interceptor Handler** + +The interceptor's handler class extends `DecoratorHandler` and overrides methods to capture response details and handle errors based on the response status code. + +**Methods** + +- **onConnect**: Initializes response properties. +- **onHeaders**: Captures headers and status code. Decodes body if content type is `application/json` or `text/plain`. 
+- **onData**: Appends chunks to the body if status code indicates an error. +- **onComplete**: Finalizes error handling, constructs a `ResponseError`, and invokes the `onError` method. +- **onError**: Propagates errors to the handler. + +**Definition** + +```js +class Handler extends DecoratorHandler { + // Private properties + #handler; + #statusCode; + #contentType; + #decoder; + #headers; + #body; + + constructor (opts, { handler }) { + super(handler); + this.#handler = handler; + } + + onConnect (abort) { + this.#statusCode = 0; + this.#contentType = null; + this.#decoder = null; + this.#headers = null; + this.#body = ''; + return this.#handler.onConnect(abort); + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage, headers = parseHeaders(rawHeaders)) { + this.#statusCode = statusCode; + this.#headers = headers; + this.#contentType = headers['content-type']; + + if (this.#statusCode < 400) { + return this.#handler.onHeaders(statusCode, rawHeaders, resume, statusMessage, headers); + } + + if (this.#contentType === 'application/json' || this.#contentType === 'text/plain') { + this.#decoder = new TextDecoder('utf-8'); + } + } + + onData (chunk) { + if (this.#statusCode < 400) { + return this.#handler.onData(chunk); + } + this.#body += this.#decoder?.decode(chunk, { stream: true }) ?? ''; + } + + onComplete (rawTrailers) { + if (this.#statusCode >= 400) { + this.#body += this.#decoder?.decode(undefined, { stream: false }) ?? ''; + if (this.#contentType === 'application/json') { + try { + this.#body = JSON.parse(this.#body); + } catch { + // Do nothing... + } + } + + let err; + const stackTraceLimit = Error.stackTraceLimit; + Error.stackTraceLimit = 0; + try { + err = new ResponseError('Response Error', this.#statusCode, { headers: this.#headers, data: this.#body }); + } finally { + Error.stackTraceLimit = stackTraceLimit; + } + + this.#handler.onError(err); + } else { + this.#handler.onComplete(rawTrailers); + } + } + + onError (err) { + this.#handler.onError(err); + } +} + +module.exports = (dispatch) => (opts, handler) => opts.throwOnError + ? dispatch(opts, new Handler(opts, { handler })) + : dispatch(opts, handler); +``` + +**Tests** + +Unit tests ensure the interceptor functions correctly, handling both error and non-error responses appropriately.
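+
+Before looking at the tests, here is a minimal usage sketch from the
+application's point of view (the `client` dispatcher and the request options
+are assumptions for illustration, not defined above): with the interceptor
+installed and `throwOnError` set, a 4xx/5xx response surfaces as a
+`ResponseError` carrying the captured status code, headers, and (for JSON or
+plain-text responses) the decoded body.
+
+```js
+try {
+  await client.request({ method: 'GET', path: '/missing', throwOnError: true })
+} catch (err) {
+  if (err.code === 'UND_ERR_RESPONSE') {
+    // Populated by the interceptor before onError is invoked
+    console.error(err.statusCode, err.headers, err.data)
+  }
+}
+```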
+ +**Example Tests** + +- **No Error if `throwOnError` is False**: + +```js +test('should not error if request is not meant to throw error', async (t) => { + const opts = { throwOnError: false }; + const handler = { onError: () => {}, onData: () => {}, onComplete: () => {} }; + const interceptor = createResponseErrorInterceptor((opts, handler) => handler.onComplete()); + assert.doesNotThrow(() => interceptor(opts, handler)); +}); +``` + +- **Error if Status Code is in Specified Error Codes**: + +```js +test('should error if request status code is in the specified error codes', async (t) => { + const opts = { throwOnError: true, statusCodes: [500] }; + const response = { statusCode: 500 }; + let capturedError; + const handler = { + onError: (err) => { capturedError = err; }, + onData: () => {}, + onComplete: () => {} + }; + + const interceptor = createResponseErrorInterceptor((opts, handler) => { + if (opts.throwOnError && opts.statusCodes.includes(response.statusCode)) { + handler.onError(new Error('Response Error')); + } else { + handler.onComplete(); + } + }); + + interceptor({ ...opts, response }, handler); + + await new Promise(resolve => setImmediate(resolve)); + + assert(capturedError, 'Expected error to be captured but it was not.'); + assert.strictEqual(capturedError.message, 'Response Error'); + assert.strictEqual(response.statusCode, 500); +}); +``` + +- **No Error if Status Code is Not in Specified Error Codes**: + +```js +test('should not error if request status code is not in the specified error codes', async (t) => { + const opts = { throwOnError: true, statusCodes: [500] }; + const response = { statusCode: 404 }; + const handler = { + onError: () => {}, + onData: () => {}, + onComplete: () => {} + }; + + const interceptor = createResponseErrorInterceptor((opts, handler) => { + if (opts.throwOnError && opts.statusCodes.includes(response.statusCode)) { + handler.onError(new Error('Response Error')); + } else { + handler.onComplete(); + } + }); + + assert.doesNotThrow(() => interceptor({ ...opts, response }, handler)); +}); +``` + +**Conclusion** + +The Response Error Interceptor provides a robust mechanism for handling HTTP response errors by capturing detailed error information and propagating it through a structured `ResponseError` class. This enhancement improves error handling and debugging capabilities in applications using the interceptor. + ## Instance Events ### Event: `'connect'` diff --git a/deps/undici/src/docs/docs/api/RetryHandler.md b/deps/undici/src/docs/docs/api/RetryHandler.md index 8988ee5301077c..0dd9f29533f861 100644 --- a/deps/undici/src/docs/docs/api/RetryHandler.md +++ b/deps/undici/src/docs/docs/api/RetryHandler.md @@ -19,7 +19,7 @@ Extends: [`Dispatch.DispatchOptions`](Dispatcher.md#parameter-dispatchoptions). #### `RetryOptions` -- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => void` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed. +- **retry** `(err: Error, context: RetryContext, callback: (err?: Error | null) => void) => number | null` (optional) - Function to be called after every retry. It should pass error if no more retries should be performed. - **maxRetries** `number` (optional) - Maximum number of retries. Default: `5` - **maxTimeout** `number` (optional) - Maximum number of milliseconds to wait before retrying. Default: `30000` (30 seconds) - **minTimeout** `number` (optional) - Minimum number of milliseconds to wait before retrying. 
Default: `500` (half a second) diff --git a/deps/undici/src/lib/api/api-upgrade.js b/deps/undici/src/lib/api/api-upgrade.js index 019fe1efa2d3d7..7effcf21049d7c 100644 --- a/deps/undici/src/lib/api/api-upgrade.js +++ b/deps/undici/src/lib/api/api-upgrade.js @@ -50,9 +50,9 @@ class UpgradeHandler extends AsyncResource { } onUpgrade (statusCode, rawHeaders, socket) { - const { callback, opaque, context } = this + assert(statusCode === 101) - assert.strictEqual(statusCode, 101) + const { callback, opaque, context } = this removeSignal(this) diff --git a/deps/undici/src/lib/core/connect.js b/deps/undici/src/lib/core/connect.js index b388f022298391..8ab21fcd5fc41e 100644 --- a/deps/undici/src/lib/core/connect.js +++ b/deps/undici/src/lib/core/connect.js @@ -4,6 +4,9 @@ const net = require('node:net') const assert = require('node:assert') const util = require('./util') const { InvalidArgumentError, ConnectTimeoutError } = require('./errors') +const timers = require('../util/timers') + +function noop () {} let tls // include tls conditionally since it is not always available @@ -91,9 +94,11 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess servername = servername || options.servername || util.getServerName(host) || null const sessionKey = servername || hostname + assert(sessionKey) + const session = customSession || sessionCache.get(sessionKey) || null - assert(sessionKey) + port = port || 443 socket = tls.connect({ highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... @@ -104,7 +109,7 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess // TODO(HTTP/2): Add support for h2c ALPNProtocols: allowH2 ? ['http/1.1', 'h2'] : ['http/1.1'], socket: httpSocket, // upgrade socket connection - port: port || 443, + port, host: hostname }) @@ -115,11 +120,14 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess }) } else { assert(!httpSocket, 'httpSocket can only be sent on TLS update') + + port = port || 80 + socket = net.connect({ highWaterMark: 64 * 1024, // Same as nodejs fs streams. ...options, localAddress, - port: port || 80, + port, host: hostname }) } @@ -130,12 +138,12 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess socket.setKeepAlive(true, keepAliveInitialDelay) } - const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout) + const clearConnectTimeout = setupConnectTimeout(new WeakRef(socket), { timeout, hostname, port }) socket .setNoDelay(true) .once(protocol === 'https:' ? 'secureConnect' : 'connect', function () { - cancelTimeout() + queueMicrotask(clearConnectTimeout) if (callback) { const cb = callback @@ -144,7 +152,7 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess } }) .on('error', function (err) { - cancelTimeout() + queueMicrotask(clearConnectTimeout) if (callback) { const cb = callback @@ -157,36 +165,70 @@ function buildConnector ({ allowH2, maxCachedSessions, socketPath, timeout, sess } } -function setupTimeout (onConnectTimeout, timeout) { - if (!timeout) { - return () => {} - } +/** + * @param {WeakRef} socketWeakRef + * @param {object} opts + * @param {number} opts.timeout + * @param {string} opts.hostname + * @param {number} opts.port + * @returns {() => void} + */ +const setupConnectTimeout = process.platform === 'win32' + ? 
(socketWeakRef, opts) => { + if (!opts.timeout) { + return noop + } - let s1 = null - let s2 = null - const timeoutId = setTimeout(() => { - // setImmediate is added to make sure that we prioritize socket error events over timeouts - s1 = setImmediate(() => { - if (process.platform === 'win32') { + let s1 = null + let s2 = null + const fastTimer = timers.setFastTimeout(() => { + // setImmediate is added to make sure that we prioritize socket error events over timeouts + s1 = setImmediate(() => { // Windows needs an extra setImmediate probably due to implementation differences in the socket logic - s2 = setImmediate(() => onConnectTimeout()) - } else { - onConnectTimeout() + s2 = setImmediate(() => onConnectTimeout(socketWeakRef.deref(), opts)) + }) + }, opts.timeout) + return () => { + timers.clearFastTimeout(fastTimer) + clearImmediate(s1) + clearImmediate(s2) + } + } + : (socketWeakRef, opts) => { + if (!opts.timeout) { + return noop } - }) - }, timeout) - return () => { - clearTimeout(timeoutId) - clearImmediate(s1) - clearImmediate(s2) - } -} -function onConnectTimeout (socket) { + let s1 = null + const fastTimer = timers.setFastTimeout(() => { + // setImmediate is added to make sure that we prioritize socket error events over timeouts + s1 = setImmediate(() => { + onConnectTimeout(socketWeakRef.deref(), opts) + }) + }, opts.timeout) + return () => { + timers.clearFastTimeout(fastTimer) + clearImmediate(s1) + } + } + +/** + * @param {net.Socket} socket + * @param {object} opts + * @param {number} opts.timeout + * @param {string} opts.hostname + * @param {number} opts.port + */ +function onConnectTimeout (socket, opts) { let message = 'Connect Timeout Error' if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) { - message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')})` + message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(', ')},` + } else { + message += ` (attempted address: ${opts.hostname}:${opts.port},` } + + message += ` timeout: ${opts.timeout}ms)` + util.destroy(socket, new ConnectTimeoutError(message)) } diff --git a/deps/undici/src/lib/core/errors.js b/deps/undici/src/lib/core/errors.js index 3d69fdbecba200..9257875c1c32f4 100644 --- a/deps/undici/src/lib/core/errors.js +++ b/deps/undici/src/lib/core/errors.js @@ -195,6 +195,18 @@ class RequestRetryError extends UndiciError { } } +class ResponseError extends UndiciError { + constructor (message, code, { headers, data }) { + super(message) + this.name = 'ResponseError' + this.message = message || 'Response error' + this.code = 'UND_ERR_RESPONSE' + this.statusCode = code + this.data = data + this.headers = headers + } +} + class SecureProxyConnectionError extends UndiciError { constructor (cause, message, options) { super(message, { cause, ...(options ?? 
{}) }) @@ -227,5 +239,6 @@ module.exports = { BalancedPoolMissingUpstreamError, ResponseExceededMaxSizeError, RequestRetryError, + ResponseError, SecureProxyConnectionError } diff --git a/deps/undici/src/lib/core/util.js b/deps/undici/src/lib/core/util.js index 00f8a9b200a99f..9ee7ec23c52241 100644 --- a/deps/undici/src/lib/core/util.js +++ b/deps/undici/src/lib/core/util.js @@ -233,7 +233,7 @@ function getServerName (host) { return null } - assert.strictEqual(typeof host, 'string') + assert(typeof host === 'string') const servername = getHostname(host) if (net.isIP(servername)) { diff --git a/deps/undici/src/lib/dispatcher/client-h1.js b/deps/undici/src/lib/dispatcher/client-h1.js index 2f1c96724d3951..40628e53d4ce1e 100644 --- a/deps/undici/src/lib/dispatcher/client-h1.js +++ b/deps/undici/src/lib/dispatcher/client-h1.js @@ -85,35 +85,35 @@ async function lazyllhttp () { return 0 }, wasm_on_status: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) + assert(currentParser.ptr === p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_message_begin: (p) => { - assert.strictEqual(currentParser.ptr, p) + assert(currentParser.ptr === p) return currentParser.onMessageBegin() || 0 }, wasm_on_header_field: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) + assert(currentParser.ptr === p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_header_value: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) + assert(currentParser.ptr === p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { - assert.strictEqual(currentParser.ptr, p) + assert(currentParser.ptr === p) return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0 }, wasm_on_body: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p) + assert(currentParser.ptr === p) const start = at - currentBufferPtr + currentBufferRef.byteOffset return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0 }, wasm_on_message_complete: (p) => { - assert.strictEqual(currentParser.ptr, p) + assert(currentParser.ptr === p) return currentParser.onMessageComplete() || 0 } @@ -131,9 +131,17 @@ let currentBufferRef = null let currentBufferSize = 0 let currentBufferPtr = null -const TIMEOUT_HEADERS = 1 -const TIMEOUT_BODY = 2 -const TIMEOUT_IDLE = 3 +const USE_NATIVE_TIMER = 0 +const USE_FAST_TIMER = 1 + +// Use fast timers for headers and body to take eventual event loop +// latency into account. +const TIMEOUT_HEADERS = 2 | USE_FAST_TIMER +const TIMEOUT_BODY = 4 | USE_FAST_TIMER + +// Use native timers to ignore event loop latency for keep-alive +// handling. 
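+ // A sketch of the flag encoding used by these constants (names as defined
+ // here): the low bit selects the timer implementation, while the higher
+ // bits keep the timeout kinds distinct:
+ //
+ //   TIMEOUT_HEADERS    = 2 | USE_FAST_TIMER   // 0b0011 -> fast timer
+ //   TIMEOUT_BODY       = 4 | USE_FAST_TIMER   // 0b0101 -> fast timer
+ //   TIMEOUT_KEEP_ALIVE = 8 | USE_NATIVE_TIMER // 0b1000 -> native timer
+ //
+ // A switch between implementations can therefore be detected with a single
+ // XOR of the masked bits, as Parser.setTimeout() below does:
+ //
+ //   (type & USE_FAST_TIMER) ^ (this.timeoutType & USE_FAST_TIMER)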
+const TIMEOUT_KEEP_ALIVE = 8 | USE_NATIVE_TIMER class Parser { constructor (client, socket, { exports }) { @@ -164,26 +172,39 @@ class Parser { this.maxResponseSize = client[kMaxResponseSize] } - setTimeout (value, type) { - this.timeoutType = type - if (value !== this.timeoutValue) { - timers.clearTimeout(this.timeout) - if (value) { - this.timeout = timers.setTimeout(onParserTimeout, value, this) - // istanbul ignore else: only for jest - if (this.timeout.unref) { + setTimeout (delay, type) { + // If the existing timer and the new timer are of different timer type + // (fast or native) or have different delay, we need to clear the existing + // timer and set a new one. + if ( + delay !== this.timeoutValue || + (type & USE_FAST_TIMER) ^ (this.timeoutType & USE_FAST_TIMER) + ) { + // If a timeout is already set, clear it with clearTimeout of the fast + // timer implementation, as it can clear fast and native timers. + if (this.timeout) { + timers.clearTimeout(this.timeout) + this.timeout = null + } + + if (delay) { + if (type & USE_FAST_TIMER) { + this.timeout = timers.setFastTimeout(onParserTimeout, delay, new WeakRef(this)) + } else { + this.timeout = setTimeout(onParserTimeout, delay, new WeakRef(this)) this.timeout.unref() } - } else { - this.timeout = null } - this.timeoutValue = value + + this.timeoutValue = delay } else if (this.timeout) { // istanbul ignore else: only for jest if (this.timeout.refresh) { this.timeout.refresh() } } + + this.timeoutType = type } resume () { @@ -288,7 +309,7 @@ class Parser { this.llhttp.llhttp_free(this.ptr) this.ptr = null - timers.clearTimeout(this.timeout) + this.timeout && timers.clearTimeout(this.timeout) this.timeout = null this.timeoutValue = null this.timeoutType = null @@ -363,20 +384,19 @@ class Parser { const { upgrade, client, socket, headers, statusCode } = this assert(upgrade) + assert(client[kSocket] === socket) + assert(!socket.destroyed) + assert(!this.paused) + assert((headers.length & 1) === 0) const request = client[kQueue][client[kRunningIdx]] assert(request) - - assert(!socket.destroyed) - assert(socket === client[kSocket]) - assert(!this.paused) assert(request.upgrade || request.method === 'CONNECT') this.statusCode = null this.statusText = '' this.shouldKeepAlive = null - assert(this.headers.length % 2 === 0) this.headers = [] this.headersSize = 0 @@ -433,7 +453,7 @@ class Parser { return -1 } - assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS) + assert(this.timeoutType === TIMEOUT_HEADERS) this.statusCode = statusCode this.shouldKeepAlive = ( @@ -466,7 +486,7 @@ class Parser { return 2 } - assert(this.headers.length % 2 === 0) + assert((this.headers.length & 1) === 0) this.headers = [] this.headersSize = 0 @@ -523,7 +543,7 @@ class Parser { const request = client[kQueue][client[kRunningIdx]] assert(request) - assert.strictEqual(this.timeoutType, TIMEOUT_BODY) + assert(this.timeoutType === TIMEOUT_BODY) if (this.timeout) { // istanbul ignore else: only for jest if (this.timeout.refresh) { @@ -556,11 +576,12 @@ class Parser { return } + assert(statusCode >= 100) + assert((this.headers.length & 1) === 0) + const request = client[kQueue][client[kRunningIdx]] assert(request) - assert(statusCode >= 100) - this.statusCode = null this.statusText = '' this.bytesRead = 0 @@ -568,7 +589,6 @@ class Parser { this.keepAlive = '' this.connection = '' - assert(this.headers.length % 2 === 0) this.headers = [] this.headersSize = 0 @@ -587,7 +607,7 @@ class Parser { client[kQueue][client[kRunningIdx]++] = null if (socket[kWriting]) { - 
assert.strictEqual(client[kRunning], 0) + assert(client[kRunning] === 0) // Response completed before request. util.destroy(socket, new InformationalError('reset')) return constants.ERROR.PAUSED @@ -613,19 +633,19 @@ } function onParserTimeout (parser) { - const { socket, timeoutType, client } = parser + const { socket, timeoutType, client, paused } = parser.deref() /* istanbul ignore else */ if (timeoutType === TIMEOUT_HEADERS) { if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { - assert(!parser.paused, 'cannot be paused while waiting for headers') + assert(!paused, 'cannot be paused while waiting for headers') util.destroy(socket, new HeadersTimeoutError()) } } else if (timeoutType === TIMEOUT_BODY) { - if (!parser.paused) { + if (!paused) { util.destroy(socket, new BodyTimeoutError()) } - } else if (timeoutType === TIMEOUT_IDLE) { + } else if (timeoutType === TIMEOUT_KEEP_ALIVE) { assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]) util.destroy(socket, new InformationalError('socket idle timeout')) } @@ -646,10 +666,10 @@ async function connectH1 (client, socket) { socket[kParser] = new Parser(client, socket, llhttpInstance) addListener(socket, 'error', function (err) { - const parser = this[kParser] - assert(err.code !== 'ERR_TLS_CERT_ALTNAME_INVALID') + const parser = this[kParser] + // On Mac OS, we get an ECONNRESET even if there is a full body to be forwarded // to the user. if (err.code === 'ECONNRESET' && parser.statusCode && !parser.shouldKeepAlive) { @@ -803,8 +823,8 @@ } if (client[kSize] === 0) { - if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { - socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE) + if (socket[kParser].timeoutType !== TIMEOUT_KEEP_ALIVE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_KEEP_ALIVE) } } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { diff --git a/deps/undici/src/lib/dispatcher/client.js b/deps/undici/src/lib/dispatcher/client.js index cb61206b1edc2a..7e22aa000ba5a7 100644 --- a/deps/undici/src/lib/dispatcher/client.js +++ b/deps/undici/src/lib/dispatcher/client.js @@ -385,6 +385,10 @@ function onError (client, err) { } } +/** + * @param {Client} client + * @returns + */ async function connect (client) { assert(!client[kConnecting]) assert(!client[kHTTPContext]) diff --git a/deps/undici/src/lib/handler/retry-handler.js b/deps/undici/src/lib/handler/retry-handler.js index f7dedfa4bac35b..e00e82faf08da4 100644 --- a/deps/undici/src/lib/handler/retry-handler.js +++ b/deps/undici/src/lib/handler/retry-handler.js @@ -192,8 +192,18 @@ class RetryHandler { if (this.resume != null) { this.resume = null - if (statusCode !== 206) { - return true + // Only Partial Content (206) is supposed to provide Content-Range; + // any other status code that partially consumed the payload + // should not be retried, because it would result in downstream + // wrongly concatenating multiple responses.
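+ // A sketch of the cases the guard below distinguishes, assuming a retried
+ // request whose earlier attempt may have consumed part of the payload
+ // (this.start tracks the resume offset):
+ //
+ //   statusCode === 206                -> validate Content-Range below and
+ //                                        resume the download
+ //   statusCode === 200 && start === 0 -> the server ignored the Range
+ //                                        header, but nothing was consumed
+ //                                        yet, so the full body is usable
+ //   anything else                     -> abort with RequestRetryError, as
+ //                                        appending this body to the bytes
+ //                                        already consumed would corrupt
+ //                                        the response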
+ if (statusCode !== 206 && (this.start > 0 || statusCode !== 200)) { + this.abort( + new RequestRetryError('server does not support the range header and the payload was partially consumed', statusCode, { + headers, + data: { count: this.retryCount } + }) + ) + return false } const contentRange = parseRangeHeader(headers['content-range']) diff --git a/deps/undici/src/lib/interceptor/response-error.js b/deps/undici/src/lib/interceptor/response-error.js new file mode 100644 index 00000000000000..3ded9c87fb736b --- /dev/null +++ b/deps/undici/src/lib/interceptor/response-error.js @@ -0,0 +1,86 @@ +'use strict' + +const { parseHeaders } = require('../core/util') +const DecoratorHandler = require('../handler/decorator-handler') +const { ResponseError } = require('../core/errors') + +class Handler extends DecoratorHandler { + #handler + #statusCode + #contentType + #decoder + #headers + #body + + constructor (opts, { handler }) { + super(handler) + this.#handler = handler + } + + onConnect (abort) { + this.#statusCode = 0 + this.#contentType = null + this.#decoder = null + this.#headers = null + this.#body = '' + + return this.#handler.onConnect(abort) + } + + onHeaders (statusCode, rawHeaders, resume, statusMessage, headers = parseHeaders(rawHeaders)) { + this.#statusCode = statusCode + this.#headers = headers + this.#contentType = headers['content-type'] + + if (this.#statusCode < 400) { + return this.#handler.onHeaders(statusCode, rawHeaders, resume, statusMessage, headers) + } + + if (this.#contentType === 'application/json' || this.#contentType === 'text/plain') { + this.#decoder = new TextDecoder('utf-8') + } + } + + onData (chunk) { + if (this.#statusCode < 400) { + return this.#handler.onData(chunk) + } + + this.#body += this.#decoder?.decode(chunk, { stream: true }) ?? '' + } + + onComplete (rawTrailers) { + if (this.#statusCode >= 400) { + this.#body += this.#decoder?.decode(undefined, { stream: false }) ?? '' + + if (this.#contentType === 'application/json') { + try { + this.#body = JSON.parse(this.#body) + } catch { + // Do nothing... + } + } + + let err + const stackTraceLimit = Error.stackTraceLimit + Error.stackTraceLimit = 0 + try { + err = new ResponseError('Response Error', this.#statusCode, { headers: this.#headers, data: this.#body }) + } finally { + Error.stackTraceLimit = stackTraceLimit + } + + this.#handler.onError(err) + } else { + this.#handler.onComplete(rawTrailers) + } + } + + onError (err) { + this.#handler.onError(err) + } +} + +module.exports = (dispatch) => (opts, handler) => opts.throwOnError + ? 
dispatch(opts, new Handler(opts, { handler })) + : dispatch(opts, handler) diff --git a/deps/undici/src/lib/llhttp/wasm_build_env.txt b/deps/undici/src/lib/llhttp/wasm_build_env.txt index 88676cd7cd04d1..0686c4b5d6ddf0 100644 --- a/deps/undici/src/lib/llhttp/wasm_build_env.txt +++ b/deps/undici/src/lib/llhttp/wasm_build_env.txt @@ -1,12 +1,12 @@ -> undici@6.19.8 prebuild:wasm +> undici@6.20.0 prebuild:wasm > node build/wasm.js --prebuild > docker build --platform=linux/x86_64 -t llhttp_wasm_builder -f /home/runner/work/node/node/deps/undici/src/build/Dockerfile /home/runner/work/node/node/deps/undici/src -> undici@6.19.8 build:wasm +> undici@6.20.0 build:wasm > node build/wasm.js --docker > docker run --rm -t --platform=linux/x86_64 --user 1001:127 --mount type=bind,source=/home/runner/work/node/node/deps/undici/src/lib/llhttp,target=/home/node/undici/lib/llhttp llhttp_wasm_builder node build/wasm.js diff --git a/deps/undici/src/lib/mock/mock-utils.js b/deps/undici/src/lib/mock/mock-utils.js index f3c284d789117e..8f18db31ee210c 100644 --- a/deps/undici/src/lib/mock/mock-utils.js +++ b/deps/undici/src/lib/mock/mock-utils.js @@ -118,6 +118,10 @@ function matchKey (mockDispatch, { path, method, body, headers }) { function getResponseData (data) { if (Buffer.isBuffer(data)) { return data + } else if (data instanceof Uint8Array) { + return data + } else if (data instanceof ArrayBuffer) { + return data } else if (typeof data === 'object') { return JSON.stringify(data) } else { diff --git a/deps/undici/src/lib/util/timers.js b/deps/undici/src/lib/util/timers.js index d0091cc15f717c..c15bbc37aa158c 100644 --- a/deps/undici/src/lib/util/timers.js +++ b/deps/undici/src/lib/util/timers.js @@ -1,99 +1,423 @@ 'use strict' -const TICK_MS = 499 +/** + * This module offers an optimized timer implementation designed for scenarios + * where high precision is not critical. + * + * The timer achieves faster performance by using a low-resolution approach, + * with an accuracy target of within 500ms. This makes it particularly useful + * for timers with delays of 1 second or more, where exact timing is less + * crucial. + * + * It's important to note that Node.js timers are inherently imprecise, as + * delays can occur due to the event loop being blocked by other operations. + * Consequently, timers may trigger later than their scheduled time. + */ -let fastNow = Date.now() +/** + * The fastNow variable contains the internal fast timer clock value. + * + * @type {number} + */ +let fastNow = 0 + +/** + * RESOLUTION_MS represents the target resolution time in milliseconds. + * + * @type {number} + * @default 1000 + */ +const RESOLUTION_MS = 1e3 + +/** + * TICK_MS defines the desired interval in milliseconds between each tick. + * The target value is set to half the resolution time, minus 1 ms, to account + * for potential event loop overhead. + * + * @type {number} + * @default 499 + */ +const TICK_MS = (RESOLUTION_MS >> 1) - 1 + +/** + * fastNowTimeout is a Node.js timer used to manage and process + * the FastTimers stored in the `fastTimers` array. + * + * @type {NodeJS.Timeout} + */ let fastNowTimeout +/** + * The kFastTimer symbol is used to identify FastTimer instances. + * + * @type {Symbol} + */ +const kFastTimer = Symbol('kFastTimer') + +/** + * The fastTimers array contains all active FastTimers. + * + * @type {FastTimer[]} + */ const fastTimers = [] -function onTimeout () { - fastNow = Date.now() +/** + * These constants represent the various states of a FastTimer. 
+ */ - let len = fastTimers.length +/** + * The `NOT_IN_LIST` constant indicates that the FastTimer is not included + * in the `fastTimers` array. Timers with this status will not be processed + * during the next tick by the `onTick` function. + * + * A FastTimer can be re-added to the `fastTimers` array by invoking the + * `refresh` method on the FastTimer instance. + * + * @type {-2} + */ +const NOT_IN_LIST = -2 + +/** + * The `TO_BE_CLEARED` constant indicates that the FastTimer is scheduled + * for removal from the `fastTimers` array. A FastTimer in this state will + * be removed in the next tick by the `onTick` function and will no longer + * be processed. + * + * This status is also set when the `clear` method is called on the FastTimer instance. + * + * @type {-1} + */ +const TO_BE_CLEARED = -1 + +/** + * The `PENDING` constant signifies that the FastTimer is awaiting processing + * in the next tick by the `onTick` function. Timers with this status will have + * their `_idleStart` value set and their status updated to `ACTIVE` in the next tick. + * + * @type {0} + */ +const PENDING = 0 + +/** + * The `ACTIVE` constant indicates that the FastTimer is active and waiting + * for its timer to expire. During the next tick, the `onTick` function will + * check if the timer has expired, and if so, it will execute the associated callback. + * + * @type {1} + */ +const ACTIVE = 1 + +/** + * The onTick function processes the fastTimers array. + * + * @returns {void} + */ +function onTick () { + /** + * Increment the fastNow value by the TICK_MS value, regardless of the actual + * time that has passed since the last tick. This approach ensures independence + * from the system clock and delays caused by a blocked event loop. + * + * @type {number} + */ + fastNow += TICK_MS + + /** + * The `idx` variable is used to iterate over the `fastTimers` array. + * Expired timers are removed by replacing them with the last element in the array. + * Consequently, `idx` is only incremented when the current element is not removed. + * + * @type {number} + */ let idx = 0 + + /** + * The len variable will contain the length of the fastTimers array + * and will be decremented when a FastTimer should be removed from the + * fastTimers array. + * + * @type {number} + */ + let len = fastTimers.length + while (idx < len) { + /** + * @type {FastTimer} + */ const timer = fastTimers[idx] - if (timer.state === 0) { - timer.state = fastNow + timer.delay - TICK_MS - } else if (timer.state > 0 && fastNow >= timer.state) { - timer.state = -1 - timer.callback(timer.opaque) + // Activate a PENDING timer first; an ACTIVE timer whose deadline has + // passed is expired, and its callback is executed in this very tick. + if (timer._state === PENDING) { + // Set the _idleStart value to the fastNow value minus the TICK_MS value + // to account for the time the timer was in the PENDING state. + timer._idleStart = fastNow - TICK_MS + timer._state = ACTIVE + } else if ( + timer._state === ACTIVE && + fastNow >= timer._idleStart + timer._idleTimeout + ) { + timer._state = TO_BE_CLEARED + timer._idleStart = -1 + timer._onTimeout(timer._timerArg) } - if (timer.state === -1) { - timer.state = -2 - if (idx !== len - 1) { - fastTimers[idx] = fastTimers.pop() - } else { - fastTimers.pop() + if (timer._state === TO_BE_CLEARED) { + timer._state = NOT_IN_LIST + + // Move the last element to the current index and decrement len if it is + // not the only element in the array.
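+ // A worked example of the swap-removal below: with fastTimers = [A, B, C, D]
+ // and an expired timer at idx === 1, `--len` yields 3 and fastTimers[1] is
+ // overwritten with D; trimming the array to len afterwards leaves [A, D, C].
+ // Order is not preserved, but removal stays O(1) and the loop re-checks
+ // idx 1 (now D) without incrementing.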
+ if (--len !== 0) { + fastTimers[idx] = fastTimers[len] } - len -= 1 } else { - idx += 1 + ++idx } } - if (fastTimers.length > 0) { + // Set the length of the fastTimers array to the new length, thus + // removing the excess FastTimer elements from the array. + fastTimers.length = len + + // If there are still active FastTimers in the array, refresh the Timer. + // If there are no active FastTimers, the timer will be refreshed again + // when a new FastTimer is instantiated. + if (fastTimers.length !== 0) { refreshTimeout() } } function refreshTimeout () { - if (fastNowTimeout?.refresh) { + // If the fastNowTimeout is already set, refresh it. + if (fastNowTimeout) { fastNowTimeout.refresh() + // fastNowTimeout is not instantiated yet, create a new Timer. } else { clearTimeout(fastNowTimeout) - fastNowTimeout = setTimeout(onTimeout, TICK_MS) + fastNowTimeout = setTimeout(onTick, TICK_MS) + + // If the Timer has an unref method, call it to allow the process to exit if + // there are no other active handles. if (fastNowTimeout.unref) { fastNowTimeout.unref() } } } -class Timeout { - constructor (callback, delay, opaque) { - this.callback = callback - this.delay = delay - this.opaque = opaque +/** + * The `FastTimer` class is a data structure designed to store and manage + * timer information. + */ +class FastTimer { + [kFastTimer] = true + + /** + * The state of the timer, which can be one of the following: + * - NOT_IN_LIST (-2) + * - TO_BE_CLEARED (-1) + * - PENDING (0) + * - ACTIVE (1) + * + * @type {-2|-1|0|1} + * @private + */ + _state = NOT_IN_LIST - // -2 not in timer list - // -1 in timer list but inactive - // 0 in timer list waiting for time - // > 0 in timer list waiting for time to expire - this.state = -2 + /** + * The number of milliseconds to wait before calling the callback. + * + * @type {number} + * @private + */ + _idleTimeout = -1 + + /** + * The time in milliseconds when the timer was started. This value is used to + * calculate when the timer should expire. + * + * @type {number} + * @default -1 + * @private + */ + _idleStart = -1 + + /** + * The function to be executed when the timer expires. + * @type {Function} + * @private + */ + _onTimeout + + /** + * The argument to be passed to the callback when the timer expires. + * + * @type {*} + * @private + */ + _timerArg + + /** + * @constructor + * @param {Function} callback A function to be executed after the timer + * expires. + * @param {number} delay The time, in milliseconds, that the timer should wait + * before the specified function or code is executed. + * @param {*} arg + */ + constructor (callback, delay, arg) { + this._onTimeout = callback + this._idleTimeout = delay + this._timerArg = arg this.refresh() } + /** + * Sets the timer's start time to the current time, and reschedules the timer + * to call its callback at the previously specified duration adjusted to the + * current time. + * Using this on a timer that has already called its callback will reactivate + * the timer. + * + * @returns {void} + */ refresh () { - if (this.state === -2) { + // In the special case that the timer is not in the list of active timers, + // add it back to the array to be processed in the next tick by the onTick + // function. + if (this._state === NOT_IN_LIST) { fastTimers.push(this) - if (!fastNowTimeout || fastTimers.length === 1) { - refreshTimeout() - } } - this.state = 0 + // If the timer is the only active timer, refresh the fastNowTimeout for + // better resolution.
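+ // For example, if the previous ~499ms tick fired 400ms ago and this is now
+ // the only FastTimer in the list, restarting the underlying native timer
+ // gives this timer a full first interval instead of an almost-elapsed one,
+ // keeping the observed error within the documented ~500ms target. This also
+ // covers the case where onTick stopped rescheduling because the list had
+ // drained.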
+ if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout() + } + + // Setting the state to PENDING will cause the timer to be reset in the + // next tick by the onTick function. + this._state = PENDING } + /** + * The `clear` method cancels the timer, preventing it from executing. + * + * @returns {void} + * @private + */ clear () { - this.state = -1 + // Set the state to TO_BE_CLEARED to mark the timer for removal in the next + // tick by the onTick function. + this._state = TO_BE_CLEARED + + // Reset the _idleStart value to -1 to indicate that the timer is no longer + // active. + this._idleStart = -1 } } +/** + * This module exports a setTimeout and clearTimeout function that can be + * used as a drop-in replacement for the native functions. + */ module.exports = { - setTimeout (callback, delay, opaque) { - return delay <= 1e3 - ? setTimeout(callback, delay, opaque) - : new Timeout(callback, delay, opaque) + /** + * The setTimeout() method sets a timer which executes a function once the + * timer expires. + * @param {Function} callback A function to be executed after the timer + * expires. + * @param {number} delay The time, in milliseconds that the timer should + * wait before the specified function or code is executed. + * @param {*} [arg] An optional argument to be passed to the callback function + * when the timer expires. + * @returns {NodeJS.Timeout|FastTimer} + */ + setTimeout (callback, delay, arg) { + // If the delay is less than or equal to the RESOLUTION_MS value return a + // native Node.js Timer instance. + return delay <= RESOLUTION_MS + ? setTimeout(callback, delay, arg) + : new FastTimer(callback, delay, arg) }, + /** + * The clearTimeout method cancels an instantiated Timer previously created + * by calling setTimeout. + * + * @param {NodeJS.Timeout|FastTimer} timeout + */ clearTimeout (timeout) { - if (timeout instanceof Timeout) { + // If the timeout is a FastTimer, call its own clear method. + if (timeout[kFastTimer]) { + /** + * @type {FastTimer} + */ timeout.clear() + // Otherwise it is an instance of a native NodeJS.Timeout, so call the + // Node.js native clearTimeout function. } else { clearTimeout(timeout) } - } + }, + /** + * The setFastTimeout() method sets a fastTimer which executes a function once + * the timer expires. + * @param {Function} callback A function to be executed after the timer + * expires. + * @param {number} delay The time, in milliseconds that the timer should + * wait before the specified function or code is executed. + * @param {*} [arg] An optional argument to be passed to the callback function + * when the timer expires. + * @returns {FastTimer} + */ + setFastTimeout (callback, delay, arg) { + return new FastTimer(callback, delay, arg) + }, + /** + * The clearTimeout method cancels an instantiated FastTimer previously + * created by calling setFastTimeout. + * + * @param {FastTimer} timeout + */ + clearFastTimeout (timeout) { + timeout.clear() + }, + /** + * The now method returns the value of the internal fast timer clock. + * + * @returns {number} + */ + now () { + return fastNow + }, + /** + * Trigger the onTick function to process the fastTimers array. + * Exported for testing purposes only. + * Marking as deprecated to discourage any use outside of testing. + * @deprecated + * @param {number} [delay=0] The delay in milliseconds to add to the now value. + */ + tick (delay = 0) { + fastNow += delay - RESOLUTION_MS + 1 + onTick() + onTick() + }, + /** + * Reset FastTimers. + * Exported for testing purposes only. 
+ * Marking as deprecated to discourage any use outside of testing. + * @deprecated + */ + reset () { + fastNow = 0 + fastTimers.length = 0 + clearTimeout(fastNowTimeout) + fastNowTimeout = null + }, + /** + * Exporting for testing purposes only. + * Marking as deprecated to discourage any use outside of testing. + * @deprecated + */ + kFastTimer } diff --git a/deps/undici/src/lib/web/fetch/formdata-parser.js b/deps/undici/src/lib/web/fetch/formdata-parser.js index 7e567e9ec656cc..315a4626da5775 100644 --- a/deps/undici/src/lib/web/fetch/formdata-parser.js +++ b/deps/undici/src/lib/web/fetch/formdata-parser.js @@ -87,11 +87,21 @@ function multipartFormDataParser (input, mimeType) { // the first byte. const position = { position: 0 } - // Note: undici addition, allow \r\n before the body. - if (input[0] === 0x0d && input[1] === 0x0a) { + // Note: undici addition, allows leading and trailing CRLFs. + while (input[position.position] === 0x0d && input[position.position + 1] === 0x0a) { position.position += 2 } + let trailing = input.length + + while (input[trailing - 1] === 0x0a && input[trailing - 2] === 0x0d) { + trailing -= 2 + } + + if (trailing !== input.length) { + input = input.subarray(0, trailing) + } + // 5. While true: while (true) { // 5.1. If position points to a sequence of bytes starting with 0x2D 0x2D diff --git a/deps/undici/src/lib/web/fetch/index.js b/deps/undici/src/lib/web/fetch/index.js index 784e0c2cdbbf0b..3c7ca211c9998b 100644 --- a/deps/undici/src/lib/web/fetch/index.js +++ b/deps/undici/src/lib/web/fetch/index.js @@ -2150,9 +2150,15 @@ async function httpNetworkFetch ( finishFlush: zlib.constants.Z_SYNC_FLUSH })) } else if (coding === 'deflate') { - decoders.push(createInflate()) + decoders.push(createInflate({ + flush: zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })) } else if (coding === 'br') { - decoders.push(zlib.createBrotliDecompress()) + decoders.push(zlib.createBrotliDecompress({ + flush: zlib.constants.BROTLI_OPERATION_FLUSH, + finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH + })) } else { decoders.length = 0 break @@ -2160,13 +2166,19 @@ async function httpNetworkFetch ( } } + const onError = this.onError.bind(this) + resolve({ status, statusText, headersList, body: decoders.length - ? pipeline(this.body, ...decoders, () => { }) - : this.body.on('error', () => { }) + ? pipeline(this.body, ...decoders, (err) => { + if (err) { + this.onError(err) + } + }).on('error', onError) + : this.body.on('error', onError) }) return true diff --git a/deps/undici/src/lib/web/fetch/util.js b/deps/undici/src/lib/web/fetch/util.js index dc5ce9b392a68b..5101324a80cf78 100644 --- a/deps/undici/src/lib/web/fetch/util.js +++ b/deps/undici/src/lib/web/fetch/util.js @@ -1340,6 +1340,14 @@ function buildContentRange (rangeStart, rangeEnd, fullLength) { // interpreted as a zlib stream, otherwise it's interpreted as a // raw deflate stream. class InflateStream extends Transform { + #zlibOptions + + /** @param {zlib.ZlibOptions} [zlibOptions] */ + constructor (zlibOptions) { + super() + this.#zlibOptions = zlibOptions + } + _transform (chunk, encoding, callback) { if (!this._inflateStream) { if (chunk.length === 0) { @@ -1347,8 +1355,8 @@ class InflateStream extends Transform { return } this._inflateStream = (chunk[0] & 0x0F) === 0x08 - ? zlib.createInflate() - : zlib.createInflateRaw() + ? 
zlib.createInflate(this.#zlibOptions) + : zlib.createInflateRaw(this.#zlibOptions) this._inflateStream.on('data', this.push.bind(this)) this._inflateStream.on('end', () => this.push(null)) @@ -1367,8 +1375,12 @@ class InflateStream extends Transform { } } -function createInflate () { - return new InflateStream() +/** + * @param {zlib.ZlibOptions} [zlibOptions] + * @returns {InflateStream} + */ +function createInflate (zlibOptions) { + return new InflateStream(zlibOptions) } /** diff --git a/deps/undici/src/package-lock.json b/deps/undici/src/package-lock.json index 67e74b4e445fee..ee33c5fa5e55ac 100644 --- a/deps/undici/src/package-lock.json +++ b/deps/undici/src/package-lock.json @@ -1,18 +1,18 @@ { "name": "undici", - "version": "6.19.8", + "version": "6.20.0", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "undici", - "version": "6.19.8", + "version": "6.20.0", "license": "MIT", "devDependencies": { "@fastify/busboy": "2.1.1", "@matteo.collina/tspl": "^0.1.1", "@sinonjs/fake-timers": "^11.1.0", - "@types/node": "^18.0.3", + "@types/node": "~18.19.50", "abort-controller": "^3.0.0", "borp": "^0.15.0", "c8": "^10.0.0", @@ -39,20 +39,30 @@ } }, "node_modules/@actions/core": { - "version": "1.10.1", - "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.10.1.tgz", - "integrity": "sha512-3lBR9EDAY+iYIpTnTIXmWcNbX3T2kCkAEQGIQx4NVQ0575nk2k3GRZDTPQG+vVtS2izSLmINlxXf0uLtnrTP+g==", + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/@actions/core/-/core-1.11.1.tgz", + "integrity": "sha512-hXJCSrkwfA46Vd9Z3q4cpEpHB1rL5NG04+/rbqW9d3+CSvtB1tYe8UTpAlixa1vj0m/ULglfEK2UKxMGxCxv5A==", "dev": true, "license": "MIT", "dependencies": { - "@actions/http-client": "^2.0.1", - "uuid": "^8.3.2" + "@actions/exec": "^1.1.1", + "@actions/http-client": "^2.0.1" + } + }, + "node_modules/@actions/exec": { + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/@actions/exec/-/exec-1.1.1.tgz", + "integrity": "sha512-+sCcHHbVdk93a0XT19ECtO/gIXoxvdsgQLzb2fE2/5sIZmWQuluYyjPQtrtTHdU1YzTZ7bAPN4sITq2xi1679w==", + "dev": true, + "license": "MIT", + "dependencies": { + "@actions/io": "^1.0.1" } }, "node_modules/@actions/http-client": { - "version": "2.2.2", - "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.2.tgz", - "integrity": "sha512-2TvX5LskKQzDDQI+bobIDGAjkn0NJiQlg4MTrKnZ8HfQ7nDEUbtJ1ytxPDb2bfk3Hr2XD99X8oAJISAmIoiSAQ==", + "version": "2.2.3", + "resolved": "https://registry.npmjs.org/@actions/http-client/-/http-client-2.2.3.tgz", + "integrity": "sha512-mx8hyJi/hjFvbPokCg4uRd4ZX78t+YyRPtnKWwIl+RzNaVuFpQHfmlGVfsKEJN8LwTCvL+DfVgAM04XaHkm6bA==", "dev": true, "license": "MIT", "dependencies": { @@ -60,6 +70,13 @@ "undici": "^5.25.4" } }, + "node_modules/@actions/io": { + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/@actions/io/-/io-1.1.3.tgz", + "integrity": "sha512-wi9JjgKLYS7U/z8PPbco+PvTb/nRWjeoFlJ1Qer83k/3C5PHQi28hiVdeE2kHXmIL99mQFawx8qt/JPjZilJ8Q==", + "dev": true, + "license": "MIT" + }, "node_modules/@ampproject/remapping": { "version": "2.3.0", "resolved": "https://registry.npmjs.org/@ampproject/remapping/-/remapping-2.3.0.tgz", @@ -75,13 +92,13 @@ } }, "node_modules/@babel/code-frame": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.24.7.tgz", - "integrity": "sha512-BcYH1CVJBO9tvyIZ2jVeXgSIMvGZ2FDRvDdOIVQyuklNKSsx+eppDEBq/g47Ayw+RqNFE+URvOShmf+f/qwAlA==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/code-frame/-/code-frame-7.25.7.tgz", + 
"integrity": "sha512-0xZJFNE5XMpENsgfHYTw8FbX4kv53mFLn2i3XPoq69LyhYSCBJtitaHx9QnsVTrsogI4Z3+HtEfZ2/GFPOtf5g==", "dev": true, "license": "MIT", "dependencies": { - "@babel/highlight": "^7.24.7", + "@babel/highlight": "^7.25.7", "picocolors": "^1.0.0" }, "engines": { @@ -89,9 +106,9 @@ } }, "node_modules/@babel/compat-data": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.2.tgz", - "integrity": "sha512-bYcppcpKBvX4znYaPEeFau03bp89ShqNMLs+rmdptMw+heSZh9+z84d2YG+K7cYLbWwzdjtDoW/uqZmPjulClQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/compat-data/-/compat-data-7.25.7.tgz", + "integrity": "sha512-9ickoLz+hcXCeh7jrcin+/SLWm+GkxE2kTvoYyp38p4WkdFXfQJxDFGWp/YHjiKLPx06z2A7W8XKuqbReXDzsw==", "dev": true, "license": "MIT", "engines": { @@ -99,22 +116,22 @@ } }, "node_modules/@babel/core": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.2.tgz", - "integrity": "sha512-BBt3opiCOxUr9euZ5/ro/Xv8/V7yJ5bjYMqG/C1YAo8MIKAnumZalCN+msbci3Pigy4lIQfPUpfMM27HMGaYEA==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/core/-/core-7.25.7.tgz", + "integrity": "sha512-yJ474Zv3cwiSOO9nXJuqzvwEeM+chDuQ8GJirw+pZ91sCGCyOZ3dJkVE09fTV0VEVzXyLWhh3G/AolYTPX7Mow==", "dev": true, "license": "MIT", "dependencies": { "@ampproject/remapping": "^2.2.0", - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.25.0", - "@babel/helper-compilation-targets": "^7.25.2", - "@babel/helper-module-transforms": "^7.25.2", - "@babel/helpers": "^7.25.0", - "@babel/parser": "^7.25.0", - "@babel/template": "^7.25.0", - "@babel/traverse": "^7.25.2", - "@babel/types": "^7.25.2", + "@babel/code-frame": "^7.25.7", + "@babel/generator": "^7.25.7", + "@babel/helper-compilation-targets": "^7.25.7", + "@babel/helper-module-transforms": "^7.25.7", + "@babel/helpers": "^7.25.7", + "@babel/parser": "^7.25.7", + "@babel/template": "^7.25.7", + "@babel/traverse": "^7.25.7", + "@babel/types": "^7.25.7", "convert-source-map": "^2.0.0", "debug": "^4.1.0", "gensync": "^1.0.0-beta.2", @@ -130,31 +147,31 @@ } }, "node_modules/@babel/generator": { - "version": "7.25.0", - "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.0.tgz", - "integrity": "sha512-3LEEcj3PVW8pW2R1SR1M89g/qrYk/m/mB/tLqn7dn4sbBUQyTqnlod+II2U4dqiGtUmkcnAmkMDralTFZttRiw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/generator/-/generator-7.25.7.tgz", + "integrity": "sha512-5Dqpl5fyV9pIAD62yK9P7fcA768uVPUyrQmqpqstHWgMma4feF1x/oFysBCVZLY5wJ2GkMUCdsNDnGZrPoR6rA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/types": "^7.25.0", + "@babel/types": "^7.25.7", "@jridgewell/gen-mapping": "^0.3.5", "@jridgewell/trace-mapping": "^0.3.25", - "jsesc": "^2.5.1" + "jsesc": "^3.0.2" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-compilation-targets": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.2.tgz", - "integrity": "sha512-U2U5LsSaZ7TAt3cfaymQ8WHh0pxvdHoEk6HVpaexxixjyEquMh0L0YNJNM6CTGKMXV1iksi0iZkGw4AcFkPaaw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-compilation-targets/-/helper-compilation-targets-7.25.7.tgz", + "integrity": "sha512-DniTEax0sv6isaw6qSQSfV4gVRNtw2rte8HHM45t9ZR0xILaufBRNkpMifCRiAPyvL4ACD6v0gfCwCmtOQaV4A==", "dev": true, "license": "MIT", "dependencies": { - "@babel/compat-data": "^7.25.2", - "@babel/helper-validator-option": 
"^7.24.8", - "browserslist": "^4.23.1", + "@babel/compat-data": "^7.25.7", + "@babel/helper-validator-option": "^7.25.7", + "browserslist": "^4.24.0", "lru-cache": "^5.1.1", "semver": "^6.3.1" }, @@ -163,30 +180,30 @@ } }, "node_modules/@babel/helper-module-imports": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.24.7.tgz", - "integrity": "sha512-8AyH3C+74cgCVVXow/myrynrAGv+nTVg5vKu2nZph9x7RcRwzmh0VFallJuFTZ9mx6u4eSdXZfcOzSqTUm0HCA==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-imports/-/helper-module-imports-7.25.7.tgz", + "integrity": "sha512-o0xCgpNmRohmnoWKQ0Ij8IdddjyBFE4T2kagL/x6M3+4zUgc+4qTOUBoNe4XxDskt1HPKO007ZPiMgLDq2s7Kw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-module-transforms": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.2.tgz", - "integrity": "sha512-BjyRAbix6j/wv83ftcVJmBt72QtHI56C7JXZoG2xATiLpmoC7dpd8WnkikExHDVPpi/3qCmO6WY1EaXOluiecQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-module-transforms/-/helper-module-transforms-7.25.7.tgz", + "integrity": "sha512-k/6f8dKG3yDz/qCwSM+RKovjMix563SLxQFo0UhRNo239SP6n9u5/eLtKD6EAjwta2JHJ49CsD8pms2HdNiMMQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-module-imports": "^7.24.7", - "@babel/helper-simple-access": "^7.24.7", - "@babel/helper-validator-identifier": "^7.24.7", - "@babel/traverse": "^7.25.2" + "@babel/helper-module-imports": "^7.25.7", + "@babel/helper-simple-access": "^7.25.7", + "@babel/helper-validator-identifier": "^7.25.7", + "@babel/traverse": "^7.25.7" }, "engines": { "node": ">=6.9.0" @@ -196,9 +213,9 @@ } }, "node_modules/@babel/helper-plugin-utils": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.24.8.tgz", - "integrity": "sha512-FFWx5142D8h2Mgr/iPVGH5G7w6jDn4jUSpZTyDnQO0Yn7Ks2Kuz6Pci8H6MPCoUJegd/UZQ3tAvfLCxQSnWWwg==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-plugin-utils/-/helper-plugin-utils-7.25.7.tgz", + "integrity": "sha512-eaPZai0PiqCi09pPs3pAFfl/zYgGaE6IdXtYvmf0qlcDTd3WCtO7JWCcRd64e0EQrcYgiHibEZnOGsSY4QSgaw==", "dev": true, "license": "MIT", "engines": { @@ -206,23 +223,23 @@ } }, "node_modules/@babel/helper-simple-access": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.24.7.tgz", - "integrity": "sha512-zBAIvbCMh5Ts+b86r/CjU+4XGYIs+R1j951gxI3KmmxBMhCg4oQMsv6ZXQ64XOm/cvzfU1FmoCyt6+owc5QMYg==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-simple-access/-/helper-simple-access-7.25.7.tgz", + "integrity": "sha512-FPGAkJmyoChQeM+ruBGIDyrT2tKfZJO8NcxdC+CWNJi7N8/rZpSxK7yvBJ5O/nF1gfu5KzN7VKG3YVSLFfRSxQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/traverse": "^7.24.7", - "@babel/types": "^7.24.7" + "@babel/traverse": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/helper-string-parser": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.24.8.tgz", - "integrity": 
"sha512-pO9KhhRcuUyGnJWwyEgnRJTSIZHiT+vMD0kPeD+so0l7mxkMT19g3pjY9GTnHySck/hDzq+dtW/4VgnMkippsQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-string-parser/-/helper-string-parser-7.25.7.tgz", + "integrity": "sha512-CbkjYdsJNHFk8uqpEkpCvRs3YRp9tY6FmFY7wLMSYuGYkrdUi7r2lc4/wqsvlHoMznX3WJ9IP8giGPq68T/Y6g==", "dev": true, "license": "MIT", "engines": { @@ -230,9 +247,9 @@ } }, "node_modules/@babel/helper-validator-identifier": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.24.7.tgz", - "integrity": "sha512-rR+PBcQ1SMQDDyF6X0wxtG8QyLCgUB0eRAGguqRLfkCA87l7yAP7ehq8SNj96OOGTO8OBV70KhuFYcIkHXOg0w==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-identifier/-/helper-validator-identifier-7.25.7.tgz", + "integrity": "sha512-AM6TzwYqGChO45oiuPqwL2t20/HdMC1rTPAesnBCgPCSF1x3oN9MVUwQV2iyz4xqWrctwK5RNC8LV22kaQCNYg==", "dev": true, "license": "MIT", "engines": { @@ -240,9 +257,9 @@ } }, "node_modules/@babel/helper-validator-option": { - "version": "7.24.8", - "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.24.8.tgz", - "integrity": "sha512-xb8t9tD1MHLungh/AIoWYN+gVHaB9kwlu8gffXGSt3FFEIT7RjS+xWbc2vUD1UTZdIpKj/ab3rdqJ7ufngyi2Q==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helper-validator-option/-/helper-validator-option-7.25.7.tgz", + "integrity": "sha512-ytbPLsm+GjArDYXJ8Ydr1c/KJuutjF2besPNbIZnZ6MKUxi/uTA22t2ymmA4WFjZFpjiAMO0xuuJPqK2nvDVfQ==", "dev": true, "license": "MIT", "engines": { @@ -250,27 +267,27 @@ } }, "node_modules/@babel/helpers": { - "version": "7.25.0", - "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.0.tgz", - "integrity": "sha512-MjgLZ42aCm0oGjJj8CtSM3DB8NOOf8h2l7DCTePJs29u+v7yO/RBX9nShlKMgFnRks/Q4tBAe7Hxnov9VkGwLw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/helpers/-/helpers-7.25.7.tgz", + "integrity": "sha512-Sv6pASx7Esm38KQpF/U/OXLwPPrdGHNKoeblRxgZRLXnAtnkEe4ptJPDtAZM7fBLadbc1Q07kQpSiGQ0Jg6tRA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/template": "^7.25.0", - "@babel/types": "^7.25.0" + "@babel/template": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/highlight": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.24.7.tgz", - "integrity": "sha512-EStJpq4OuY8xYfhGVXngigBJRWxftKX9ksiGDnmlY3o7B/V7KIAc9X4oiK87uPJSc/vs5L869bem5fhZa8caZw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/highlight/-/highlight-7.25.7.tgz", + "integrity": "sha512-iYyACpW3iW8Fw+ZybQK+drQre+ns/tKpXbNESfrhNnPLIklLbXr7MYJ6gPEd0iETGLOK+SxMjVvKb/ffmk+FEw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-validator-identifier": "^7.24.7", + "@babel/helper-validator-identifier": "^7.25.7", "chalk": "^2.4.2", "js-tokens": "^4.0.0", "picocolors": "^1.0.0" @@ -358,13 +375,13 @@ } }, "node_modules/@babel/parser": { - "version": "7.25.3", - "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.3.tgz", - "integrity": "sha512-iLTJKDbJ4hMvFPgQwwsVoxtHyWpKKPBrxkANrSYewDPaPpT5py5yeVkgPIJ7XYXhndxJpaA3PyALSXQ7u8e/Dw==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.25.7.tgz", + "integrity": "sha512-aZn7ETtQsjjGG5HruveUK06cU3Hljuhd9Iojm4M8WWv3wLE6OkE5PWbDUkItmMgegmccaITudyuW5RPYrYlgWw==", "dev": true, "license": "MIT", 
"dependencies": { - "@babel/types": "^7.25.2" + "@babel/types": "^7.25.7" }, "bin": { "parser": "bin/babel-parser.js" @@ -429,13 +446,13 @@ } }, "node_modules/@babel/plugin-syntax-import-attributes": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.24.7.tgz", - "integrity": "sha512-hbX+lKKeUMGihnK8nvKqmXBInriT3GVjzXKFriV3YC6APGxMbP8RZNFwy91+hocLXq90Mta+HshoB31802bb8A==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-import-attributes/-/plugin-syntax-import-attributes-7.25.7.tgz", + "integrity": "sha512-AqVo+dguCgmpi/3mYBdu9lkngOBlQ2w2vnNpa6gfiCxQZLzV4ZbhsXitJ2Yblkoe1VQwtHSaNmIaGll/26YWRw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.7" }, "engines": { "node": ">=6.9.0" @@ -471,13 +488,13 @@ } }, "node_modules/@babel/plugin-syntax-jsx": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.24.7.tgz", - "integrity": "sha512-6ddciUPe/mpMnOKv/U+RSd2vvVy+Yw/JfBB0ZHYjEZt9NLHmCUylNYlsbqCCS1Bffjlb0fCwC9Vqz+sBz6PsiQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-jsx/-/plugin-syntax-jsx-7.25.7.tgz", + "integrity": "sha512-ruZOnKO+ajVL/MVx+PwNBPOkrnXTXoWMtte1MBpegfCArhqOe3Bj52avVj1huLLxNKYKXYaSxZ2F+woK1ekXfw==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.7" }, "engines": { "node": ">=6.9.0" @@ -597,13 +614,13 @@ } }, "node_modules/@babel/plugin-syntax-typescript": { - "version": "7.24.7", - "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.24.7.tgz", - "integrity": "sha512-c/+fVeJBB0FeKsFvwytYiUD+LBvhHjGSI0g446PRGdSVGZLRNArBUno2PETbAly3tpiNAQR5XaZ+JslxkotsbA==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/plugin-syntax-typescript/-/plugin-syntax-typescript-7.25.7.tgz", + "integrity": "sha512-rR+5FDjpCHqqZN2bzZm18bVYGaejGq5ZkpVCJLXor/+zlSrSoc4KWcHI0URVWjl/68Dyr1uwZUz/1njycEAv9g==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-plugin-utils": "^7.24.7" + "@babel/helper-plugin-utils": "^7.25.7" }, "engines": { "node": ">=6.9.0" @@ -613,32 +630,32 @@ } }, "node_modules/@babel/template": { - "version": "7.25.0", - "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.0.tgz", - "integrity": "sha512-aOOgh1/5XzKvg1jvVz7AVrx2piJ2XBi227DHmbY6y+bM9H2FlN+IfecYu4Xl0cNiiVejlsCri89LUsbj8vJD9Q==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/template/-/template-7.25.7.tgz", + "integrity": "sha512-wRwtAgI3bAS+JGU2upWNL9lSlDcRCqD05BZ1n3X2ONLH1WilFP6O1otQjeMK/1g0pvYcXC7b/qVUB1keofjtZA==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/parser": "^7.25.0", - "@babel/types": "^7.25.0" + "@babel/code-frame": "^7.25.7", + "@babel/parser": "^7.25.7", + "@babel/types": "^7.25.7" }, "engines": { "node": ">=6.9.0" } }, "node_modules/@babel/traverse": { - "version": "7.25.3", - "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.3.tgz", - "integrity": "sha512-HefgyP1x754oGCsKmV5reSmtV7IXj/kpaE1XYY+D9G5PvKKoFfSbiS4M77MdjuwlZKDIKFCffq9rPU+H/s3ZdQ==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/traverse/-/traverse-7.25.7.tgz", + "integrity": 
"sha512-jatJPT1Zjqvh/1FyJs6qAHL+Dzb7sTb+xr7Q+gM1b+1oBsMsQQ4FkVKb6dFlJvLlVssqkRzV05Jzervt9yhnzg==", "dev": true, "license": "MIT", "dependencies": { - "@babel/code-frame": "^7.24.7", - "@babel/generator": "^7.25.0", - "@babel/parser": "^7.25.3", - "@babel/template": "^7.25.0", - "@babel/types": "^7.25.2", + "@babel/code-frame": "^7.25.7", + "@babel/generator": "^7.25.7", + "@babel/parser": "^7.25.7", + "@babel/template": "^7.25.7", + "@babel/types": "^7.25.7", "debug": "^4.3.1", "globals": "^11.1.0" }, @@ -647,14 +664,14 @@ } }, "node_modules/@babel/types": { - "version": "7.25.2", - "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.2.tgz", - "integrity": "sha512-YTnYtra7W9e6/oAZEHj0bJehPRUlLH9/fbpT5LfB0NhQXyALCRkRs3zH9v07IYhkgpqX6Z78FnuccZr/l4Fs4Q==", + "version": "7.25.7", + "resolved": "https://registry.npmjs.org/@babel/types/-/types-7.25.7.tgz", + "integrity": "sha512-vwIVdXG+j+FOpkwqHRcBgHLYNL7XMkufrlaFvL9o6Ai9sJn9+PdyIL5qa0XzTZw084c+u9LOls53eoZWP/W5WQ==", "dev": true, "license": "MIT", "dependencies": { - "@babel/helper-string-parser": "^7.24.8", - "@babel/helper-validator-identifier": "^7.24.7", + "@babel/helper-string-parser": "^7.25.7", + "@babel/helper-validator-identifier": "^7.25.7", "to-fast-properties": "^2.0.0" }, "engines": { @@ -685,9 +702,9 @@ } }, "node_modules/@eslint-community/regexpp": { - "version": "4.11.0", - "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.0.tgz", - "integrity": "sha512-G/M/tIiMrTAxEWRfLfQJMmGNX28IxBg4PBz8XqQhqUHLFI6TL2htpIB1iQCj144V5ee/JaKyT9/WZ0MGZWfA7A==", + "version": "4.11.1", + "resolved": "https://registry.npmjs.org/@eslint-community/regexpp/-/regexpp-4.11.1.tgz", + "integrity": "sha512-m4DVN9ZqskZoLU5GlWZadwDnYo3vAEydiUayB9widCl9ffWx2IvPnp6n3on5rJmziJSw9Bv+Z3ChDVdMwXCY8Q==", "dev": true, "license": "MIT", "engines": { @@ -792,9 +809,9 @@ } }, "node_modules/@eslint/js": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.0.tgz", - "integrity": "sha512-Ys+3g2TaW7gADOJzPt83SJtCDhMjndcDMFVQ/Tj9iA1BfJzFKD9mAUXT3OenpuPHbI6P/myECxRJrofUsDx/5g==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/@eslint/js/-/js-8.57.1.tgz", + "integrity": "sha512-d9zaMRSTIKDLhctzH12MtXvJKSSUhaHcjV+2Z+GK+EEY7XKpP5yR4x+N3TAcHTcu963nIr+TMcCb4DBCYX1z6Q==", "dev": true, "license": "MIT", "engines": { @@ -812,14 +829,14 @@ } }, "node_modules/@humanwhocodes/config-array": { - "version": "0.11.14", - "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.11.14.tgz", - "integrity": "sha512-3T8LkOmg45BV5FICb15QQMsyUSWrQ8AygVfC7ZG32zOalnqrilm018ZVCw0eapXux8FtA33q8PSRSstjee3jSg==", + "version": "0.13.0", + "resolved": "https://registry.npmjs.org/@humanwhocodes/config-array/-/config-array-0.13.0.tgz", + "integrity": "sha512-DZLEEqFWQFiyK6h5YIeynKx7JlvCYWL0cImfSRXZ9l4Sg2efkFGTuFf6vzXjK1cq6IYkU+Eg/JizXw+TD2vRNw==", "deprecated": "Use @eslint/config-array instead", "dev": true, "license": "Apache-2.0", "dependencies": { - "@humanwhocodes/object-schema": "^2.0.2", + "@humanwhocodes/object-schema": "^2.0.3", "debug": "^4.3.1", "minimatch": "^3.0.5" }, @@ -1495,9 +1512,9 @@ } }, "node_modules/@reporters/github": { - "version": "1.7.0", - "resolved": "https://registry.npmjs.org/@reporters/github/-/github-1.7.0.tgz", - "integrity": "sha512-5velzo4PWkfTqvZD2Ins5Wg38KSjPsWofS4jKTw61IjL4z2vLFGx2KL2Aqpa7bTOo3NYtc7w0zbyaHsBmZtCxQ==", + "version": "1.7.1", + "resolved": "https://registry.npmjs.org/@reporters/github/-/github-1.7.1.tgz", + "integrity": 
"sha512-PzM4jrcNPilW9YjbZ3VTImtAK9AkWaF4XpUSxPJLE3Dmo2tbjABT/vAveKQAkXq1NR5BeCpbQ5vsxthklc8D2g==", "dev": true, "license": "MIT", "dependencies": { @@ -1505,6 +1522,13 @@ "stack-utils": "^2.0.6" } }, + "node_modules/@rtsao/scc": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/@rtsao/scc/-/scc-1.1.0.tgz", + "integrity": "sha512-zt6OdqaDoOnJ1ZYsCYGt9YmWzDXl4vQdKTyJev62gFhRGKdx7mcT54V9KIjg+d2wi9EXsPvAPKe7i7WjfVWB8g==", + "dev": true, + "license": "MIT" + }, "node_modules/@sinclair/typebox": { "version": "0.27.8", "resolved": "https://registry.npmjs.org/@sinclair/typebox/-/typebox-0.27.8.tgz", @@ -1523,13 +1547,13 @@ } }, "node_modules/@sinonjs/fake-timers": { - "version": "11.2.2", - "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-11.2.2.tgz", - "integrity": "sha512-G2piCSxQ7oWOxwGSAyFHfPIsyeJGXYtc6mFbnFA+kRXkiEnTl8c/8jul2S329iFBnDI9HGoeWWAZvuvOkZccgw==", + "version": "11.3.1", + "resolved": "https://registry.npmjs.org/@sinonjs/fake-timers/-/fake-timers-11.3.1.tgz", + "integrity": "sha512-EVJO7nW5M/F5Tur0Rf2z/QoMo+1Ia963RiMtapiQrEWvY0iBUvADo8Beegwjpnle5BHkyHuoxSTW3jF43H1XRA==", "dev": true, "license": "BSD-3-Clause", "dependencies": { - "@sinonjs/commons": "^3.0.0" + "@sinonjs/commons": "^3.0.1" } }, "node_modules/@tsd/typescript": { @@ -1599,9 +1623,9 @@ } }, "node_modules/@types/estree": { - "version": "1.0.5", - "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", - "integrity": "sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "version": "1.0.6", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.6.tgz", + "integrity": "sha512-AYnb1nQyY49te+VRAVgmzfcgjYS91mY5P0TKUDCLEM+gNnA+3T6rWITXRLYCpahpqSQbN5cE+gHpnPyXjHWxcw==", "dev": true, "license": "MIT" }, @@ -1664,9 +1688,9 @@ "license": "MIT" }, "node_modules/@types/node": { - "version": "18.19.45", - "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.45.tgz", - "integrity": "sha512-VZxPKNNhjKmaC1SUYowuXSRSMGyQGmQjvvA1xE4QZ0xce2kLtEhPDS+kqpCPBZYgqblCLQ2DAjSzmgCM5auvhA==", + "version": "18.19.55", + "resolved": "https://registry.npmjs.org/@types/node/-/node-18.19.55.tgz", + "integrity": "sha512-zzw5Vw52205Zr/nmErSEkN5FLqXPuKX/k5d1D7RKHATGqU7y6YfX9QxZraUzUrFGqH6XzOzG196BC35ltJC4Cw==", "dev": true, "license": "MIT", "dependencies": { @@ -1804,9 +1828,9 @@ } }, "node_modules/ansi-regex": { - "version": "6.0.1", - "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", - "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "version": "6.1.0", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.1.0.tgz", + "integrity": "sha512-7HSX4QQb4CspciLpVFwyRe79O3xsIZDDLER21kERQ71oaPodF8jL725AgJMFAYbooIqolJoRLuM81SpeUkpkvA==", "dev": true, "license": "MIT", "engines": { @@ -2402,9 +2426,9 @@ } }, "node_modules/browserslist": { - "version": "4.23.3", - "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.23.3.tgz", - "integrity": "sha512-btwCFJVjI4YWDNfau8RhZ+B1Q/VLoUITrm3RlP6y1tYGWIOa+InuYiRGXUBXo8nA1qKmHMyLB/iVQg5TT4eFoA==", + "version": "4.24.0", + "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.24.0.tgz", + "integrity": "sha512-Rmb62sR1Zpjql25eSanFGEhAxcFwfA1K0GuQcLoaJBAcENegrQut3hYdhXFF1obQfiDyqIW/cLM5HSJ/9k884A==", "dev": true, "funding": [ { @@ -2422,8 +2446,8 @@ ], "license": "MIT", "dependencies": { - "caniuse-lite": "^1.0.30001646", - "electron-to-chromium": "^1.5.4", + 
"caniuse-lite": "^1.0.30001663", + "electron-to-chromium": "^1.5.28", "node-releases": "^2.0.18", "update-browserslist-db": "^1.1.0" }, @@ -2626,9 +2650,9 @@ } }, "node_modules/caniuse-lite": { - "version": "1.0.30001651", - "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001651.tgz", - "integrity": "sha512-9Cf+Xv1jJNe1xPZLGuUXLNkE1BoDkqRqYyFJ9TDYSqhduqA4hu4oR9HluGoWYQC/aj8WHjsGVV+bwkh0+tegRg==", + "version": "1.0.30001667", + "resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001667.tgz", + "integrity": "sha512-7LTwJjcRkzKFmtqGsibMeuXmvFDfZq/nzIjnmgCGzKKRVzjD72selLDK1oPF/Oxzmt4fNcPvTDvGqSDG4tCALw==", "dev": true, "funding": [ { @@ -2690,9 +2714,9 @@ } }, "node_modules/cjs-module-lexer": { - "version": "1.3.1", - "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.3.1.tgz", - "integrity": "sha512-a3KdPAANPbNE4ZUv9h6LckSl9zLsYOP4MBmhIPkRaeyybt+r4UghLvq+xw/YwUcC1gqylCkL4rdVs3Lwupjm4Q==", + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/cjs-module-lexer/-/cjs-module-lexer-1.4.1.tgz", + "integrity": "sha512-cuSVIHi9/9E/+821Qjdvngor+xpnlwnuwIyZOaLmHBVdXL+gP+I6QQB9VkO7RI77YIcTV+S1W9AreJ5eN63JBA==", "dev": true, "license": "MIT" }, @@ -2959,25 +2983,18 @@ } }, "node_modules/cssstyle": { - "version": "4.0.1", - "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.0.1.tgz", - "integrity": "sha512-8ZYiJ3A/3OkDd093CBT/0UKDWry7ak4BdPTFP2+QEP7cmhouyq/Up709ASSj2cK02BbZiMgk7kYjZNS4QP5qrQ==", + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cssstyle/-/cssstyle-4.1.0.tgz", + "integrity": "sha512-h66W1URKpBS5YMI/V8PyXvTMFT8SupJ1IzoIV8IeBC/ji8WVmrO8dGlTi+2dh6whmdk6BiKJLD/ZBkhWbcg6nA==", "dev": true, "license": "MIT", "dependencies": { - "rrweb-cssom": "^0.6.0" + "rrweb-cssom": "^0.7.1" }, "engines": { "node": ">=18" } }, - "node_modules/cssstyle/node_modules/rrweb-cssom": { - "version": "0.6.0", - "resolved": "https://registry.npmjs.org/rrweb-cssom/-/rrweb-cssom-0.6.0.tgz", - "integrity": "sha512-APM0Gt1KoXBz0iIkkdB/kfvGOwC4UuJFeG/c+yV7wSc7q96cG/kJ0HiYCnzivD9SB53cLV1MlHFNfOuPaadYSw==", - "dev": true, - "license": "MIT" - }, "node_modules/data-urls": { "version": "5.0.0", "resolved": "https://registry.npmjs.org/data-urls/-/data-urls-5.0.0.tgz", @@ -3047,13 +3064,13 @@ } }, "node_modules/debug": { - "version": "4.3.6", - "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.6.tgz", - "integrity": "sha512-O/09Bd4Z1fBrU4VzkhFqVgpPzaGbw6Sm9FEkBT1A/YBXQFGuuSxa1dN2nxgxS34JmKXqYx8CZAwEVoJFImUXIg==", + "version": "4.3.7", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.7.tgz", + "integrity": "sha512-Er2nc/H7RrMXZBFCEim6TCmMk02Z8vLC2Rbi1KEBggpo0fS6l0S1nnapwmIi3yW/+GOJap1Krg4w0Hg80oCqgQ==", "dev": true, "license": "MIT", "dependencies": { - "ms": "2.1.2" + "ms": "^2.1.3" }, "engines": { "node": ">=6.0" @@ -3253,9 +3270,9 @@ "license": "MIT" }, "node_modules/electron-to-chromium": { - "version": "1.5.11", - "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.11.tgz", - "integrity": "sha512-R1CccCDYqndR25CaXFd6hp/u9RaaMcftMkphmvuepXr5b1vfLkRml6aWVeBhXJ7rbevHkKEMJtz8XqPf7ffmew==", + "version": "1.5.33", + "resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.33.tgz", + "integrity": "sha512-+cYTcFB1QqD4j4LegwLfpCNxifb6dDFUAwk6RsLusCwIaZI6or2f+q8rs5tTB2YC53HhOlIbEaqHMAAC8IOIwA==", "dev": true, "license": "ISC" }, @@ -3387,9 +3404,9 @@ } }, "node_modules/es-iterator-helpers": { - "version": "1.0.19", - "resolved": 
"https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.0.19.tgz", - "integrity": "sha512-zoMwbCcH5hwUkKJkT8kDIBZSz9I6mVG//+lDCinLCGov4+r7NIy0ld8o03M0cJxl2spVf6ESYVS6/gpIfq1FFw==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/es-iterator-helpers/-/es-iterator-helpers-1.1.0.tgz", + "integrity": "sha512-/SurEfycdyssORP/E+bj4sEu1CWw4EmLDsHynHwSXQ7utgbrMRWW195pTrCjFgFCddf/UkYm3oqKPRq5i8bJbw==", "dev": true, "license": "MIT", "dependencies": { @@ -3400,12 +3417,12 @@ "es-set-tostringtag": "^2.0.3", "function-bind": "^1.1.2", "get-intrinsic": "^1.2.4", - "globalthis": "^1.0.3", + "globalthis": "^1.0.4", "has-property-descriptors": "^1.0.2", "has-proto": "^1.0.3", "has-symbols": "^1.0.3", "internal-slot": "^1.0.7", - "iterator.prototype": "^1.1.2", + "iterator.prototype": "^1.1.3", "safe-array-concat": "^1.1.2" }, "engines": { @@ -3469,9 +3486,9 @@ } }, "node_modules/escalade": { - "version": "3.1.2", - "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.1.2.tgz", - "integrity": "sha512-ErCHMCae19vR8vQGe50xIsVomy19rg6gFu3+r3jkEO46suLMWBksvVyoGgQV+jOfl84ZSOSlmv6Gxa89PmTGmA==", + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/escalade/-/escalade-3.2.0.tgz", + "integrity": "sha512-WUj2qlxaQtO4g6Pq5c29GTcWGDyd8itL8zTlipgECz3JesAiiOKotd8JU6otB3PACgG6xkJUyVhboMS+bje/jA==", "dev": true, "license": "MIT", "engines": { @@ -3489,17 +3506,18 @@ } }, "node_modules/eslint": { - "version": "8.57.0", - "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.0.tgz", - "integrity": "sha512-dZ6+mexnaTIbSBZWgou51U6OmzIhYM2VcNdtiTtI7qPNZm35Akpr0f6vtw3w1Kmn5PYo+tZVfh13WrhpS6oLqQ==", + "version": "8.57.1", + "resolved": "https://registry.npmjs.org/eslint/-/eslint-8.57.1.tgz", + "integrity": "sha512-ypowyDxpVSYpkXr9WPv2PAZCtNip1Mv5KTW0SCurXv/9iOpcrH9PaqUElksqEB6pChqHGDRCFTyrZlGhnLNGiA==", + "deprecated": "This version is no longer supported. 
Please see https://eslint.org/version-support for other options.", "dev": true, "license": "MIT", "dependencies": { "@eslint-community/eslint-utils": "^4.2.0", "@eslint-community/regexpp": "^4.6.1", "@eslint/eslintrc": "^2.1.4", - "@eslint/js": "8.57.0", - "@humanwhocodes/config-array": "^0.11.14", + "@eslint/js": "8.57.1", + "@humanwhocodes/config-array": "^0.13.0", "@humanwhocodes/module-importer": "^1.0.1", "@nodelib/fs.walk": "^1.2.8", "@ungap/structured-clone": "^1.2.0", @@ -3690,9 +3708,9 @@ } }, "node_modules/eslint-module-utils": { - "version": "2.8.1", - "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.8.1.tgz", - "integrity": "sha512-rXDXR3h7cs7dy9RNpUlQf80nX31XWJEyGq1tRMo+6GsO5VmTe4UTwtmonAD4ZkAsrfMVDA2wlGJ3790Ys+D49Q==", + "version": "2.12.0", + "resolved": "https://registry.npmjs.org/eslint-module-utils/-/eslint-module-utils-2.12.0.tgz", + "integrity": "sha512-wALZ0HFoytlyh/1+4wuZ9FJCD/leWHQzzrxJ8+rebyReSLk7LApMyd3WJaLVoN+D5+WIdJyDK1c6JnE65V4Zyg==", "dev": true, "license": "MIT", "dependencies": { @@ -3764,35 +3782,37 @@ } }, "node_modules/eslint-plugin-import": { - "version": "2.29.1", - "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.29.1.tgz", - "integrity": "sha512-BbPC0cuExzhiMo4Ff1BTVwHpjjv28C5R+btTOGaCRC7UEz801up0JadwkeSk5Ued6TG34uaczuVuH6qyy5YUxw==", + "version": "2.31.0", + "resolved": "https://registry.npmjs.org/eslint-plugin-import/-/eslint-plugin-import-2.31.0.tgz", + "integrity": "sha512-ixmkI62Rbc2/w8Vfxyh1jQRTdRTF52VxwRVHl/ykPAmqG+Nb7/kNn+byLP0LxPgI7zWA16Jt82SybJInmMia3A==", "dev": true, "license": "MIT", "dependencies": { - "array-includes": "^3.1.7", - "array.prototype.findlastindex": "^1.2.3", + "@rtsao/scc": "^1.1.0", + "array-includes": "^3.1.8", + "array.prototype.findlastindex": "^1.2.5", "array.prototype.flat": "^1.3.2", "array.prototype.flatmap": "^1.3.2", "debug": "^3.2.7", "doctrine": "^2.1.0", "eslint-import-resolver-node": "^0.3.9", - "eslint-module-utils": "^2.8.0", - "hasown": "^2.0.0", - "is-core-module": "^2.13.1", + "eslint-module-utils": "^2.12.0", + "hasown": "^2.0.2", + "is-core-module": "^2.15.1", "is-glob": "^4.0.3", "minimatch": "^3.1.2", - "object.fromentries": "^2.0.7", - "object.groupby": "^1.0.1", - "object.values": "^1.1.7", + "object.fromentries": "^2.0.8", + "object.groupby": "^1.0.3", + "object.values": "^1.2.0", "semver": "^6.3.1", + "string.prototype.trimend": "^1.0.8", "tsconfig-paths": "^3.15.0" }, "engines": { "node": ">=4" }, "peerDependencies": { - "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8" + "eslint": "^2 || ^3 || ^4 || ^5 || ^6 || ^7.2.0 || ^8 || ^9" } }, "node_modules/eslint-plugin-import/node_modules/brace-expansion": { @@ -3922,9 +3942,9 @@ } }, "node_modules/eslint-plugin-react": { - "version": "7.35.0", - "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.35.0.tgz", - "integrity": "sha512-v501SSMOWv8gerHkk+IIQBkcGRGrO2nfybfj5pLxuJNFTPxxA3PSryhXTK+9pNbtkggheDdsC0E9Q8CuPk6JKA==", + "version": "7.37.1", + "resolved": "https://registry.npmjs.org/eslint-plugin-react/-/eslint-plugin-react-7.37.1.tgz", + "integrity": "sha512-xwTnwDqzbDRA8uJ7BMxPs/EXRB3i8ZfnOIp8BsxEQkT0nHPp+WWceqGgo6rKb9ctNi8GJLDT4Go5HAWELa/WMg==", "dev": true, "license": "MIT", "dependencies": { @@ -4382,9 +4402,9 @@ } }, "node_modules/fast-check": { - "version": "3.21.0", - "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.21.0.tgz", - "integrity": 
"sha512-QpmbiqRFRZ+SIlBJh6xi5d/PgXciUc/xWKc4Vi2RWEHHIRx6oM3f0fWNna++zP9VB5HUBTObUK9gTKQP3vVcrQ==", + "version": "3.22.0", + "resolved": "https://registry.npmjs.org/fast-check/-/fast-check-3.22.0.tgz", + "integrity": "sha512-8HKz3qXqnHYp/VCNn2qfjHdAdcI8zcSqOyX64GOMukp7SL2bfzfeDKjSd+UyECtejccaZv3LcvZTm9YDD22iCQ==", "dev": true, "funding": [ { @@ -5058,9 +5078,9 @@ } }, "node_modules/husky": { - "version": "9.1.4", - "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.4.tgz", - "integrity": "sha512-bho94YyReb4JV7LYWRWxZ/xr6TtOTt8cMfmQ39MQYJ7f/YE268s3GdghGwi+y4zAeqewE5zYLvuhV0M0ijsDEA==", + "version": "9.1.6", + "resolved": "https://registry.npmjs.org/husky/-/husky-9.1.6.tgz", + "integrity": "sha512-sqbjZKK7kf44hfdE94EoX8MZNk0n7HeW37O4YrVGCF4wzgQjp+akPAkfUK5LZ6KuR/6sqeAVuXHji+RzQgOn5A==", "dev": true, "license": "MIT", "bin": { @@ -5291,9 +5311,9 @@ } }, "node_modules/is-core-module": { - "version": "2.15.0", - "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.0.tgz", - "integrity": "sha512-Dd+Lb2/zvk9SKy1TGCt1wFJFo/MWBPMX5x7KcvLajWTGuomczdQX61PvY5yK6SVACwpoexWo81IfFyoKY2QnTA==", + "version": "2.15.1", + "resolved": "https://registry.npmjs.org/is-core-module/-/is-core-module-2.15.1.tgz", + "integrity": "sha512-z0vtXSwucUJtANQWldhbtbt7BnL0vxiFjIdDLAatwhDYty2bad6s+rijD6Ri4YuYJubLzIJLUidCh09e1djEVQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5751,9 +5771,9 @@ } }, "node_modules/iterator.prototype": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.2.tgz", - "integrity": "sha512-DR33HMMr8EzwuRL8Y9D3u2BMj8+RqSE850jfGu59kS7tbmPLzGkZmVSfyCFSDxuZiEY6Rzt3T2NA/qU+NwVj1w==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/iterator.prototype/-/iterator.prototype-1.1.3.tgz", + "integrity": "sha512-FW5iMbeQ6rBGm/oKgzq2aW4KvAGpxPzYES8N4g4xNXUKpL1mclMvOe+76AcLDTvD+Ze+sOpVhgdAQEKF4L9iGQ==", "dev": true, "license": "MIT", "dependencies": { @@ -5762,6 +5782,9 @@ "has-symbols": "^1.0.3", "reflect.getprototypeof": "^1.0.4", "set-function-name": "^2.0.1" + }, + "engines": { + "node": ">= 0.4" } }, "node_modules/jackspeak": { @@ -6606,9 +6629,9 @@ } }, "node_modules/jsdom": { - "version": "24.1.1", - "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.1.tgz", - "integrity": "sha512-5O1wWV99Jhq4DV7rCLIoZ/UIhyQeDR7wHVyZAHAshbrvZsLs+Xzz7gtwnlJTJDjleiTKh54F4dXrX70vJQTyJQ==", + "version": "24.1.3", + "resolved": "https://registry.npmjs.org/jsdom/-/jsdom-24.1.3.tgz", + "integrity": "sha512-MyL55p3Ut3cXbeBEG7Hcv0mVM8pp8PBNWxRqchZnSfAiES1v1mRnMeFfaHWIPULpwsYfvO+ZmMZz5tGCnjzDUQ==", "dev": true, "license": "MIT", "dependencies": { @@ -6647,16 +6670,16 @@ } }, "node_modules/jsesc": { - "version": "2.5.2", - "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-2.5.2.tgz", - "integrity": "sha512-OYu7XEzjkCQ3C5Ps3QIZsQfNpqoJyZZA99wd9aWd05NCtC5pWOkShK2mkL6HXQR6/Cy2lbNdPlZBpuQHXE63gA==", + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/jsesc/-/jsesc-3.0.2.tgz", + "integrity": "sha512-xKqzzWXDttJuOcawBt4KnKHHIf5oQ/Cxax+0PWFG+DFDgHNAdi+TXECADI+RYiFUMmx8792xsMbbgXj4CwnP4g==", "dev": true, "license": "MIT", "bin": { "jsesc": "bin/jsesc" }, "engines": { - "node": ">=4" + "node": ">=6" } }, "node_modules/json-buffer": { @@ -7018,9 +7041,9 @@ } }, "node_modules/micromatch": { - "version": "4.0.7", - "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", - "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + 
"version": "4.0.8", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.8.tgz", + "integrity": "sha512-PXwfBhYu0hBCPw8Dn0E+WDYb7af3dSLVWKi3HGv84IdF4TyFoC0ysxFd0Goxw7nSv4T/PzEJQxsYsEiFCKo2BA==", "dev": true, "license": "MIT", "dependencies": { @@ -7139,9 +7162,9 @@ } }, "node_modules/ms": { - "version": "2.1.2", - "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", - "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "version": "2.1.3", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz", + "integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==", "dev": true, "license": "MIT" }, @@ -7245,9 +7268,9 @@ } }, "node_modules/nwsapi": { - "version": "2.2.12", - "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.12.tgz", - "integrity": "sha512-qXDmcVlZV4XRtKFzddidpfVP4oMSGhga+xdMc25mv8kaLUHtgzCDhUxkrN8exkGdTlLNaXj7CV3GtON7zuGZ+w==", + "version": "2.2.13", + "resolved": "https://registry.npmjs.org/nwsapi/-/nwsapi-2.2.13.tgz", + "integrity": "sha512-cTGB9ptp9dY9A5VbMSe7fQBcl/tt22Vcqdq8+eN93rblOuE0aCFu4aZ2vMwct/2t+lFnosm8RkQW1I0Omb1UtQ==", "dev": true, "license": "MIT" }, @@ -7495,9 +7518,9 @@ } }, "node_modules/package-json-from-dist": { - "version": "1.0.0", - "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.0.tgz", - "integrity": "sha512-dATvCeZN/8wQsGywez1mzHtTlP22H8OEfPrVMLNr4/eGa+ijtLn/6M5f0dY8UKNrC2O9UCU6SSoG3qRKnt7STw==", + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/package-json-from-dist/-/package-json-from-dist-1.0.1.tgz", + "integrity": "sha512-UEZIS3/by4OC8vL3P2dTXRETpebLI2NiI5vIrjaD/5UtrkFX/tNbwjTSRAGC/+7CAo2pIcBaRgWmcBBHcsaCIw==", "dev": true, "license": "BlueOak-1.0.0" }, @@ -7618,9 +7641,9 @@ } }, "node_modules/picocolors": { - "version": "1.0.1", - "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", - "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.1.0.tgz", + "integrity": "sha512-TQ92mBOW0l3LeMeyLV6mzy/kWr8lkd/hp3mTg7wYK7zJhuBStmGMBG0BdeDZS/dZx1IukaX6Bk11zcln25o1Aw==", "dev": true, "license": "ISC" }, @@ -8305,16 +8328,16 @@ } }, "node_modules/regexp.prototype.flags": { - "version": "1.5.2", - "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.2.tgz", - "integrity": "sha512-NcDiDkTLuPR+++OCKB0nWafEmhg/Da8aUPLPMQbK+bxKKCm1/S5he+AqYa4PlMCVBalb4/yxIRub6qkEx5yJbw==", + "version": "1.5.3", + "resolved": "https://registry.npmjs.org/regexp.prototype.flags/-/regexp.prototype.flags-1.5.3.tgz", + "integrity": "sha512-vqlC04+RQoFalODCbCumG2xIOvapzVMHwsyIGM/SIE8fRhFFsXeH8/QQ+s0T0kDAhKc4k30s73/0ydkHQz6HlQ==", "dev": true, "license": "MIT", "dependencies": { - "call-bind": "^1.0.6", + "call-bind": "^1.0.7", "define-properties": "^1.2.1", "es-errors": "^1.3.0", - "set-function-name": "^2.0.1" + "set-function-name": "^2.0.2" }, "engines": { "node": ">= 0.4" @@ -8836,9 +8859,9 @@ } }, "node_modules/spdx-license-ids": { - "version": "3.0.18", - "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.18.tgz", - "integrity": "sha512-xxRs31BqRYHwiMzudOrpSiHtZ8i/GeionCBDSilhYRj+9gIcI8wCZTlXZKu9vZIVqViP3dcp9qE5G6AlIaD+TQ==", + "version": "3.0.20", + "resolved": "https://registry.npmjs.org/spdx-license-ids/-/spdx-license-ids-3.0.20.tgz", + 
"integrity": "sha512-jg25NiDV/1fLtSgEgyvVyDunvaNHbuwF9lfNV17gSmPFAlYzdfNBlLtLzXTevwkPj7DhGbmN9VnmJIgLnhvaBw==", "dev": true, "license": "CC0-1.0" }, @@ -8863,9 +8886,9 @@ } }, "node_modules/standard": { - "version": "17.1.0", - "resolved": "https://registry.npmjs.org/standard/-/standard-17.1.0.tgz", - "integrity": "sha512-jaDqlNSzLtWYW4lvQmU0EnxWMUGQiwHasZl5ZEIwx3S/ijZDjZOzs1y1QqKwKs5vqnFpGtizo4NOYX2s0Voq/g==", + "version": "17.1.2", + "resolved": "https://registry.npmjs.org/standard/-/standard-17.1.2.tgz", + "integrity": "sha512-WLm12WoXveKkvnPnPnaFUUHuOB2cUdAsJ4AiGHL2G0UNMrcRAWY2WriQaV8IQ3oRmYr0AWUbLNr94ekYFAHOrA==", "dev": true, "funding": [ { @@ -8889,8 +8912,8 @@ "eslint-plugin-import": "^2.27.5", "eslint-plugin-n": "^15.7.0", "eslint-plugin-promise": "^6.1.1", - "eslint-plugin-react": "^7.32.2", - "standard-engine": "^15.0.0", + "eslint-plugin-react": "^7.36.1", + "standard-engine": "^15.1.0", "version-guard": "^1.1.1" }, "bin": { @@ -9424,9 +9447,9 @@ } }, "node_modules/tsd": { - "version": "0.31.1", - "resolved": "https://registry.npmjs.org/tsd/-/tsd-0.31.1.tgz", - "integrity": "sha512-sSL84A0SFwx2xGMWrxlGaarKFSQszWjJS2vgNDDLwatytzg2aq6ShlwHsBYxRNmjzXISODwMva5ZOdAg/4AoOA==", + "version": "0.31.2", + "resolved": "https://registry.npmjs.org/tsd/-/tsd-0.31.2.tgz", + "integrity": "sha512-VplBAQwvYrHzVihtzXiUVXu5bGcr7uH1juQZ1lmKgkuGNGT+FechUCqmx9/zk7wibcqR2xaNEwCkDyKh+VVZnQ==", "dev": true, "license": "MIT", "dependencies": { @@ -9586,9 +9609,9 @@ "license": "MIT" }, "node_modules/typescript": { - "version": "5.5.4", - "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.5.4.tgz", - "integrity": "sha512-Mtq29sKDAEYP7aljRgtPOpTvOfbwRWlS6dPRzwjdE+C0R4brX/GUyhHSecbHMFLNBLcJIPt9nl9yG5TZ1weH+Q==", + "version": "5.6.3", + "resolved": "https://registry.npmjs.org/typescript/-/typescript-5.6.3.tgz", + "integrity": "sha512-hjcS1mhfuyi4WW8IWtjP7brDrG2cuDZukyrYrSauoXGNgx0S7zceP07adYkJycEr56BOUTNPzbInooiN3fn1qw==", "dev": true, "license": "Apache-2.0", "bin": { @@ -9659,9 +9682,9 @@ } }, "node_modules/update-browserslist-db": { - "version": "1.1.0", - "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.0.tgz", - "integrity": "sha512-EdRAaAyk2cUE1wOf2DkEhzxqOQvFOoRJFNS6NeyJ01Gp2beMRpBAINjM2iDXE3KCuKhwnvHIQCJm6ThL2Z+HzQ==", + "version": "1.1.1", + "resolved": "https://registry.npmjs.org/update-browserslist-db/-/update-browserslist-db-1.1.1.tgz", + "integrity": "sha512-R8UzCaa9Az+38REPiJ1tXlImTJXlVfgHZsglwBD/k6nj76ctsH1E3q4doGrukiLQd3sGQYu56r5+lo5r94l29A==", "dev": true, "funding": [ { @@ -9679,8 +9702,8 @@ ], "license": "MIT", "dependencies": { - "escalade": "^3.1.2", - "picocolors": "^1.0.1" + "escalade": "^3.2.0", + "picocolors": "^1.1.0" }, "bin": { "update-browserslist-db": "cli.js" @@ -9717,16 +9740,6 @@ "dev": true, "license": "MIT" }, - "node_modules/uuid": { - "version": "8.3.2", - "resolved": "https://registry.npmjs.org/uuid/-/uuid-8.3.2.tgz", - "integrity": "sha512-+NYs2QeMWy+GWFOEm9xnn6HCDp0l7QBD7ml8zLUmJ+93Q5NF0NocErnwkTkXVFNiX3/fpC6afS8Dhb/gz7R7eg==", - "dev": true, - "license": "MIT", - "bin": { - "uuid": "dist/bin/uuid" - } - }, "node_modules/v8-to-istanbul": { "version": "9.3.0", "resolved": "https://registry.npmjs.org/v8-to-istanbul/-/v8-to-istanbul-9.3.0.tgz", @@ -9754,9 +9767,9 @@ } }, "node_modules/version-guard": { - "version": "1.1.2", - "resolved": "https://registry.npmjs.org/version-guard/-/version-guard-1.1.2.tgz", - "integrity": 
"sha512-D8d+YxCUpoqtCnQzDxm6SF7DLU3gr2535T4khAtMq4osBahsQnmSxuwXFdrbAdDGG8Uokzfis/jvyeFPdmlc7w==", + "version": "1.1.3", + "resolved": "https://registry.npmjs.org/version-guard/-/version-guard-1.1.3.tgz", + "integrity": "sha512-JwPr6erhX53EWH/HCSzfy1tTFrtPXUe927wdM1jqBBeYp1OM+qPHjWbsvv6pIBduqdgxxS+ScfG7S28pzyr2DQ==", "dev": true, "license": "0BSD", "engines": { diff --git a/deps/undici/src/package.json b/deps/undici/src/package.json index bf777b24a2b90a..80b5b95e3f18b8 100644 --- a/deps/undici/src/package.json +++ b/deps/undici/src/package.json @@ -1,6 +1,6 @@ { "name": "undici", - "version": "6.19.8", + "version": "6.20.0", "description": "An HTTP/1.1 client, written from scratch for Node.js", "homepage": "https://undici.nodejs.org", "bugs": { @@ -105,7 +105,7 @@ "@fastify/busboy": "2.1.1", "@matteo.collina/tspl": "^0.1.1", "@sinonjs/fake-timers": "^11.1.0", - "@types/node": "^18.0.3", + "@types/node": "~18.19.50", "abort-controller": "^3.0.0", "borp": "^0.15.0", "c8": "^10.0.0", diff --git a/deps/undici/src/types/eventsource.d.ts b/deps/undici/src/types/eventsource.d.ts index eecda8c0151191..deccd730041e0b 100644 --- a/deps/undici/src/types/eventsource.d.ts +++ b/deps/undici/src/types/eventsource.d.ts @@ -2,8 +2,6 @@ import { MessageEvent, ErrorEvent } from './websocket' import Dispatcher from './dispatcher' import { - EventTarget, - Event, EventListenerOptions, AddEventListenerOptions, EventListenerOrEventListenerObject diff --git a/deps/undici/src/types/filereader.d.ts b/deps/undici/src/types/filereader.d.ts index f05d231b2ffac7..d1c0f9ef7238ec 100644 --- a/deps/undici/src/types/filereader.d.ts +++ b/deps/undici/src/types/filereader.d.ts @@ -1,7 +1,7 @@ /// import { Blob } from 'buffer' -import { DOMException, Event, EventInit, EventTarget } from './patch' +import { DOMException, EventInit } from './patch' export declare class FileReader { __proto__: EventTarget & FileReader diff --git a/deps/undici/src/types/interceptors.d.ts b/deps/undici/src/types/interceptors.d.ts index fab6da08c0d9b5..24166b61f4f86d 100644 --- a/deps/undici/src/types/interceptors.d.ts +++ b/deps/undici/src/types/interceptors.d.ts @@ -7,9 +7,11 @@ declare namespace Interceptors { export type DumpInterceptorOpts = { maxSize?: number } export type RetryInterceptorOpts = RetryHandler.RetryOptions export type RedirectInterceptorOpts = { maxRedirections?: number } - + export type ResponseErrorInterceptorOpts = { throwOnError: boolean } + export function createRedirectInterceptor(opts: RedirectInterceptorOpts): Dispatcher.DispatcherComposeInterceptor export function dump(opts?: DumpInterceptorOpts): Dispatcher.DispatcherComposeInterceptor export function retry(opts?: RetryInterceptorOpts): Dispatcher.DispatcherComposeInterceptor export function redirect(opts?: RedirectInterceptorOpts): Dispatcher.DispatcherComposeInterceptor + export function responseError(opts?: ResponseErrorInterceptorOpts): Dispatcher.DispatcherComposeInterceptor } diff --git a/deps/undici/src/types/patch.d.ts b/deps/undici/src/types/patch.d.ts index 3871acfebc658a..4ac38450e67fbd 100644 --- a/deps/undici/src/types/patch.d.ts +++ b/deps/undici/src/types/patch.d.ts @@ -6,44 +6,6 @@ export type DOMException = typeof globalThis extends { DOMException: infer T } ? T : any -export type EventTarget = typeof globalThis extends { EventTarget: infer T } - ? 
T - : { - addEventListener( - type: string, - listener: any, - options?: any, - ): void - dispatchEvent(event: Event): boolean - removeEventListener( - type: string, - listener: any, - options?: any | boolean, - ): void - } - -export type Event = typeof globalThis extends { Event: infer T } - ? T - : { - readonly bubbles: boolean - cancelBubble: () => void - readonly cancelable: boolean - readonly composed: boolean - composedPath(): [EventTarget?] - readonly currentTarget: EventTarget | null - readonly defaultPrevented: boolean - readonly eventPhase: 0 | 2 - readonly isTrusted: boolean - preventDefault(): void - returnValue: boolean - readonly srcElement: EventTarget | null - stopImmediatePropagation(): void - stopPropagation(): void - readonly target: EventTarget | null - readonly timeStamp: number - readonly type: string - } - export interface EventInit { bubbles?: boolean cancelable?: boolean diff --git a/deps/undici/src/types/websocket.d.ts b/deps/undici/src/types/websocket.d.ts index d1be45235d4242..dfdd8156cceb14 100644 --- a/deps/undici/src/types/websocket.d.ts +++ b/deps/undici/src/types/websocket.d.ts @@ -3,8 +3,6 @@ import type { Blob } from 'buffer' import type { MessagePort } from 'worker_threads' import { - EventTarget, - Event, EventInit, EventListenerOptions, AddEventListenerOptions, diff --git a/deps/undici/undici.js b/deps/undici/undici.js index acbece91687872..34796507bd7f9d 100644 --- a/deps/undici/undici.js +++ b/deps/undici/undici.js @@ -247,6 +247,20 @@ var require_errors = __commonJS({ this.headers = headers; } }; + var ResponseError = class extends UndiciError { + static { + __name(this, "ResponseError"); + } + constructor(message, code, { headers, data }) { + super(message); + this.name = "ResponseError"; + this.message = message || "Response error"; + this.code = "UND_ERR_RESPONSE"; + this.statusCode = code; + this.data = data; + this.headers = headers; + } + }; var SecureProxyConnectionError = class extends UndiciError { static { __name(this, "SecureProxyConnectionError"); @@ -281,6 +295,7 @@ var require_errors = __commonJS({ BalancedPoolMissingUpstreamError, ResponseExceededMaxSizeError, RequestRetryError, + ResponseError, SecureProxyConnectionError }; } @@ -1266,7 +1281,7 @@ var require_util = __commonJS({ if (!host) { return null; } - assert.strictEqual(typeof host, "string"); + assert(typeof host === "string"); const servername = getHostname(host); if (net.isIP(servername)) { return ""; @@ -2166,6 +2181,242 @@ var require_request = __commonJS({ } }); +// lib/util/timers.js +var require_timers = __commonJS({ + "lib/util/timers.js"(exports2, module2) { + "use strict"; + var fastNow = 0; + var RESOLUTION_MS = 1e3; + var TICK_MS = (RESOLUTION_MS >> 1) - 1; + var fastNowTimeout; + var kFastTimer = Symbol("kFastTimer"); + var fastTimers = []; + var NOT_IN_LIST = -2; + var TO_BE_CLEARED = -1; + var PENDING = 0; + var ACTIVE = 1; + function onTick() { + fastNow += TICK_MS; + let idx = 0; + let len = fastTimers.length; + while (idx < len) { + const timer = fastTimers[idx]; + if (timer._state === PENDING) { + timer._idleStart = fastNow - TICK_MS; + timer._state = ACTIVE; + } else if (timer._state === ACTIVE && fastNow >= timer._idleStart + timer._idleTimeout) { + timer._state = TO_BE_CLEARED; + timer._idleStart = -1; + timer._onTimeout(timer._timerArg); + } + if (timer._state === TO_BE_CLEARED) { + timer._state = NOT_IN_LIST; + if (--len !== 0) { + fastTimers[idx] = fastTimers[len]; + } + } else { + ++idx; + } + } + fastTimers.length = len; + if (fastTimers.length !== 
0) { + refreshTimeout(); + } + } + __name(onTick, "onTick"); + function refreshTimeout() { + if (fastNowTimeout) { + fastNowTimeout.refresh(); + } else { + clearTimeout(fastNowTimeout); + fastNowTimeout = setTimeout(onTick, TICK_MS); + if (fastNowTimeout.unref) { + fastNowTimeout.unref(); + } + } + } + __name(refreshTimeout, "refreshTimeout"); + var FastTimer = class { + static { + __name(this, "FastTimer"); + } + [kFastTimer] = true; + /** + * The state of the timer, which can be one of the following: + * - NOT_IN_LIST (-2) + * - TO_BE_CLEARED (-1) + * - PENDING (0) + * - ACTIVE (1) + * + * @type {-2|-1|0|1} + * @private + */ + _state = NOT_IN_LIST; + /** + * The number of milliseconds to wait before calling the callback. + * + * @type {number} + * @private + */ + _idleTimeout = -1; + /** + * The time in milliseconds when the timer was started. This value is used to + * calculate when the timer should expire. + * + * @type {number} + * @default -1 + * @private + */ + _idleStart = -1; + /** + * The function to be executed when the timer expires. + * @type {Function} + * @private + */ + _onTimeout; + /** + * The argument to be passed to the callback when the timer expires. + * + * @type {*} + * @private + */ + _timerArg; + /** + * @constructor + * @param {Function} callback A function to be executed after the timer + * expires. + * @param {number} delay The time, in milliseconds that the timer should wait + * before the specified function or code is executed. + * @param {*} arg + */ + constructor(callback, delay, arg) { + this._onTimeout = callback; + this._idleTimeout = delay; + this._timerArg = arg; + this.refresh(); + } + /** + * Sets the timer's start time to the current time, and reschedules the timer + * to call its callback at the previously specified duration adjusted to the + * current time. + * Using this on a timer that has already called its callback will reactivate + * the timer. + * + * @returns {void} + */ + refresh() { + if (this._state === NOT_IN_LIST) { + fastTimers.push(this); + } + if (!fastNowTimeout || fastTimers.length === 1) { + refreshTimeout(); + } + this._state = PENDING; + } + /** + * The `clear` method cancels the timer, preventing it from executing. + * + * @returns {void} + * @private + */ + clear() { + this._state = TO_BE_CLEARED; + this._idleStart = -1; + } + }; + module2.exports = { + /** + * The setTimeout() method sets a timer which executes a function once the + * timer expires. + * @param {Function} callback A function to be executed after the timer + * expires. + * @param {number} delay The time, in milliseconds that the timer should + * wait before the specified function or code is executed. + * @param {*} [arg] An optional argument to be passed to the callback function + * when the timer expires. + * @returns {NodeJS.Timeout|FastTimer} + */ + setTimeout(callback, delay, arg) { + return delay <= RESOLUTION_MS ? setTimeout(callback, delay, arg) : new FastTimer(callback, delay, arg); + }, + /** + * The clearTimeout method cancels an instantiated Timer previously created + * by calling setTimeout. + * + * @param {NodeJS.Timeout|FastTimer} timeout + */ + clearTimeout(timeout) { + if (timeout[kFastTimer]) { + timeout.clear(); + } else { + clearTimeout(timeout); + } + }, + /** + * The setFastTimeout() method sets a fastTimer which executes a function once + * the timer expires. + * @param {Function} callback A function to be executed after the timer + * expires. 
+ * @param {number} delay The time, in milliseconds that the timer should + * wait before the specified function or code is executed. + * @param {*} [arg] An optional argument to be passed to the callback function + * when the timer expires. + * @returns {FastTimer} + */ + setFastTimeout(callback, delay, arg) { + return new FastTimer(callback, delay, arg); + }, + /** + * The clearTimeout method cancels an instantiated FastTimer previously + * created by calling setFastTimeout. + * + * @param {FastTimer} timeout + */ + clearFastTimeout(timeout) { + timeout.clear(); + }, + /** + * The now method returns the value of the internal fast timer clock. + * + * @returns {number} + */ + now() { + return fastNow; + }, + /** + * Trigger the onTick function to process the fastTimers array. + * Exported for testing purposes only. + * Marking as deprecated to discourage any use outside of testing. + * @deprecated + * @param {number} [delay=0] The delay in milliseconds to add to the now value. + */ + tick(delay = 0) { + fastNow += delay - RESOLUTION_MS + 1; + onTick(); + onTick(); + }, + /** + * Reset FastTimers. + * Exported for testing purposes only. + * Marking as deprecated to discourage any use outside of testing. + * @deprecated + */ + reset() { + fastNow = 0; + fastTimers.length = 0; + clearTimeout(fastNowTimeout); + fastNowTimeout = null; + }, + /** + * Exporting for testing purposes only. + * Marking as deprecated to discourage any use outside of testing. + * @deprecated + */ + kFastTimer + }; + } +}); + // lib/core/connect.js var require_connect = __commonJS({ "lib/core/connect.js"(exports2, module2) { @@ -2174,6 +2425,10 @@ var require_connect = __commonJS({ var assert = require("node:assert"); var util = require_util(); var { InvalidArgumentError, ConnectTimeoutError } = require_errors(); + var timers = require_timers(); + function noop() { + } + __name(noop, "noop"); var tls; var SessionCache; if (global.FinalizationRegistry && !(process.env.NODE_V8_COVERAGE || process.env.UNDICI_NO_FG)) { @@ -2246,8 +2501,9 @@ var require_connect = __commonJS({ } servername = servername || options.servername || util.getServerName(host) || null; const sessionKey = servername || hostname; - const session = customSession || sessionCache.get(sessionKey) || null; assert(sessionKey); + const session = customSession || sessionCache.get(sessionKey) || null; + port = port || 443; socket = tls.connect({ highWaterMark: 16384, // TLS in node can't have bigger HWM anyway... @@ -2259,7 +2515,7 @@ var require_connect = __commonJS({ ALPNProtocols: allowH2 ? ["http/1.1", "h2"] : ["http/1.1"], socket: httpSocket, // upgrade socket connection - port: port || 443, + port, host: hostname }); socket.on("session", function(session2) { @@ -2267,12 +2523,13 @@ var require_connect = __commonJS({ }); } else { assert(!httpSocket, "httpSocket can only be sent on TLS update"); + port = port || 80; socket = net.connect({ highWaterMark: 64 * 1024, // Same as nodejs fs streams. ...options, localAddress, - port: port || 80, + port, host: hostname }); } @@ -2280,16 +2537,16 @@ var require_connect = __commonJS({ const keepAliveInitialDelay = options.keepAliveInitialDelay === void 0 ? 6e4 : options.keepAliveInitialDelay; socket.setKeepAlive(true, keepAliveInitialDelay); } - const cancelTimeout = setupTimeout(() => onConnectTimeout(socket), timeout); + const clearConnectTimeout = setupConnectTimeout(new WeakRef(socket), { timeout, hostname, port }); socket.setNoDelay(true).once(protocol === "https:" ? 
"secureConnect" : "connect", function() { - cancelTimeout(); + queueMicrotask(clearConnectTimeout); if (callback) { const cb = callback; callback = null; cb(null, this); } }).on("error", function(err) { - cancelTimeout(); + queueMicrotask(clearConnectTimeout); if (callback) { const cb = callback; callback = null; @@ -2300,34 +2557,45 @@ var require_connect = __commonJS({ }, "connect"); } __name(buildConnector, "buildConnector"); - function setupTimeout(onConnectTimeout2, timeout) { - if (!timeout) { - return () => { - }; + var setupConnectTimeout = process.platform === "win32" ? (socketWeakRef, opts) => { + if (!opts.timeout) { + return noop; } let s1 = null; let s2 = null; - const timeoutId = setTimeout(() => { + const fastTimer = timers.setFastTimeout(() => { s1 = setImmediate(() => { - if (process.platform === "win32") { - s2 = setImmediate(() => onConnectTimeout2()); - } else { - onConnectTimeout2(); - } + s2 = setImmediate(() => onConnectTimeout(socketWeakRef.deref(), opts)); }); - }, timeout); + }, opts.timeout); return () => { - clearTimeout(timeoutId); + timers.clearFastTimeout(fastTimer); clearImmediate(s1); clearImmediate(s2); }; - } - __name(setupTimeout, "setupTimeout"); - function onConnectTimeout(socket) { + } : (socketWeakRef, opts) => { + if (!opts.timeout) { + return noop; + } + let s1 = null; + const fastTimer = timers.setFastTimeout(() => { + s1 = setImmediate(() => { + onConnectTimeout(socketWeakRef.deref(), opts); + }); + }, opts.timeout); + return () => { + timers.clearFastTimeout(fastTimer); + clearImmediate(s1); + }; + }; + function onConnectTimeout(socket, opts) { let message = "Connect Timeout Error"; if (Array.isArray(socket.autoSelectFamilyAttemptedAddresses)) { - message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(", ")})`; + message += ` (attempted addresses: ${socket.autoSelectFamilyAttemptedAddresses.join(", ")},`; + } else { + message += ` (attempted address: ${opts.hostname}:${opts.port},`; } + message += ` timeout: ${opts.timeout}ms)`; util.destroy(socket, new ConnectTimeoutError(message)); } __name(onConnectTimeout, "onConnectTimeout"); @@ -2335,94 +2603,6 @@ var require_connect = __commonJS({ } }); -// lib/util/timers.js -var require_timers = __commonJS({ - "lib/util/timers.js"(exports2, module2) { - "use strict"; - var TICK_MS = 499; - var fastNow = Date.now(); - var fastNowTimeout; - var fastTimers = []; - function onTimeout() { - fastNow = Date.now(); - let len = fastTimers.length; - let idx = 0; - while (idx < len) { - const timer = fastTimers[idx]; - if (timer.state === 0) { - timer.state = fastNow + timer.delay - TICK_MS; - } else if (timer.state > 0 && fastNow >= timer.state) { - timer.state = -1; - timer.callback(timer.opaque); - } - if (timer.state === -1) { - timer.state = -2; - if (idx !== len - 1) { - fastTimers[idx] = fastTimers.pop(); - } else { - fastTimers.pop(); - } - len -= 1; - } else { - idx += 1; - } - } - if (fastTimers.length > 0) { - refreshTimeout(); - } - } - __name(onTimeout, "onTimeout"); - function refreshTimeout() { - if (fastNowTimeout?.refresh) { - fastNowTimeout.refresh(); - } else { - clearTimeout(fastNowTimeout); - fastNowTimeout = setTimeout(onTimeout, TICK_MS); - if (fastNowTimeout.unref) { - fastNowTimeout.unref(); - } - } - } - __name(refreshTimeout, "refreshTimeout"); - var Timeout = class { - static { - __name(this, "Timeout"); - } - constructor(callback, delay, opaque) { - this.callback = callback; - this.delay = delay; - this.opaque = opaque; - this.state = -2; - this.refresh(); 
- } - refresh() { - if (this.state === -2) { - fastTimers.push(this); - if (!fastNowTimeout || fastTimers.length === 1) { - refreshTimeout(); - } - } - this.state = 0; - } - clear() { - this.state = -1; - } - }; - module2.exports = { - setTimeout(callback, delay, opaque) { - return delay <= 1e3 ? setTimeout(callback, delay, opaque) : new Timeout(callback, delay, opaque); - }, - clearTimeout(timeout) { - if (timeout instanceof Timeout) { - timeout.clear(); - } else { - clearTimeout(timeout); - } - } - }; - } -}); - // lib/llhttp/utils.js var require_utils = __commonJS({ "lib/llhttp/utils.js"(exports2) { @@ -4523,13 +4703,19 @@ var require_util2 = __commonJS({ static { __name(this, "InflateStream"); } + #zlibOptions; + /** @param {zlib.ZlibOptions} [zlibOptions] */ + constructor(zlibOptions) { + super(); + this.#zlibOptions = zlibOptions; + } _transform(chunk, encoding, callback) { if (!this._inflateStream) { if (chunk.length === 0) { callback(); return; } - this._inflateStream = (chunk[0] & 15) === 8 ? zlib.createInflate() : zlib.createInflateRaw(); + this._inflateStream = (chunk[0] & 15) === 8 ? zlib.createInflate(this.#zlibOptions) : zlib.createInflateRaw(this.#zlibOptions); this._inflateStream.on("data", this.push.bind(this)); this._inflateStream.on("end", () => this.push(null)); this._inflateStream.on("error", (err) => this.destroy(err)); @@ -4544,8 +4730,8 @@ var require_util2 = __commonJS({ callback(); } }; - function createInflate() { - return new InflateStream(); + function createInflate(zlibOptions) { + return new InflateStream(zlibOptions); } __name(createInflate, "createInflate"); function extractMimeType(headers) { @@ -4984,9 +5170,16 @@ var require_formdata_parser = __commonJS({ const boundary = Buffer.from(`--${boundaryString}`, "utf8"); const entryList = []; const position = { position: 0 }; - if (input[0] === 13 && input[1] === 10) { + while (input[position.position] === 13 && input[position.position + 1] === 10) { position.position += 2; } + let trailing = input.length; + while (input[trailing - 1] === 10 && input[trailing - 2] === 13) { + trailing -= 2; + } + if (trailing !== input.length) { + input = input.subarray(0, trailing); + } while (true) { if (input.subarray(position.position, position.position + boundary.length).equals(boundary)) { position.position += boundary.length; @@ -5584,35 +5777,35 @@ var require_client_h1 = __commonJS({ return 0; }, wasm_on_status: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p); + assert(currentParser.ptr === p); const start = at - currentBufferPtr + currentBufferRef.byteOffset; return currentParser.onStatus(new FastBuffer(currentBufferRef.buffer, start, len)) || 0; }, wasm_on_message_begin: (p) => { - assert.strictEqual(currentParser.ptr, p); + assert(currentParser.ptr === p); return currentParser.onMessageBegin() || 0; }, wasm_on_header_field: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p); + assert(currentParser.ptr === p); const start = at - currentBufferPtr + currentBufferRef.byteOffset; return currentParser.onHeaderField(new FastBuffer(currentBufferRef.buffer, start, len)) || 0; }, wasm_on_header_value: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p); + assert(currentParser.ptr === p); const start = at - currentBufferPtr + currentBufferRef.byteOffset; return currentParser.onHeaderValue(new FastBuffer(currentBufferRef.buffer, start, len)) || 0; }, wasm_on_headers_complete: (p, statusCode, upgrade, shouldKeepAlive) => { - assert.strictEqual(currentParser.ptr, p); + assert(currentParser.ptr === 
p); return currentParser.onHeadersComplete(statusCode, Boolean(upgrade), Boolean(shouldKeepAlive)) || 0; }, wasm_on_body: (p, at, len) => { - assert.strictEqual(currentParser.ptr, p); + assert(currentParser.ptr === p); const start = at - currentBufferPtr + currentBufferRef.byteOffset; return currentParser.onBody(new FastBuffer(currentBufferRef.buffer, start, len)) || 0; }, wasm_on_message_complete: (p) => { - assert.strictEqual(currentParser.ptr, p); + assert(currentParser.ptr === p); return currentParser.onMessageComplete() || 0; } /* eslint-enable camelcase */ @@ -5627,9 +5820,11 @@ var require_client_h1 = __commonJS({ var currentBufferRef = null; var currentBufferSize = 0; var currentBufferPtr = null; - var TIMEOUT_HEADERS = 1; - var TIMEOUT_BODY = 2; - var TIMEOUT_IDLE = 3; + var USE_NATIVE_TIMER = 0; + var USE_FAST_TIMER = 1; + var TIMEOUT_HEADERS = 2 | USE_FAST_TIMER; + var TIMEOUT_BODY = 4 | USE_FAST_TIMER; + var TIMEOUT_KEEP_ALIVE = 8 | USE_NATIVE_TIMER; var Parser = class { static { __name(this, "Parser"); @@ -5658,24 +5853,27 @@ var require_client_h1 = __commonJS({ this.connection = ""; this.maxResponseSize = client[kMaxResponseSize]; } - setTimeout(value, type) { - this.timeoutType = type; - if (value !== this.timeoutValue) { - timers.clearTimeout(this.timeout); - if (value) { - this.timeout = timers.setTimeout(onParserTimeout, value, this); - if (this.timeout.unref) { + setTimeout(delay, type) { + if (delay !== this.timeoutValue || type & USE_FAST_TIMER ^ this.timeoutType & USE_FAST_TIMER) { + if (this.timeout) { + timers.clearTimeout(this.timeout); + this.timeout = null; + } + if (delay) { + if (type & USE_FAST_TIMER) { + this.timeout = timers.setFastTimeout(onParserTimeout, delay, new WeakRef(this)); + } else { + this.timeout = setTimeout(onParserTimeout, delay, new WeakRef(this)); this.timeout.unref(); } - } else { - this.timeout = null; } - this.timeoutValue = value; + this.timeoutValue = delay; } else if (this.timeout) { if (this.timeout.refresh) { this.timeout.refresh(); } } + this.timeoutType = type; } resume() { if (this.socket.destroyed || !this.paused) { @@ -5752,7 +5950,7 @@ var require_client_h1 = __commonJS({ assert(currentParser == null); this.llhttp.llhttp_free(this.ptr); this.ptr = null; - timers.clearTimeout(this.timeout); + this.timeout && timers.clearTimeout(this.timeout); this.timeout = null; this.timeoutValue = null; this.timeoutType = null; @@ -5811,16 +6009,16 @@ var require_client_h1 = __commonJS({ onUpgrade(head) { const { upgrade, client, socket, headers, statusCode } = this; assert(upgrade); - const request = client[kQueue][client[kRunningIdx]]; - assert(request); + assert(client[kSocket] === socket); assert(!socket.destroyed); - assert(socket === client[kSocket]); assert(!this.paused); + assert((headers.length & 1) === 0); + const request = client[kQueue][client[kRunningIdx]]; + assert(request); assert(request.upgrade || request.method === "CONNECT"); this.statusCode = null; this.statusText = ""; this.shouldKeepAlive = null; - assert(this.headers.length % 2 === 0); this.headers = []; this.headersSize = 0; socket.unshift(head); @@ -5859,7 +6057,7 @@ var require_client_h1 = __commonJS({ util.destroy(socket, new SocketError("bad upgrade", util.getSocketInfo(socket))); return -1; } - assert.strictEqual(this.timeoutType, TIMEOUT_HEADERS); + assert(this.timeoutType === TIMEOUT_HEADERS); this.statusCode = statusCode; this.shouldKeepAlive = shouldKeepAlive || // Override llhttp value which does not allow keepAlive for HEAD. 
request.method === "HEAD" && !socket[kReset] && this.connection.toLowerCase() === "keep-alive"; @@ -5881,7 +6079,7 @@ var require_client_h1 = __commonJS({ this.upgrade = true; return 2; } - assert(this.headers.length % 2 === 0); + assert((this.headers.length & 1) === 0); this.headers = []; this.headersSize = 0; if (this.shouldKeepAlive && client[kPipelining]) { @@ -5925,7 +6123,7 @@ var require_client_h1 = __commonJS({ } const request = client[kQueue][client[kRunningIdx]]; assert(request); - assert.strictEqual(this.timeoutType, TIMEOUT_BODY); + assert(this.timeoutType === TIMEOUT_BODY); if (this.timeout) { if (this.timeout.refresh) { this.timeout.refresh(); @@ -5949,16 +6147,16 @@ var require_client_h1 = __commonJS({ if (upgrade) { return; } + assert(statusCode >= 100); + assert((this.headers.length & 1) === 0); const request = client[kQueue][client[kRunningIdx]]; assert(request); - assert(statusCode >= 100); this.statusCode = null; this.statusText = ""; this.bytesRead = 0; this.contentLength = ""; this.keepAlive = ""; this.connection = ""; - assert(this.headers.length % 2 === 0); this.headers = []; this.headersSize = 0; if (statusCode < 200) { @@ -5971,7 +6169,7 @@ var require_client_h1 = __commonJS({ request.onComplete(headers); client[kQueue][client[kRunningIdx]++] = null; if (socket[kWriting]) { - assert.strictEqual(client[kRunning], 0); + assert(client[kRunning] === 0); util.destroy(socket, new InformationalError("reset")); return constants.ERROR.PAUSED; } else if (!shouldKeepAlive) { @@ -5988,17 +6186,17 @@ var require_client_h1 = __commonJS({ } }; function onParserTimeout(parser) { - const { socket, timeoutType, client } = parser; + const { socket, timeoutType, client, paused } = parser.deref(); if (timeoutType === TIMEOUT_HEADERS) { if (!socket[kWriting] || socket.writableNeedDrain || client[kRunning] > 1) { - assert(!parser.paused, "cannot be paused while waiting for headers"); + assert(!paused, "cannot be paused while waiting for headers"); util.destroy(socket, new HeadersTimeoutError()); } } else if (timeoutType === TIMEOUT_BODY) { - if (!parser.paused) { + if (!paused) { util.destroy(socket, new BodyTimeoutError()); } - } else if (timeoutType === TIMEOUT_IDLE) { + } else if (timeoutType === TIMEOUT_KEEP_ALIVE) { assert(client[kRunning] === 0 && client[kKeepAliveTimeoutValue]); util.destroy(socket, new InformationalError("socket idle timeout")); } @@ -6016,8 +6214,8 @@ var require_client_h1 = __commonJS({ socket[kBlocking] = false; socket[kParser] = new Parser(client, socket, llhttpInstance); addListener(socket, "error", function(err) { - const parser = this[kParser]; assert(err.code !== "ERR_TLS_CERT_ALTNAME_INVALID"); + const parser = this[kParser]; if (err.code === "ECONNRESET" && parser.statusCode && !parser.shouldKeepAlive) { parser.onMessageComplete(); return; @@ -6125,8 +6323,8 @@ var require_client_h1 = __commonJS({ socket[kNoRef] = false; } if (client[kSize] === 0) { - if (socket[kParser].timeoutType !== TIMEOUT_IDLE) { - socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_IDLE); + if (socket[kParser].timeoutType !== TIMEOUT_KEEP_ALIVE) { + socket[kParser].setTimeout(client[kKeepAliveTimeoutValue], TIMEOUT_KEEP_ALIVE); } } else if (client[kRunning] > 0 && socket[kParser].statusCode < 200) { if (socket[kParser].timeoutType !== TIMEOUT_HEADERS) { @@ -10889,22 +11087,31 @@ var require_fetch = __commonJS({ finishFlush: zlib.constants.Z_SYNC_FLUSH })); } else if (coding === "deflate") { - decoders.push(createInflate()); + decoders.push(createInflate({ + flush: 
zlib.constants.Z_SYNC_FLUSH, + finishFlush: zlib.constants.Z_SYNC_FLUSH + })); } else if (coding === "br") { - decoders.push(zlib.createBrotliDecompress()); + decoders.push(zlib.createBrotliDecompress({ + flush: zlib.constants.BROTLI_OPERATION_FLUSH, + finishFlush: zlib.constants.BROTLI_OPERATION_FLUSH + })); } else { decoders.length = 0; break; } } } + const onError = this.onError.bind(this); resolve({ status, statusText, headersList, - body: decoders.length ? pipeline(this.body, ...decoders, () => { - }) : this.body.on("error", () => { - }) + body: decoders.length ? pipeline(this.body, ...decoders, (err) => { + if (err) { + this.onError(err); + } + }).on("error", onError) : this.body.on("error", onError) }); return true; }, diff --git a/deps/v8/src/builtins/set-difference.tq b/deps/v8/src/builtins/set-difference.tq index 32529a3fb23a0b..e0df3b8b69ed15 100644 --- a/deps/v8/src/builtins/set-difference.tq +++ b/deps/v8/src/builtins/set-difference.tq @@ -85,7 +85,8 @@ transitioning javascript builtin SetPrototypeDifference( } } label SlowPath { // 6. If thisSize ≤ otherRec.[[Size]], then - if (thisSize <= Convert(otherRec.size)) { + if (otherRec.size == V8_INFINITY || + thisSize <= Convert(otherRec.size)) { // a. Let index be 0. let thisIter = collections::NewOrderedHashSetIterator(table.GetTable()); diff --git a/deps/v8/src/builtins/set-intersection.tq b/deps/v8/src/builtins/set-intersection.tq index a72026a6502873..017d72d3e6280d 100644 --- a/deps/v8/src/builtins/set-intersection.tq +++ b/deps/v8/src/builtins/set-intersection.tq @@ -81,7 +81,8 @@ transitioning javascript builtin SetPrototypeIntersection( } } label SlowPath { // 6. If thisSize ≤ otherRec.[[Size]], then - if (thisSize <= Convert(otherRec.size)) { + if (otherRec.size == V8_INFINITY || + thisSize <= Convert(otherRec.size)) { // a. Let index be 0. let thisIter = collections::NewOrderedHashSetIterator(table.GetTable()); diff --git a/deps/v8/src/builtins/set-is-disjoint-from.tq b/deps/v8/src/builtins/set-is-disjoint-from.tq index a43a5c85887f2b..62936ea0b741dd 100644 --- a/deps/v8/src/builtins/set-is-disjoint-from.tq +++ b/deps/v8/src/builtins/set-is-disjoint-from.tq @@ -73,7 +73,8 @@ transitioning javascript builtin SetPrototypeIsDisjointFrom( } } label SlowPath { // 5. If thisSize ≤ otherRec.[[Size]], then - if (thisSize <= Convert(otherRec.size)) { + if (otherRec.size == V8_INFINITY || + thisSize <= Convert(otherRec.size)) { // a. Let index be 0. let thisIter = collections::NewOrderedHashSetIterator(table.GetTable()); diff --git a/deps/v8/src/builtins/set-is-subset-of.tq b/deps/v8/src/builtins/set-is-subset-of.tq index 890e4bf04d8941..b7096ca823f679 100644 --- a/deps/v8/src/builtins/set-is-subset-of.tq +++ b/deps/v8/src/builtins/set-is-subset-of.tq @@ -25,7 +25,8 @@ transitioning javascript builtin SetPrototypeIsSubsetOf( const thisSize = table.LoadSize(); // 5. If thisSize > otherRec.[[Size]], return false. - if (thisSize > Convert(otherRec.size)) { + if (!(otherRec.size == V8_INFINITY) && + thisSize > Convert(otherRec.size)) { return False; } diff --git a/deps/v8/src/builtins/set-is-superset-of.tq b/deps/v8/src/builtins/set-is-superset-of.tq index c7b9173b06c434..a95aea1496cb02 100644 --- a/deps/v8/src/builtins/set-is-superset-of.tq +++ b/deps/v8/src/builtins/set-is-superset-of.tq @@ -26,7 +26,8 @@ transitioning javascript builtin SetPrototypeIsSupersetOf( const thisSize = table.LoadSize(); // 5. If thisSize < otherRec.[[Size]], return false. 
- if (thisSize < Convert(otherRec.size)) { + if (otherRec.size == V8_INFINITY || + thisSize < Convert(otherRec.size)) { return False; } diff --git a/deps/v8/src/flags/flag-definitions.h b/deps/v8/src/flags/flag-definitions.h index 0d50ac1522ef87..340d770020a6d0 100644 --- a/deps/v8/src/flags/flag-definitions.h +++ b/deps/v8/src/flags/flag-definitions.h @@ -299,8 +299,7 @@ DEFINE_BOOL(js_shipping, true, "enable all shipped JavaScript features") V(harmony_set_methods, "harmony Set Methods") \ V(harmony_import_attributes, "harmony import attributes") -#define JAVASCRIPT_SHIPPING_FEATURES_BASE(V) \ - V(js_promise_withresolvers, "Promise.withResolvers") +#define JAVASCRIPT_SHIPPING_FEATURES_BASE(V) #ifdef V8_INTL_SUPPORT #define HARMONY_SHIPPING(V) HARMONY_SHIPPING_BASE(V) diff --git a/deps/v8/src/flags/flags-impl.h b/deps/v8/src/flags/flags-impl.h index d2d440c0263876..111b9b5b9c145b 100644 --- a/deps/v8/src/flags/flags-impl.h +++ b/deps/v8/src/flags/flags-impl.h @@ -5,6 +5,8 @@ #ifndef V8_FLAGS_FLAGS_IMPL_H_ #define V8_FLAGS_FLAGS_IMPL_H_ +#include <unordered_set> + #include "src/base/macros.h" #include "src/base/optional.h" #include "src/base/vector.h" @@ -91,9 +93,12 @@ struct Flag { #ifdef DEBUG bool ImpliedBy(const void* ptr) const { const Flag* current = this->implied_by_ptr_; + std::unordered_set<const Flag*> visited_flags; while (current != nullptr) { + visited_flags.insert(current); if (current->PointsTo(ptr)) return true; current = current->implied_by_ptr_; + if (visited_flags.contains(current)) break; } return false; } diff --git a/deps/v8/src/init/bootstrapper.cc b/deps/v8/src/init/bootstrapper.cc index 0d5b3ec7e9a455..3b151ea3e86a16 100644 --- a/deps/v8/src/init/bootstrapper.cc +++ b/deps/v8/src/init/bootstrapper.cc @@ -3217,6 +3217,15 @@ void Genesis::InitializeGlobal(Handle<JSGlobalObject> global_object, InstallFunctionWithBuiltinId(isolate_, promise_fun, "reject", Builtin::kPromiseReject, 1, true); + std::array<DirectHandle<String>, 3> fields{factory->promise_string(), + factory->resolve_string(), + factory->reject_string()}; + DirectHandle<Map> result_map = + CreateLiteralObjectMapFromCache(isolate_, fields); + native_context()->set_promise_withresolvers_result_map(*result_map); + InstallFunctionWithBuiltinId(isolate_, promise_fun, "withResolvers", + Builtin::kPromiseWithResolvers, 0, true); + SetConstructorInstanceType(isolate_, promise_fun, JS_PROMISE_CONSTRUCTOR_TYPE); @@ -5445,23 +5454,6 @@ void Genesis::InitializeGlobal_harmony_iterator_helpers() { #undef ITERATOR_HELPERS } -void Genesis::InitializeGlobal_js_promise_withresolvers() { - if (!v8_flags.js_promise_withresolvers) return; - - Factory* factory = isolate()->factory(); - - std::array<Handle<String>, 3> fields{factory->promise_string(), - factory->resolve_string(), - factory->reject_string()}; - Handle<Map> result_map = CreateLiteralObjectMapFromCache(isolate(), fields); - native_context()->set_promise_withresolvers_result_map(*result_map); - - Handle<JSFunction> promise_fun = - handle(native_context()->promise_function(), isolate()); - InstallFunctionWithBuiltinId(isolate(), promise_fun, "withResolvers", - Builtin::kPromiseWithResolvers, 0, true); -} - void Genesis::InitializeGlobal_harmony_set_methods() { if (!v8_flags.harmony_set_methods) return; diff --git a/deps/v8/test/mjsunit/harmony/promise-withresolvers.js b/deps/v8/test/mjsunit/harmony/promise-withresolvers.js index 9cb244a73fb653..9466a6ab5ba78c 100644 --- a/deps/v8/test/mjsunit/harmony/promise-withresolvers.js +++ b/deps/v8/test/mjsunit/harmony/promise-withresolvers.js @@ -2,7 +2,7 @@ // Use of this source code is governed by a BSD-style license that can 
be // found in the LICENSE file. -// Flags: --js-promise-withresolvers --allow-natives-syntax +// Flags: --allow-natives-syntax const desc = Object.getOwnPropertyDescriptor(Promise, 'withResolvers'); assertTrue(desc.configurable); diff --git a/deps/v8/test/mjsunit/harmony/set-difference.js b/deps/v8/test/mjsunit/harmony/set-difference.js index 9afa182cf54216..59c1d541928e44 100644 --- a/deps/v8/test/mjsunit/harmony/set-difference.js +++ b/deps/v8/test/mjsunit/harmony/set-difference.js @@ -305,4 +305,43 @@ assertThrows(() => { new Set().difference(setLike); }, RangeError, "'-1' is an invalid size"); -})() +})(); + +(function TestDifferenceSetLikeWithInfiniteSize() { + let setLike = { + size: Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + const firstSet = new Set(); + firstSet.add(42); + firstSet.add(43); + + const resultSet = new Set(); + + const resultArray = Array.from(resultSet); + const differenceArray = Array.from(firstSet.difference(setLike)); + + assertEquals(resultArray, differenceArray); +})(); + +(function TestDifferenceSetLikeWithNegativeInfiniteSize() { + let setLike = { + size: -Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + assertThrows(() => { + new Set().difference(setLike); + }, RangeError, '\'-Infinity\' is an invalid size'); +})(); diff --git a/deps/v8/test/mjsunit/harmony/set-intersection.js b/deps/v8/test/mjsunit/harmony/set-intersection.js index 77d0a8cdd7255f..8ac5e137e1778c 100644 --- a/deps/v8/test/mjsunit/harmony/set-intersection.js +++ b/deps/v8/test/mjsunit/harmony/set-intersection.js @@ -281,3 +281,40 @@ assertEquals([43], Array.from(firstSet.intersection(evil))); })(); + +(function TestIntersectionSetLikeWithInfiniteSize() { + let setLike = { + size: Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + const firstSet = new Set(); + firstSet.add(42); + firstSet.add(43); + + const resultArray = [42, 43]; + const intersectionArray = Array.from(firstSet.intersection(setLike)); + + assertEquals(resultArray, intersectionArray); +})(); + +(function TestIntersectionSetLikeWithNegativeInfiniteSize() { + let setLike = { + size: -Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + assertThrows(() => { + new Set().intersection(setLike); + }, RangeError, '\'-Infinity\' is an invalid size'); +})(); diff --git a/deps/v8/test/mjsunit/harmony/set-is-disjoint-from.js b/deps/v8/test/mjsunit/harmony/set-is-disjoint-from.js index f89365a6a6dfeb..33b53a2620a416 100644 --- a/deps/v8/test/mjsunit/harmony/set-is-disjoint-from.js +++ b/deps/v8/test/mjsunit/harmony/set-is-disjoint-from.js @@ -216,3 +216,37 @@ assertFalse(firstSet.isDisjointFrom(evil)); })(); + +(function TestIsDisjointFromSetLikeWithInfiniteSize() { + let setLike = { + size: Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + const firstSet = new Set(); + firstSet.add(42); + firstSet.add(43); + + assertEquals(firstSet.isDisjointFrom(setLike), false); +})(); + +(function TestIsDisjointFromSetLikeWithNegativeInfiniteSize() { + let setLike = { + size: -Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + assertThrows(() => { + new Set().isDisjointFrom(setLike); + }, RangeError, '\'-Infinity\' is an invalid 
size'); +})(); diff --git a/deps/v8/test/mjsunit/harmony/set-is-subset-of.js b/deps/v8/test/mjsunit/harmony/set-is-subset-of.js index 5f1e41c7ef3fb6..01c48ccff6c8ed 100644 --- a/deps/v8/test/mjsunit/harmony/set-is-subset-of.js +++ b/deps/v8/test/mjsunit/harmony/set-is-subset-of.js @@ -266,3 +266,37 @@ assertEquals(firstSet.isSubsetOf(setLike), true); })(); + +(function TestIsSubsetOfSetLikeWithInfiniteSize() { + let setLike = { + size: Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + const firstSet = new Set(); + firstSet.add(42); + firstSet.add(43); + + assertEquals(firstSet.isSubsetOf(setLike), true); +})(); + +(function TestIsSubsetOfSetLikeWithNegativeInfiniteSize() { + let setLike = { + size: -Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + assertThrows(() => { + new Set().isSubsetOf(setLike); + }, RangeError, '\'-Infinity\' is an invalid size'); +})(); diff --git a/deps/v8/test/mjsunit/harmony/set-is-superset-of.js b/deps/v8/test/mjsunit/harmony/set-is-superset-of.js index 17f6418ffcaf6f..91ca8e535e9fd7 100644 --- a/deps/v8/test/mjsunit/harmony/set-is-superset-of.js +++ b/deps/v8/test/mjsunit/harmony/set-is-superset-of.js @@ -245,3 +245,37 @@ assertTrue(firstSet.isSupersetOf(evil)); })(); + +(function TestIsSupersetOfSetLikeWithInfiniteSize() { + let setLike = { + size: Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + const firstSet = new Set(); + firstSet.add(42); + firstSet.add(43); + + assertEquals(firstSet.isSupersetOf(setLike), false); +})(); + +(function TestIsSupersetOfSetLikeWithNegativeInfiniteSize() { + let setLike = { + size: -Infinity, + has(v) { + return true; + }, + keys() { + throw new Error('Unexpected call to |keys| method'); + }, + }; + + assertThrows(() => { + new Set().isSupersetOf(setLike); + }, RangeError, '\'-Infinity\' is an invalid size'); +})(); diff --git a/deps/v8/test/test262/testcfg.py b/deps/v8/test/test262/testcfg.py index ec4fad84078075..10b6243926396f 100644 --- a/deps/v8/test/test262/testcfg.py +++ b/deps/v8/test/test262/testcfg.py @@ -58,7 +58,6 @@ 'json-parse-with-source': '--harmony-json-parse-with-source', 'iterator-helpers': '--harmony-iterator-helpers', 'set-methods': '--harmony-set-methods', - 'promise-with-resolvers': '--js-promise-withresolvers', 'Array.fromAsync': '--harmony-array-from-async', 'import-attributes': '--harmony-import-attributes', 'regexp-duplicate-named-groups': '--js-regexp-duplicate-named-groups', diff --git a/doc/README.md b/doc/README.md index 3d8ad6246839e6..a026746ff713a0 100644 --- a/doc/README.md +++ b/doc/README.md @@ -1,117 +1,234 @@ -# Documentation style guide - -This style guide helps us create organized and easy-to-read documentation. It -provides guidelines for organization, spelling, formatting, and more. - -These are guidelines rather than strict rules. Content is more important than -formatting. You do not need to learn the entire style guide before contributing -to documentation. Someone can always edit your material later to conform with -this guide. - -* Documentation is in markdown files with names formatted as - `lowercase-with-dashes.md`. - * Use an underscore in the filename only if the underscore is part of the - topic name (e.g., `child_process`). - * Some files, such as top-level markdown files, are exceptions. -* Documents should be word-wrapped at 80 characters. 
-* `.editorconfig` describes the preferred formatting. - * A [plugin][] is available for some editors to apply these rules. -* Check changes to documentation with `make test-doc -j` or `vcbuild test-doc`. -* [Use US spelling][]. -* [Use serial commas][]. +# Node.js documentation style guide + +This guide provides clear and concise instructions to help you create well-organized and readable documentation for +the Node.js community. It covers organization, spelling, formatting, and more to ensure consistency and +professionalism across all documents. + +## Table of contents + +1. [General Guidelines](#general-guidelines) +2. [Writing Style](#writing-style) +3. [Punctuation](#punctuation) +4. [Document Structure](#document-structure) +5. [API Documentation](#api-documentation) +6. [Code Blocks](#code-blocks) +7. [Formatting](#formatting) +8. [Product and Project Naming](#product-and-project-naming) + +*** + +## General guidelines + +### File naming + +* **Markdown Files:** Use `lowercase-with-dashes.md`. + * Use underscores only if they are part of the topic name (e.g., `child_process`). + * Some files, like top-level Markdown files, may be exceptions. + +### Text wrapping + +* Wrap documents at 120 characters per line to enhance readability and version control. + +### Editor configuration + +* Follow the formatting rules specified in `.editorconfig`. + * A [plugin][] is available for some editors to enforce these rules. + +### Testing documentation + +* Validate documentation changes using `make test-doc -j` or `vcbuild test-doc`. + +*** + +## Writing style + +### Spelling and grammar + +* **Spelling:** Use [US spelling][]. +* **Grammar:** Use clear, concise language. Avoid unnecessary jargon. + +### Commas + +* **Serial Commas:** Use [serial commas][] for clarity. + * Example: _apples, oranges, and bananas_ + +### Pronouns + * Avoid first-person pronouns (_I_, _we_). - * Exception: _we recommend foo_ is preferable to _foo is recommended_. -* Use gender-neutral pronouns and gender-neutral plural nouns. + * Exception: Use _we recommend foo_ instead of _foo is recommended_. + +### Gender-neutral language + +* Use gender-neutral pronouns and plural nouns. * OK: _they_, _their_, _them_, _folks_, _people_, _developers_ * NOT OK: _his_, _hers_, _him_, _her_, _guys_, _dudes_ -* When combining wrapping elements (parentheses and quotes), place terminal - punctuation: - * Inside the wrapping element if the wrapping element contains a complete - clause. - * Outside of the wrapping element if the wrapping element contains only a - fragment of a clause. -* Documents must start with a level-one heading. -* Prefer affixing links (`[a link][]`) to inlining links - (`[a link](http://example.com)`). -* When documenting APIs, update the YAML comment associated with the API as - appropriate. This is especially true when introducing or deprecating an API. -* When documenting APIs, every function should have a usage example or - link to an example that uses the function. -* For code blocks: - * Use [language][]-aware fences. (\`\`\`js) - - * For the [info string][], use one of the following. 
- - | Meaning | Info string | - | ------------- | ------------ | - | Bash | `bash` | - | C | `c` | - | C++ | `cpp` | - | CoffeeScript | `coffee` | - | Diff | `diff` | - | HTTP | `http` | - | JavaScript | `js` | - | JSON | `json` | - | Markdown | `markdown` | - | Plaintext | `text` | - | Powershell | `powershell` | - | R | `r` | - | Shell Session | `console` | - - If one of your language-aware fences needs an info string that is not - already on this list, you may use `text` until the grammar gets added to - [`remark-preset-lint-node`][]. - - * Code need not be complete. Treat code blocks as an illustration or aid to - your point, not as complete running programs. If a complete running program - is necessary, include it as an asset in `assets/code-examples` and link to - it. -* When using underscores, asterisks, and backticks, please use - backslash-escaping: `\_`, `\*`, and ``\` ``. -* Constructors should use PascalCase. -* Instances should use camelCase. -* Denote methods with parentheses: `socket.end()` instead of `socket.end`. -* Function arguments or object properties should use the following format: - * ``* `name` {type|type2} Optional description. **Default:** `value`.`` - - * For example: \* `byteOffset` {integer} Index of first byte to expose. **Default:** `0`. - - * The `type` should refer to a Node.js type or a [JavaScript type][]. -* Function returns should use the following format: - * \* Returns: {type|type2} Optional description. - * E.g. \* Returns: {AsyncHook} A reference to `asyncHook`. -* Use official styling for capitalization in products and projects. + +### Terminology + +* Use precise technical terms and avoid colloquialisms. +* Define any specialized terms or acronyms at first use. + +*** + +## Punctuation + +### Terminal punctuation + +* Place inside parentheses or quotes if the content is a complete clause. +* Place outside if the content is a fragment of a clause. + +### Quotation marks + +* Use double quotation marks for direct quotes. +* Use single quotation marks for quotes within quotes. + +### Colons and semicolons + +* Use colons to introduce lists or explanations. +* Use semicolons to link closely related independent clauses. + +*** + +## Document structure + +### Headings + +* Start documents with a level-one heading (`#`). +* Use subsequent headings (`##`, `###`, etc.) to organize content hierarchically. + +### Links + +* Prefer reference-style links (`[a link][]`) over inline links (`[a link](http://example.com)`). + +### Lists + +* Use bullet points for unordered lists and numbers for ordered lists. +* Keep list items parallel in structure. + +### Tables + +* Use tables to present structured information clearly. Ensure they are readable in plain text. + +*** + +## API documentation + +### YAML comments + +* Update the YAML comments associated with the API, especially when introducing or deprecating an API. + +### Usage examples + +* Provide a usage example or a link to an example for every function. + +### Parameter descriptions + +* Clearly describe parameters and return values, including types and defaults. + * Example: + ```markdown + * `byteOffset` {integer} Index of first byte to expose. **Default:** `0`. + ``` + +*** + +## Code blocks + +### Language-aware fences + +* Use language-aware fences (e.g., ` ```js `) for code blocks. 
+ + * **Info String:** Use the appropriate info string from the following list: + + | Language | Info String | | ---------------- | ------------ | | Bash | `bash` | | C | `c` | | CommonJS | `cjs` | | CoffeeScript | `coffee` | | Terminal Session | `console` | | C++ | `cpp` | | Diff | `diff` | | HTTP | `http` | | JavaScript | `js` | | JSON | `json` | | Markdown | `markdown` | | ECMAScript | `mjs` | | PowerShell | `powershell` | | R | `r` | | Plaintext | `text` | | TypeScript | `typescript` | + + * Use `text` for languages not listed until their grammar is added to [`remark-preset-lint-node`][]. + +### Code comments + +* Use comments to explain complex logic within code examples. +* Follow the standard commenting style of the respective language. + +*** + +## Formatting + +### Escaping characters + +* Use backslash-escaping for underscores, asterisks, and backticks: `\_`, `\*`, `` \` ``. + +### Naming conventions + +* **Constructors:** Use PascalCase. +* **Instances:** Use camelCase. +* **Methods:** Indicate methods with parentheses: `socket.end()` instead of `socket.end`. + +### Function arguments and returns + +* **Arguments:** + ```markdown + * `name` {type|type2} Optional description. **Default:** `value`. + ``` + Example: + ```markdown + * `byteOffset` {integer} Index of first byte to expose. **Default:** `0`. + ``` +* **Returns:** + ```markdown + * Returns: {type|type2} Optional description. + ``` + Example: + ```markdown + * Returns: {AsyncHook} A reference to `asyncHook`. + ``` + +*** + +## Product and project naming + + + +### Official styling + +* Use official capitalization for products and projects. * OK: JavaScript, Google's V8 - * NOT OK: Javascript, Google's v8 -* Use _Node.js_ and not _Node_, _NodeJS_, or similar variants. - - * When referring to the executable, _`node`_ is acceptable. -* [Be direct][]. - +### Node.js references + +* Use _Node.js_ instead of _Node_, _NodeJS_, or similar variants. + * For the executable, _`node`_ is acceptable. -* When referring to a version of Node.js in prose, use _Node.js_ and the version - number. Do not prefix the version number with _v_ in prose. This is to avoid - confusion about whether _v8_ refers to Node.js 8.x or the V8 JavaScript - engine. - +### Version references + +* Use _Node.js_ and the version number in prose. Do not prefix the version number with _v_. * OK: _Node.js 14.x_, _Node.js 14.3.1_ * NOT OK: _Node.js v14_ -* [Use sentence-style capitalization for headings][]. -See also API documentation structure overview in [doctools README][]. + + +For topics not addressed here, please consult the [Microsoft Writing Style Guide][]. -For topics not covered here, refer to the [Microsoft Writing Style Guide][].
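(To make the language-aware fence rule above concrete, here is a hypothetical snippet as it would appear in a Markdown source file; the outer four-backtick fence is used here only so the inner `js` fence survives rendering.)

````markdown
```js
// A JavaScript example carries the `js` info string from the table above.
console.log('Hello from the docs!');
```
````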
+*** -[Be direct]: https://docs.microsoft.com/en-us/style-guide/word-choice/use-simple-words-concise-sentences -[Javascript type]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Guide/Grammar_and_types#Data_structures_and_types -[Microsoft Writing Style Guide]: https://docs.microsoft.com/en-us/style-guide/welcome/ -[Use US spelling]: https://docs.microsoft.com/en-us/style-guide/word-choice/use-us-spelling-avoid-non-english-words -[Use sentence-style capitalization for headings]: https://docs.microsoft.com/en-us/style-guide/scannable-content/headings#formatting-headings -[Use serial commas]: https://docs.microsoft.com/en-us/style-guide/punctuation/commas +[Microsoft Writing Style Guide]: https://learn.microsoft.com/en-us/style-guide/welcome/ +[US spelling]: https://learn.microsoft.com/en-us/style-guide/word-choice/use-us-spelling-avoid-non-english-words [`remark-preset-lint-node`]: https://github.com/nodejs/remark-preset-lint-node -[doctools README]: ../tools/doc/README.md -[info string]: https://github.github.com/gfm/#info-string -[language]: https://github.com/highlightjs/highlight.js/blob/HEAD/SUPPORTED_LANGUAGES.md [plugin]: https://editorconfig.org/#download +[serial commas]: https://learn.microsoft.com/en-us/style-guide/punctuation/commas diff --git a/doc/api/buffer.md b/doc/api/buffer.md index fbbcc04083ef51..ef0ffc9e892afa 100644 --- a/doc/api/buffer.md +++ b/doc/api/buffer.md @@ -518,7 +518,6 @@ the `Blob` data. The `blob.bytes()` method returns the byte of the `Blob` object as a `Promise`. diff --git a/doc/api/cli.md b/doc/api/cli.md index 0cfed4a4a91a3d..62d3d37a061f78 100644 --- a/doc/api/cli.md +++ b/doc/api/cli.md @@ -220,7 +220,7 @@ The initializer module also needs to be allowed. Consider the following example: ```console $ node --experimental-permission t.js node:internal/modules/cjs/loader:162 - const result = internalModuleStat(filename); + const result = internalModuleStat(receiver, filename); ^ Error: Access to this API has been restricted @@ -795,6 +795,28 @@ when `Error.stack` is accessed. If you access `Error.stack` frequently in your application, take into account the performance implications of `--enable-source-maps`. +### `--entry-url` + + + +> Stability: 1 - Experimental + +When present, Node.js will interpret the entry point as a URL, rather than a +path. + +Follows [ECMAScript module][] resolution rules. + +Any query parameter or hash in the URL will be accessible via [`import.meta.url`][]. + +```bash +node --entry-url 'file:///path/to/file.js?queryparams=work#and-hashes-too' +node --entry-url --experimental-strip-types 'file.ts?query#hash' +node --entry-url 'data:text/javascript,console.log("Hello")' +``` + ### `--env-file=config` > Stability: 1.1 - Active development @@ -862,6 +884,15 @@ export USERNAME="nodejs" # will result in `nodejs` as the value. If you want to load environment variables from a file that may not exist, you can use the [`--env-file-if-exists`][] flag instead. +### `--env-file-if-exists=config` + + + +Behavior is the same as [`--env-file`][], but an error is not thrown if the file +does not exist. + ### `-e`, `--eval "script"` - -Behavior is the same as [`--env-file`][], but an error is not thrown if the file -does not exist. - ### `--pending-deprecation` -> Stability: 1.2 - Release candidate +> Stability: 2 - Stable This runs a specified command from a package.json's `"scripts"` object. If a missing `"command"` is provided, it will list the available scripts. 
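(Looping back to the [`--env-file-if-exists`][] flag documented above: a minimal sketch of the intended workflow; `.env.local` and `index.js` are hypothetical names.)

```bash
# No error is thrown if .env.local is absent, unlike --env-file.
node --env-file-if-exists=.env.local index.js
```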
@@ -2110,6 +2133,8 @@ the current directory, to the `PATH` in order to execute the binaries from different folders where multiple `node_modules` directories are present, if `ancestor-folder/node_modules/.bin` is a directory. +`--run` executes the command in the directory containing the related `package.json`. + For example, the following command will run the `test` script of the `package.json` in the current folder: @@ -3022,6 +3047,7 @@ one is included in the list below. * `--enable-fips` * `--enable-network-family-autoselection` * `--enable-source-maps` +* `--entry-url` * `--experimental-abortcontroller` * `--experimental-async-context-frame` * `--experimental-default-type` @@ -3587,6 +3613,7 @@ node --stack-trace-limit=12 -p -e "Error.stackTraceLimit" # prints 12 [V8 JavaScript code coverage]: https://v8project.blogspot.com/2017/12/javascript-code-coverage.html [Web Crypto API]: webcrypto.md [`"type"`]: packages.md#type +[`--allow-addons`]: #--allow-addons [`--allow-child-process`]: #--allow-child-process [`--allow-fs-read`]: #--allow-fs-read [`--allow-fs-write`]: #--allow-fs-write @@ -3621,6 +3648,7 @@ node --stack-trace-limit=12 -p -e "Error.stackTraceLimit" # prints 12 [`dns.lookup()`]: dns.md#dnslookuphostname-options-callback [`dns.setDefaultResultOrder()`]: dns.md#dnssetdefaultresultorderorder [`dnsPromises.lookup()`]: dns.md#dnspromiseslookuphostname-options +[`import.meta.url`]: esm.md#importmetaurl [`import` specifier]: esm.md#import-specifiers [`net.getDefaultAutoSelectFamilyAttemptTimeout()`]: net.md#netgetdefaultautoselectfamilyattempttimeout [`node:sqlite`]: sqlite.md diff --git a/doc/api/crypto.md b/doc/api/crypto.md index 6df129b36ab3d4..eafee87cd532de 100644 --- a/doc/api/crypto.md +++ b/doc/api/crypto.md @@ -2134,6 +2134,24 @@ added: v11.6.0 For secret keys, this property represents the size of the key in bytes. This property is `undefined` for asymmetric keys. +### `keyObject.toCryptoKey(algorithm, extractable, keyUsages)` + + + + + +* `algorithm`: {AlgorithmIdentifier|RsaHashedImportParams|EcKeyImportParams|HmacImportParams} + + + +* `extractable`: {boolean} +* `keyUsages`: {string\[]} See [Key usages][]. +* Returns: {CryptoKey} + +Converts a `KeyObject` instance to a `CryptoKey`. + ### `keyObject.type` + +* Type: {Date} + +The date/time from which this certificate is valid, encapsulated in a `Date` object. + ### `x509.validTo` + +* Type: {Date} + +The date/time until which this certificate is valid, encapsulated in a `Date` object. + ### `x509.verify(publicKey)` > Stability: 1 - Experimental diff --git a/doc/api/errors.md b/doc/api/errors.md index 663fd7d35ba561..11aa1b56e9cd4d 100644 --- a/doc/api/errors.md +++ b/doc/api/errors.md @@ -1301,18 +1301,20 @@ added: v16.7.0 When using [`fs.cp()`][], `src` or `dest` pointed to an invalid path. - + ### `ERR_HTTP_BODY_NOT_ALLOWED` An error is thrown when writing to an HTTP response which does not allow -contents. +contents. + + ### `ERR_HTTP_CONTENT_LENGTH_MISMATCH` Response body size doesn't match with the specified content-length header value. - + ### `ERR_FS_CP_FIFO_PIPE` @@ -2014,13 +2016,6 @@ An invalid [`package.json`][] file failed parsing. The `package.json` [`"exports"`][] field contains an invalid target mapping value for the attempted module resolution. - - -### `ERR_INVALID_PERFORMANCE_MARK` - -While using the Performance Timing API (`perf_hooks`), a performance mark is -invalid. 
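(Returning briefly to the `keyObject.toCryptoKey()` addition above before the error-code changes continue: a minimal sketch, assuming an HMAC secret key; the algorithm parameters shown are illustrative, not prescriptive.)

```js
const { generateKeySync } = require('node:crypto');

// Generate a secret KeyObject, then convert it into a WebCrypto CryptoKey.
const keyObject = generateKeySync('hmac', { length: 256 });
const cryptoKey = keyObject.toCryptoKey(
  { name: 'HMAC', hash: 'SHA-256' }, // algorithm (HmacImportParams)
  true,                              // extractable
  ['sign', 'verify'],                // keyUsages
);
console.log(cryptoKey.type); // Prints: 'secret'
```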
- ### `ERR_INVALID_PROTOCOL` @@ -2105,6 +2100,18 @@ An element in the `iterable` provided to the [WHATWG][WHATWG URL API] represent a `[name, value]` tuple – that is, if an element is not iterable, or does not consist of exactly two elements. + + +### `ERR_INVALID_TYPESCRIPT_SYNTAX` + + + +The provided TypeScript syntax is not valid or unsupported. +This could happen when using TypeScript syntax that requires +transformation with [type-stripping][]. + ### `ERR_INVALID_URI` @@ -2289,21 +2296,6 @@ function. An error occurred while attempting to retrieve the JavaScript `undefined` value. - - -### `ERR_NAPI_TSFN_START_IDLE_LOOP` - -On the main thread, values are removed from the queue associated with the -thread-safe function in an idle loop. This error indicates that an error -has occurred when attempting to start the loop. - - - -### `ERR_NAPI_TSFN_STOP_IDLE_LOOP` - -Once no more items are left in the queue, the idle loop must be suspended. This -error indicates that the idle loop has failed to stop. - ### `ERR_NOT_BUILDING_SNAPSHOT` @@ -2351,6 +2343,17 @@ compiled with ICU support. A non-context-aware native addon was loaded in a process that disallows them. + + +### `ERR_OPERATION_FAILED` + + + +An operation failed. This is typically used to signal the general failure +of an asynchronous operation. + ### `ERR_OUT_OF_RANGE` @@ -2433,6 +2436,42 @@ Accessing `Object.prototype.__proto__` has been forbidden using [`Object.setPrototypeOf`][] should be used to get and set the prototype of an object. + + +### `ERR_QUIC_CONNECTION_FAILED` + + + +> Stability: 1 - Experimental + +Establishing a QUIC connection failed. + + + +### `ERR_QUIC_ENDPOINT_CLOSED` + + + +> Stability: 1 - Experimental + +A QUIC Endpoint closed with an error. + + + +### `ERR_QUIC_OPEN_STREAM_FAILED` + + + +> Stability: 1 - Experimental + +Opening a QUIC stream failed. + ### `ERR_REQUIRE_CYCLE_MODULE` @@ -2586,6 +2625,18 @@ disconnected socket. A call was made and the UDP subsystem was not running. + + +### `ERR_SOURCE_MAP_CORRUPT` + +The source map could not be parsed because it does not exist, or is corrupt. + + + +### `ERR_SOURCE_MAP_MISSING_SOURCE` + +A file imported from a source map was not found. + ### `ERR_SQLITE_ERROR` @@ -2990,6 +3041,16 @@ try { } ``` + + +### `ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING` + + + +Type stripping is not supported for files descendent of a `node_modules` directory. + ### `ERR_USE_AFTER_CLOSE` @@ -3381,6 +3442,18 @@ removed: v15.0.0 An invalid or unknown file encoding was passed. + + +### `ERR_INVALID_PERFORMANCE_MARK` + + + +While using the Performance Timing API (`perf_hooks`), a performance mark is +invalid. + ### `ERR_INVALID_TRANSFER_OBJECT` @@ -3573,23 +3646,45 @@ removed: v10.0.0 Used by the `Node-API` when `Constructor.prototype` is not an object. - + -### `ERR_NO_LONGER_SUPPORTED` +### `ERR_NAPI_TSFN_START_IDLE_LOOP` -A Node.js API was called in an unsupported manner, such as -`Buffer.write(string, encoding, offset[, length])`. + - +On the main thread, values are removed from the queue associated with the +thread-safe function in an idle loop. This error indicates that an error +has occurred when attempting to start the loop. -### `ERR_OPERATION_FAILED` + + +### `ERR_NAPI_TSFN_STOP_IDLE_LOOP` -An operation failed. This is typically used to signal the general failure -of an asynchronous operation. +Once no more items are left in the queue, the idle loop must be suspended. This +error indicates that the idle loop has failed to stop. 
+ + + +### `ERR_NO_LONGER_SUPPORTED` + +A Node.js API was called in an unsupported manner, such as +`Buffer.write(string, encoding, offset[, length])`. @@ -4005,16 +4100,6 @@ The public key in the certificate SubjectPublicKeyInfo could not be read. An error occurred trying to allocate memory. This should never happen. - -#### `ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING` - - - -Type stripping is not supported for files descendent of a `node_modules` directory. - [ES Module]: esm.md [ICU]: intl.md#internationalization-support [JSON Web Key Elliptic Curve Registry]: https://www.iana.org/assignments/jose/jose.xhtml#web-key-elliptic-curve @@ -4109,4 +4194,5 @@ Type stripping is not supported for files descendent of a `node_modules` directo [stream-based]: stream.md [syscall]: https://man7.org/linux/man-pages/man2/syscalls.2.html [try-catch]: https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Statements/try...catch +[type-stripping]: typescript.md#type-stripping [vm]: vm.md diff --git a/doc/api/events.md b/doc/api/events.md index 1addeb123f4ed3..17688859a58d1f 100644 --- a/doc/api/events.md +++ b/doc/api/events.md @@ -1459,8 +1459,7 @@ async function foo(emitter, event, signal) { } foo(ee, 'foo', ac.signal); -ac.abort(); // Abort waiting for the event -ee.emit('foo'); // Prints: Waiting for the event was canceled! +ac.abort(); // Prints: Waiting for the event was canceled! ``` ```cjs @@ -1483,8 +1482,7 @@ async function foo(emitter, event, signal) { } foo(ee, 'foo', ac.signal); -ac.abort(); // Abort waiting for the event -ee.emit('foo'); // Prints: Waiting for the event was canceled! +ac.abort(); // Prints: Waiting for the event was canceled! ``` ### Awaiting multiple events emitted on `process.nextTick()` diff --git a/doc/api/fs.md b/doc/api/fs.md index 6fe661900a6296..0c26b4afae9db1 100644 --- a/doc/api/fs.md +++ b/doc/api/fs.md @@ -267,6 +267,7 @@ added: v16.11.0 * `start` {integer} * `end` {integer} **Default:** `Infinity` * `highWaterMark` {integer} **Default:** `64 * 1024` + * `signal` {AbortSignal|undefined} **Default:** `undefined` * Returns: {fs.ReadStream} Unlike the 16 KiB default `highWaterMark` for a {stream.Readable}, the stream @@ -6810,7 +6811,7 @@ added: - v18.20.0 --> -> Stability: 1 – Experimental +> Stability: 1 - Experimental * {string} diff --git a/doc/api/globals.md b/doc/api/globals.md index 33cc849745c643..65cdfa78fb49c4 100644 --- a/doc/api/globals.md +++ b/doc/api/globals.md @@ -453,6 +453,17 @@ changes: A browser-compatible implementation of the `Event` class. See [`EventTarget` and `Event` API][] for more details. +## `EventSource` + + + +> Stability: 1 - Experimental. Enable this API with the [`--experimental-eventsource`][] +> CLI flag. + +A browser-compatible implementation of the [`EventSource`][] class. + ## `EventTarget` + +> Stability: 1.1 - Active Development + +Flush the [module compile cache][] accumulated from modules already loaded +in the current Node.js instance to disk. This returns after all the flushing +file system operations come to an end, no matter whether they succeed or not. If there +are any errors, this will fail silently, since compile cache misses should not +interfere with the actual operation of the application. + ### Class: `module.SourceMap` + +> Stability: 1.1 - Active Development. Enable this API with the +> [`--experimental-require-module`][] CLI flag. + The `.mjs` extension is reserved for [ECMAScript Modules][].
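(Returning to `module.flushCompileCache()` documented above: a minimal sketch, assuming the on-disk compile cache has first been enabled via `module.enableCompileCache()`; the required module path is hypothetical.)

```js
const { enableCompileCache, flushCompileCache } = require('node:module');

enableCompileCache(); // Start persisting V8 compile caches for loaded modules.
require('./heavy-module'); // Hypothetical module; compiling it populates the cache.
flushCompileCache(); // Write the accumulated cache entries to disk now.
```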
Currently, if the flag `--experimental-require-module` is not used, loading an ECMAScript module using `require()` will throw a [`ERR_REQUIRE_ESM`][] @@ -243,6 +250,9 @@ If `--experimental-print-required-tla` is enabled, instead of throwing module, try to locate the top-level awaits, and print their location to help users fix them. +This feature can be detected by checking if +[`process.features.require_module`][] is `true`. + ## All together @@ -325,7 +335,7 @@ NODE_MODULES_PATHS(START) 2. let I = count of PARTS - 1 3. let DIRS = [] 4. while I >= 0, - a. if PARTS[I] = "node_modules" CONTINUE + a. if PARTS[I] = "node_modules", GOTO d. b. DIR = path join(PARTS[0 .. I] + "node_modules") c. DIRS = DIR + DIRS d. let I = I - 1 @@ -335,9 +345,12 @@ LOAD_PACKAGE_IMPORTS(X, DIR) 1. Find the closest package scope SCOPE to DIR. 2. If no scope was found, return. 3. If the SCOPE/package.json "imports" is null or undefined, return. -4. let MATCH = PACKAGE_IMPORTS_RESOLVE(X, pathToFileURL(SCOPE), - ["node", "require"]) defined in the ESM resolver. -5. RESOLVE_ESM_MATCH(MATCH). +4. If `--experimental-require-module` is enabled + a. let CONDITIONS = ["node", "require", "module-sync"] + b. Else, let CONDITIONS = ["node", "require"] +5. let MATCH = PACKAGE_IMPORTS_RESOLVE(X, pathToFileURL(SCOPE), + CONDITIONS) defined in the ESM resolver. +6. RESOLVE_ESM_MATCH(MATCH). LOAD_PACKAGE_EXPORTS(X, DIR) 1. Try to interpret X as a combination of NAME and SUBPATH where the name @@ -346,9 +359,12 @@ LOAD_PACKAGE_EXPORTS(X, DIR) return. 3. Parse DIR/NAME/package.json, and look for "exports" field. 4. If "exports" is null or undefined, return. -5. let MATCH = PACKAGE_EXPORTS_RESOLVE(pathToFileURL(DIR/NAME), "." + SUBPATH, - `package.json` "exports", ["node", "require"]) defined in the ESM resolver. -6. RESOLVE_ESM_MATCH(MATCH) +5. If `--experimental-require-module` is enabled + a. let CONDITIONS = ["node", "require", "module-sync"] + b. Else, let CONDITIONS = ["node", "require"] +6. let MATCH = PACKAGE_EXPORTS_RESOLVE(pathToFileURL(DIR/NAME), "." + SUBPATH, + `package.json` "exports", CONDITIONS) defined in the ESM resolver. +7. RESOLVE_ESM_MATCH(MATCH) LOAD_PACKAGE_SELF(X, DIR) 1. Find the closest package scope SCOPE to DIR. @@ -1183,6 +1199,7 @@ This section was moved to [GLOBAL_FOLDERS]: #loading-from-the-global-folders [`"main"`]: packages.md#main [`"type"`]: packages.md#type +[`--experimental-require-module`]: cli.md#--experimental-require-module [`ERR_REQUIRE_ASYNC_MODULE`]: errors.md#err_require_async_module [`ERR_REQUIRE_ESM`]: errors.md#err_require_esm [`ERR_UNSUPPORTED_DIR_IMPORT`]: errors.md#err_unsupported_dir_import @@ -1200,6 +1217,7 @@ This section was moved to [`node:test`]: test.md [`package.json`]: packages.md#nodejs-packagejson-field-definitions [`path.dirname()`]: path.md#pathdirnamepath +[`process.features.require_module`]: process.md#processfeaturesrequire_module [`require.main`]: #requiremain [exports shortcut]: #exports-shortcut [module resolution]: #all-together diff --git a/doc/api/net.md b/doc/api/net.md index a8d6f94e7713e3..83de5d5afc1fcf 100644 --- a/doc/api/net.md +++ b/doc/api/net.md @@ -14,7 +14,11 @@ TCP or [IPC][] servers ([`net.createServer()`][]) and clients It can be accessed using: -```js +```mjs +import net from 'node:net'; +``` + +```cjs const net = require('node:net'); ``` @@ -658,6 +662,9 @@ changes: - version: v15.14.0 pr-url: https://github.com/nodejs/node/pull/37735 description: AbortSignal support was added. 
+ - version: v12.10.0 + pr-url: https://github.com/nodejs/node/pull/25436 + description: Added `onread` option. --> * `options` {Object} Available options are: @@ -986,9 +993,6 @@ changes: pr-url: https://github.com/nodejs/node/pull/41310 description: The `noDelay`, `keepAlive`, and `keepAliveInitialDelay` options are supported now. - - version: v12.10.0 - pr-url: https://github.com/nodejs/node/pull/25436 - description: Added `onread` option. - version: v6.0.0 pr-url: https://github.com/nodejs/node/pull/6021 description: The `hints` option defaults to `0` in all cases now. @@ -1531,7 +1535,23 @@ Additional options: Following is an example of a client of the echo server described in the [`net.createServer()`][] section: -```js +```mjs +import net from 'node:net'; +const client = net.createConnection({ port: 8124 }, () => { + // 'connect' listener. + console.log('connected to server!'); + client.write('world!\r\n'); +}); +client.on('data', (data) => { + console.log(data.toString()); + client.end(); +}); +client.on('end', () => { + console.log('disconnected from server'); +}); +``` + +```cjs const net = require('node:net'); const client = net.createConnection({ port: 8124 }, () => { // 'connect' listener. @@ -1558,10 +1578,26 @@ option. In this case, the `onread` option will be only used to call `new net.Socket([options])` and the `port` option will be used to call `socket.connect(options[, connectListener])`. -```js +```mjs +import net from 'node:net'; +import { Buffer } from 'node:buffer'; +net.createConnection({ + port: 8124, + onread: { + // Reuses a 4KiB Buffer for every read from the socket. + buffer: Buffer.alloc(4 * 1024), + callback: function(nread, buf) { + // Received data is available in `buf` from 0 to `nread`. + console.log(buf.toString('utf8', 0, nread)); + }, + }, +}); +``` + +```cjs const net = require('node:net'); net.createConnection({ - port: 80, + port: 8124, onread: { // Reuses a 4KiB Buffer for every read from the socket. buffer: Buffer.alloc(4 * 1024), @@ -1684,7 +1720,26 @@ The server can be a TCP server or an [IPC][] server, depending on what it Here is an example of a TCP echo server which listens for connections on port 8124: -```js +```mjs +import net from 'node:net'; +const server = net.createServer((c) => { + // 'connection' listener. + console.log('client connected'); + c.on('end', () => { + console.log('client disconnected'); + }); + c.write('hello\r\n'); + c.pipe(c); +}); +server.on('error', (err) => { + throw err; +}); +server.listen(8124, () => { + console.log('server bound'); +}); +``` + +```cjs const net = require('node:net'); const server = net.createServer((c) => { // 'connection' listener. @@ -1743,7 +1798,9 @@ added: v19.4.0 Sets the default value of the `autoSelectFamily` option of [`socket.connect(options)`][]. -* `value` {boolean} The new default value. The initial default value is `false`. +* `value` {boolean} The new default value. + The initial default value is `true`, unless the command line option + `--no-network-family-autoselection` is provided. ## `net.getDefaultAutoSelectFamilyAttemptTimeout()` diff --git a/doc/api/packages.md b/doc/api/packages.md index 9e55b053e75993..5648f56df68996 100644 --- a/doc/api/packages.md +++ b/doc/api/packages.md @@ -665,6 +665,10 @@ specific to least specific as conditions should be defined: formats include CommonJS, JSON, native addons, and ES modules if `--experimental-require-module` is enabled. 
_Always mutually exclusive with `"import"`._ +* `"module-sync"` - matches no matter whether the package is loaded via `import`, + `import()`, or `require()`. The format is expected to be an ES module that does + not contain top-level await in its module graph - if it does, + `ERR_REQUIRE_ASYNC_MODULE` will be thrown when the module is `require()`-ed. * `"default"` - the generic fallback that always matches. Can be a CommonJS or ES module file. _This condition should always come last._ @@ -755,7 +759,7 @@ Any number of custom conditions can be set with repeat flags. ### Community Conditions Definitions -Condition strings other than the `"import"`, `"require"`, `"node"`, +Condition strings other than the `"import"`, `"require"`, `"node"`, `"module-sync"`, `"node-addons"` and `"default"` conditions [implemented in Node.js core](#conditional-exports) are ignored by default. @@ -886,6 +890,17 @@ $ node other.js ## Dual CommonJS/ES module packages + + Prior to the introduction of support for ES modules in Node.js, it was a common pattern for package authors to include both CommonJS and ES module JavaScript sources in their package, with `package.json` [`"main"`][] specifying the @@ -898,7 +913,7 @@ ignores) the top-level `"module"` field. Node.js can now run ES module entry points, and a package can contain both CommonJS and ES module entry points (either via separate specifiers such as `'pkg'` and `'pkg/es-module'`, or both at the same specifier via [Conditional -exports][]). Unlike in the scenario where `"module"` is only used by bundlers, +exports][]). Unlike in the scenario where the top-level `"module"` field is only used by bundlers, or ES module files are transpiled into CommonJS on the fly before evaluation by Node.js, the files referenced by the ES module entry point are evaluated as ES modules. diff --git a/doc/api/permissions.md b/doc/api/permissions.md index fd1a8e5859fae1..e37c2982bc146a 100644 --- a/doc/api/permissions.md +++ b/doc/api/permissions.md @@ -48,7 +48,7 @@ will be restricted. ```console $ node --experimental-permission index.js node:internal/modules/cjs/loader:171 - const result = internalModuleStat(filename); + const result = internalModuleStat(receiver, filename); ^ Error: Access to this API has been restricted diff --git a/doc/api/process.md b/doc/api/process.md index d3606a7f2e3f5b..1fa77523f1b667 100644 --- a/doc/api/process.md +++ b/doc/api/process.md @@ -1888,6 +1888,121 @@ a code. Specifying a code to [`process.exit(code)`][`process.exit()`] will override any previous setting of `process.exitCode`. +## `process.features.cached_builtins` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build is caching builtin modules. + +## `process.features.debug` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build is a debug build. + +## `process.features.inspector` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build includes the inspector. + +## `process.features.ipv6` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build includes support for IPv6. + +## `process.features.require_module` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build supports +[loading ECMAScript modules using `require()`][]. + +## `process.features.tls` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build includes support for TLS.
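(A short sketch of the `process.features.require_module` feature check described above; the `.mjs` path is hypothetical.)

```js
// Feature-detect support for loading ES modules via require() at runtime.
if (process.features.require_module) {
  const helper = require('./helper.mjs'); // Hypothetical ES module.
  console.log(Object.keys(helper));
}
```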
+ +## `process.features.tls_alpn` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build includes support for ALPN in TLS. + +## `process.features.tls_ocsp` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build includes support for OCSP in TLS. + +## `process.features.tls_sni` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build includes support for SNI in TLS. + +## `process.features.typescript` + + + +> Stability: 1.0 - Early development + +* {boolean|string} + +A value that is `"strip"` if Node.js is run with `--experimental-strip-types`, +`"transform"` if Node.js is run with `--experimental-transform-types`, and `false` otherwise. + +## `process.features.uv` + + + +* {boolean} + +A boolean value that is `true` if the current Node.js build includes support for libuv. +Since it's currently not possible to build Node.js without libuv, this value is always `true`. + ## `process.finalization.register(ref, callback)` - -> Stability: 0 - Deprecated. - -* `keyword` {string} the potential keyword to parse and execute -* `rest` {any} any parameters to the keyword command -* Returns: {boolean} - -An internal method used to parse and execute `REPLServer` keywords. -Returns `true` if `keyword` is a valid keyword, otherwise `false`. - ### `replServer.setupHistory(historyPath, callback)` -* Returns: {string} The source SQL expanded to include parameter values. +* {string} The source SQL expanded to include parameter values. -This method returns the source SQL of the prepared statement with parameter +The source SQL text of the prepared statement with parameter placeholders replaced by the values that were used during the most recent -execution of this prepared statement. This method is a wrapper around +execution of this prepared statement. This property is a wrapper around [`sqlite3_expanded_sql()`][]. ### `statement.get([namedParameters][, ...anonymousParameters])` @@ -288,15 +297,15 @@ be used to read `INTEGER` data using JavaScript `BigInt`s. This method has no impact on database write operations where numbers and `BigInt`s are both supported at all times. -### `statement.sourceSQL()` +### `statement.sourceSQL` -* Returns: {string} The source SQL used to create this prepared statement. +* {string} The source SQL used to create this prepared statement. -This method returns the source SQL of the prepared statement. This method is a +The source SQL text of the prepared statement. This property is a wrapper around [`sqlite3_sql()`][]. ### Type conversion between JavaScript and SQLite @@ -317,6 +326,7 @@ exception. [SQL injection]: https://en.wikipedia.org/wiki/SQL_injection [`--experimental-sqlite`]: cli.md#--experimental-sqlite +[`PRAGMA foreign_keys`]: https://www.sqlite.org/pragma.html#pragma_foreign_keys [`sqlite3_changes64()`]: https://www.sqlite.org/c3ref/changes.html [`sqlite3_close_v2()`]: https://www.sqlite.org/c3ref/close.html [`sqlite3_exec()`]: https://www.sqlite.org/c3ref/exec.html @@ -326,6 +336,7 @@ exception. 
[`sqlite3_sql()`]: https://www.sqlite.org/c3ref/expanded_sql.html [connection]: https://www.sqlite.org/c3ref/sqlite3.html [data types]: https://www.sqlite.org/datatype3.html +[double-quoted string literals]: https://www.sqlite.org/quirks.html#dblquote [in memory]: https://www.sqlite.org/inmemorydb.html [parameters are bound]: https://www.sqlite.org/c3ref/bind_blob.html [prepared statement]: https://www.sqlite.org/c3ref/stmt.html diff --git a/doc/api/stream.md b/doc/api/stream.md index 2d21c2b837f1c1..87d7ae8923ff11 100644 --- a/doc/api/stream.md +++ b/doc/api/stream.md @@ -257,6 +257,11 @@ changes: - v18.14.0 pr-url: https://github.com/nodejs/node/pull/46205 description: Added support for `ReadableStream` and `WritableStream`. + - version: + - v19.1.0 + - v18.13.0 + pr-url: https://github.com/nodejs/node/pull/44862 + description: The `cleanup` option was added. --> * `stream` {Stream|ReadableStream|WritableStream} A readable and/or writable @@ -265,7 +270,9 @@ changes: * `error` {boolean|undefined} * `readable` {boolean|undefined} * `writable` {boolean|undefined} - * `signal`: {AbortSignal|undefined} + * `signal` {AbortSignal|undefined} + * `cleanup` {boolean|undefined} If `true`, removes the listeners registered by + this function before the promise is fulfilled. **Default:** `false`. * Returns: {Promise} Fulfills when the stream is no longer readable or writable. @@ -301,6 +308,17 @@ rs.resume(); // Drain the stream. The `finished` API also provides a [callback version][stream-finished]. +`stream.finished()` leaves dangling event listeners (in particular +`'error'`, `'end'`, `'finish'` and `'close'`) after the returned promise is +resolved or rejected. The reason for this is so that unexpected `'error'` +events (due to incorrect stream implementations) do not cause unexpected +crashes. If this is unwanted behavior then `options.cleanup` should be set to +`true`: + +```js +await finished(rs, { cleanup: true }); +``` + ### Object mode All streams created by Node.js APIs operate exclusively on strings, {Buffer}, @@ -2744,8 +2762,6 @@ changes: underlying stream will _not_ be aborted if the signal is aborted. The callback will get called with an `AbortError`. All registered listeners added by this function will also be removed. - * `cleanup` {boolean} remove all registered stream listeners. - **Default:** `false`. * `callback` {Function} A callback function that takes an optional error argument. * Returns: {Function} A cleanup function which removes all registered diff --git a/doc/api/test.md b/doc/api/test.md index 48214a4b75f56f..8052bbed3e875e 100644 --- a/doc/api/test.md +++ b/doc/api/test.md @@ -422,12 +422,22 @@ node --test By default, Node.js will run all files matching these patterns: -* `**/*.test.?(c|m)js` -* `**/*-test.?(c|m)js` -* `**/*_test.?(c|m)js` -* `**/test-*.?(c|m)js` -* `**/test.?(c|m)js` -* `**/test/**/*.?(c|m)js` +* `**/*.test.{cjs,mjs,js}` +* `**/*-test.{cjs,mjs,js}` +* `**/*_test.{cjs,mjs,js}` +* `**/test-*.{cjs,mjs,js}` +* `**/test.{cjs,mjs,js}` +* `**/test/**/*.{cjs,mjs,js}` + +When [`--experimental-strip-types`][] is supplied, the following +additional patterns are matched: + +* `**/*.test.{cts,mts,ts}` +* `**/*-test.{cts,mts,ts}` +* `**/*_test.{cts,mts,ts}` +* `**/test-*.{cts,mts,ts}` +* `**/test.{cts,mts,ts}` +* `**/test/**/*.{cts,mts,ts}` Alternatively, one or more glob patterns can be provided as the final argument(s) to the Node.js command, as shown below. 
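(Before the test-runner options below, a quick sketch of the `node:sqlite` property changes above; run with `--experimental-sqlite`, and note the query is illustrative.)

```js
const { DatabaseSync } = require('node:sqlite');

const db = new DatabaseSync(':memory:');
const stmt = db.prepare('SELECT ? + ? AS sum');
stmt.get(1, 2); // Execute once so expandedSQL has bound values to report.
console.log(stmt.sourceSQL);   // Prints: SELECT ? + ? AS sum
console.log(stmt.expandedSQL); // Prints: SELECT 1 + 2 AS sum
```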
@@ -1248,6 +1258,9 @@ added: - v18.9.0 - v16.19.0 changes: + - version: v22.10.0 + pr-url: https://github.com/nodejs/node/pull/53937 + description: Added coverage options. - version: v22.8.0 pr-url: https://github.com/nodejs/node/pull/53927 description: Added the `isolation` option. @@ -1274,7 +1287,7 @@ changes: If `false`, it would only run one test file at a time. **Default:** `false`. * `files`: {Array} An array containing the list of files to run. - **Default** matching files from [test runner execution model][]. + **Default:** matching files from [test runner execution model][]. * `forceExit`: {boolean} Configures the test runner to exit the process once all known tests have finished executing even if the event loop would otherwise remain active. **Default:** `false`. @@ -1296,6 +1309,12 @@ changes: * `setup` {Function} A function that accepts the `TestsStream` instance and can be used to setup listeners before any tests are run. **Default:** `undefined`. + * `execArgv` {Array} An array of CLI flags to pass to the `node` executable when + spawning the subprocesses. This option has no effect when `isolation` is `'none'`. + **Default:** `[]`. + * `argv` {Array} An array of CLI flags to pass to each test file when spawning the + subprocesses. This option has no effect when `isolation` is `'none'`. + **Default:** `[]`. * `signal` {AbortSignal} Allows aborting an in-progress test execution. * `testNamePatterns` {string|RegExp|Array} A String, RegExp or a RegExp Array, that can be used to only run tests whose name matches the provided pattern. @@ -1319,6 +1338,29 @@ changes: that specifies the index of the shard to run. This option is _required_. * `total` {number} is a positive integer that specifies the total number of shards to split the test files to. This option is _required_. + * `coverage` {boolean} enable [code coverage][] collection. + **Default:** `false`. + * `coverageExcludeGlobs` {string|Array} Excludes specific files from code coverage + using a glob pattern, which can match both absolute and relative file paths. + This property is only applicable when `coverage` was set to `true`. + If both `coverageExcludeGlobs` and `coverageIncludeGlobs` are provided, + files must meet **both** criteria to be included in the coverage report. + **Default:** `undefined`. + * `coverageIncludeGlobs` {string|Array} Includes specific files in code coverage + using a glob pattern, which can match both absolute and relative file paths. + This property is only applicable when `coverage` was set to `true`. + If both `coverageExcludeGlobs` and `coverageIncludeGlobs` are provided, + files must meet **both** criteria to be included in the coverage report. + **Default:** `undefined`. + * `lineCoverage` {number} Require a minimum percent of covered lines. If code + coverage does not reach the threshold specified, the process will exit with code `1`. + **Default:** `0`. + * `branchCoverage` {number} Require a minimum percent of covered branches. If code + coverage does not reach the threshold specified, the process will exit with code `1`. + **Default:** `0`. + * `functionCoverage` {number} Require a minimum percent of covered functions. If code + coverage does not reach the threshold specified, the process will exit with code `1`. + **Default:** `0`. * Returns: {TestsStream} **Note:** `shard` is used to horizontally parallelize test running across @@ -2078,8 +2120,10 @@ added: v22.3.0 This function is used to mock the exports of ECMAScript modules, CommonJS modules, and Node.js builtin modules.
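(An aside before the mocking details continue: a sketch of the new coverage options on `run()` documented above; the file path is hypothetical and the threshold arbitrary.)

```mjs
import { run } from 'node:test';
import { spec } from 'node:test/reporters';

run({
  files: ['./test/example.test.js'], // Hypothetical test file.
  coverage: true,    // Enable code coverage collection.
  lineCoverage: 80,  // Exit with code 1 below 80% line coverage.
})
  .compose(spec)
  .pipe(process.stdout);
```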
Any references to the original module -prior to mocking are not impacted. The following example demonstrates how a mock -is created for a module. +prior to mocking are not impacted. In order to enable module mocking, Node.js must +be started with the [`--experimental-test-module-mocks`][] command-line flag. + +The following example demonstrates how a mock is created for a module. ```js test('mocks a builtin module in both module systems', async (t) => { @@ -3008,6 +3052,31 @@ This event is only emitted if `--test` flag is passed. This event is not guaranteed to be emitted in the same order as the tests are defined. +### Event: `'test:summary'` + +* `data` {Object} + * `counts` {Object} An object containing the counts of various test results. + * `cancelled` {number} The total number of cancelled tests. + * `failed` {number} The total number of failed tests. + * `passed` {number} The total number of passed tests. + * `skipped` {number} The total number of skipped tests. + * `suites` {number} The total number of suites run. + * `tests` {number} The total number of tests run, excluding suites. + * `todo` {number} The total number of TODO tests. + * `topLevel` {number} The total number of top level tests and suites. + * `duration_ms` {number} The duration of the test run in milliseconds. + * `file` {string|undefined} The path of the test file that generated the + summary. If the summary corresponds to multiple files, this value is + `undefined`. + * `success` {boolean} Indicates whether or not the test run is considered + successful. If any error condition occurs, such as a failing test or + unmet coverage threshold, this value will be set to `false`. + +Emitted when a test run completes. This event contains metrics pertaining to +the completed test run, and is useful for determining if a test run passed or +failed. If process-level test isolation is used, a `'test:summary'` event is +generated for each test file in addition to a final cumulative summary. + ### Event: `'test:watch:drained'` Emitted when no more tests are queued for execution in watch mode. @@ -3499,7 +3568,9 @@ Can be used to abort test subtasks when the test has been aborted. [TAP]: https://testanything.org/ [TTY]: tty.md +[`--experimental-strip-types`]: cli.md#--experimental-strip-types [`--experimental-test-coverage`]: cli.md#--experimental-test-coverage +[`--experimental-test-module-mocks`]: cli.md#--experimental-test-module-mocks [`--experimental-test-snapshots`]: cli.md#--experimental-test-snapshots [`--import`]: cli.md#--importmodule [`--test-concurrency`]: cli.md#--test-concurrency
[`run()`]: #runoptions [`suite()`]: #suitename-options-fn [`test()`]: #testname-options-fn +[code coverage]: #collecting-code-coverage [describe options]: #describename-options-fn [it options]: #testname-options-fn [stream.compose]: stream.md#streamcomposestreams diff --git a/doc/api/util.md b/doc/api/util.md index 172b9645ee0c45..309428eebdec51 100644 --- a/doc/api/util.md +++ b/doc/api/util.md @@ -395,7 +395,7 @@ function exampleFunction() { console.log(`CallSite ${index + 1}:`); console.log(`Function Name: ${callSite.functionName}`); console.log(`Script Name: ${callSite.scriptName}`); - console.log(`Line Number: ${callSite.lineNumer}`); + console.log(`Line Number: ${callSite.lineNumber}`); console.log(`Column Number: ${callSite.column}`); }); // CallSite 1: diff --git a/doc/api/worker_threads.md b/doc/api/worker_threads.md index b718a90f621178..edba8c15c409d7 100644 --- a/doc/api/worker_threads.md +++ b/doc/api/worker_threads.md @@ -194,6 +194,38 @@ isMarkedAsUntransferable(pooledBuffer); // Returns true. There is no equivalent to this API in browsers. +## `worker.markAsUncloneable(object)` + + + +* `object` {any} Any arbitrary JavaScript value. + +Mark an object as not cloneable. If `object` is used as [`message`](#event-message) in +a [`port.postMessage()`][] call, an error is thrown. This is a no-op if `object` is a +primitive value. + +This has no effect on `ArrayBuffer`, or any `Buffer` like objects. + +This operation cannot be undone. + +```js +const { markAsUncloneable } = require('node:worker_threads'); + +const anyObject = { foo: 'bar' }; +markAsUncloneable(anyObject); +const { port1 } = new MessageChannel(); +try { + // This will throw an error, because anyObject is not cloneable. + port1.postMessage(anyObject) +} catch (error) { + // error.name === 'DataCloneError' +} +``` + +There is no equivalent to this API in browsers. + ## `worker.moveMessagePortToContext(port, contextifiedSandbox)` - -Activate inspector on `host:port` and break at start of the first internal -JavaScript script executed when the inspector is available. -Default `host:port` is `127.0.0.1:9229`. +Pauses execution at the start of Node.js application code, waiting for a debugger to connect on the specified +`host` and `port`. This is useful for debugging application startup issues. If `host` and `port` are not +provided, it defaults to `127.0.0.1:9229`. #### `--node-snapshot` +Enables the use of Node.js snapshots, potentially improving startup performance. + #### `--test-udp-no-try-send` +Used for testing UDP functionality without attempting to send data. + #### `--trace-promises` +Enables tracing of promises for debugging and performance analysis. + #### `--verify-base-objects` + +Allows verification of base objects for debugging purposes. diff --git a/doc/contributing/maintaining/maintaining-dependencies.md b/doc/contributing/maintaining/maintaining-dependencies.md index d7e884514bfe16..8ebbd3f7bb8924 100644 --- a/doc/contributing/maintaining/maintaining-dependencies.md +++ b/doc/contributing/maintaining/maintaining-dependencies.md @@ -126,6 +126,17 @@ can be added by: paste one of the existing entries and then update to match the import name for the dependency and the #define generated. +* if the version of the dependency is reported in `process.versions`, + update `src/node_metadata.h` and `src/node_metadata.cc` so that the + version is not reported when the dependency is externalized. 
+ Not reporting the version is better than incorrectly reporting + the version of the dependency bundled with Node.js, instead of the + version for the externalized dependency. Use one of the existing + externalized dependencies, like Undici, as an example of how to + update these files correctly. Make sure to run the tests with the + dependency externalized, as the tests will also need to be updated + to handle this properly. + ## Supporting non-externalized dependencies with JavaScript code If the dependency consists of JavaScript in the diff --git a/doc/contributing/primordials.md b/doc/contributing/primordials.md index e7d94147a8759b..bbffca00806c66 100644 --- a/doc/contributing/primordials.md +++ b/doc/contributing/primordials.md @@ -10,6 +10,7 @@ important than reliability against prototype pollution: * `node:http` * `node:http2` * `node:tls` +* `node:zlib` Usage of primordials should be preferred for new code in other areas, but replacing current code with primordials should be diff --git a/doc/node.1 b/doc/node.1 index eb7ad01aaee198..46829cc3b2b0de 100644 --- a/doc/node.1 +++ b/doc/node.1 @@ -160,6 +160,9 @@ Requires Node.js to be built with .It Fl -enable-source-maps Enable Source Map V3 support for stack traces. . +.It Fl -entry-url +Interpret the entry point as a URL. +. .It Fl -experimental-default-type Ns = Ns Ar type Interpret as either ES modules or CommonJS modules input via --eval or STDIN, when --input-type is unspecified; .js or extensionless files with no sibling or parent package.json; diff --git a/eslint.config.mjs b/eslint.config.mjs index 67d21b540f945a..2ba63203d8e1b9 100644 --- a/eslint.config.mjs +++ b/eslint.config.mjs @@ -313,6 +313,7 @@ export default [ 'node-core/no-unescaped-regexp-dot': 'error', 'node-core/no-duplicate-requires': 'error', 'node-core/prefer-proto': 'error', + 'node-core/prefer-optional-chaining': 'error', }, }, // #endregion diff --git a/lib/_http_agent.js b/lib/_http_agent.js index 1f7989b843c073..7e5c224470cac4 100644 --- a/lib/_http_agent.js +++ b/lib/_http_agent.js @@ -128,7 +128,7 @@ function Agent(options) { } const requests = this.requests[name]; - if (requests && requests.length) { + if (requests?.length) { const req = requests.shift(); const reqAsyncRes = req[kRequestAsyncResource]; if (reqAsyncRes) { @@ -437,7 +437,7 @@ Agent.prototype.removeSocket = function removeSocket(s, options) { } let req; - if (this.requests[name] && this.requests[name].length) { + if (this.requests[name]?.length) { debug('removeSocket, have a request, make a socket'); req = this.requests[name][0]; } else { @@ -449,7 +449,7 @@ Agent.prototype.removeSocket = function removeSocket(s, options) { for (let i = 0; i < keys.length; i++) { const prop = keys[i]; // Check whether this specific origin is already at maxSockets - if (this.sockets[prop] && this.sockets[prop].length) break; + if (this.sockets[prop]?.length) break; debug('removeSocket, have a request with different origin,' + ' make a socket'); req = this.requests[prop][0]; diff --git a/lib/_http_client.js b/lib/_http_client.js index 6d3b3591e7ba40..750536dd06cac9 100644 --- a/lib/_http_client.js +++ b/lib/_http_client.js @@ -174,7 +174,7 @@ function ClientRequest(input, options, cb) { const protocol = options.protocol || defaultAgent.protocol; let expectedProtocol = defaultAgent.protocol; - if (this.agent && this.agent.protocol) + if (this.agent?.protocol) expectedProtocol = this.agent.protocol; if (options.path) { @@ -190,7 +190,7 @@ function ClientRequest(input, options, cb) { } const defaultPort = 
options.defaultPort || - (this.agent && this.agent.defaultPort); + (this.agent?.defaultPort); const optsWithoutSignal = { __proto__: null, ...options }; @@ -553,7 +553,7 @@ function socketOnData(d) { socket.destroy(); req.socket._hadError = true; emitErrorEvent(req, ret); - } else if (parser.incoming && parser.incoming.upgrade) { + } else if (parser.incoming?.upgrade) { // Upgrade (if status code 101) or CONNECT const bytesParsed = ret; const res = parser.incoming; @@ -591,7 +591,7 @@ function socketOnData(d) { // Requested Upgrade or used CONNECT method, but have no handler. socket.destroy(); } - } else if (parser.incoming && parser.incoming.complete && + } else if (parser.incoming?.complete && // When the status code is informational (100, 102-199), // the server will send a final response after this client // sends a request body, so we must not free the parser. @@ -838,7 +838,7 @@ function tickOnSocket(req, socket) { if ( req.timeout !== undefined || - (req.agent && req.agent.options && req.agent.options.timeout) + (req.agent?.options?.timeout) ) { listenSocketTimeout(req); } diff --git a/lib/_http_common.js b/lib/_http_common.js index 5ccf43e1d66668..96d9bdfc9fcbe5 100644 --- a/lib/_http_common.js +++ b/lib/_http_common.js @@ -85,8 +85,7 @@ function parserOnHeadersComplete(versionMajor, versionMinor, headers, method, } // Parser is also used by http client - const ParserIncomingMessage = (socket && socket.server && - socket.server[kIncomingMessage]) || + const ParserIncomingMessage = (socket?.server?.[kIncomingMessage]) || IncomingMessage; const incoming = parser.incoming = new ParserIncomingMessage(socket); diff --git a/lib/_http_incoming.js b/lib/_http_incoming.js index 1dd04fdf3e4228..c3e901e53e8b90 100644 --- a/lib/_http_incoming.js +++ b/lib/_http_incoming.js @@ -242,7 +242,7 @@ IncomingMessage.prototype._destroy = function _destroy(err, cb) { IncomingMessage.prototype._addHeaderLines = _addHeaderLines; function _addHeaderLines(headers, n) { - if (headers && headers.length) { + if (headers?.length) { let dest; if (this.complete) { this.rawTrailers = headers; diff --git a/lib/_http_outgoing.js b/lib/_http_outgoing.js index 658ad80f994083..7a402a0f900f30 100644 --- a/lib/_http_outgoing.js +++ b/lib/_http_outgoing.js @@ -423,7 +423,7 @@ OutgoingMessage.prototype._send = function _send(data, encoding, callback, byteL OutgoingMessage.prototype._writeRaw = _writeRaw; function _writeRaw(data, encoding, callback, size) { const conn = this[kSocket]; - if (conn && conn.destroyed) { + if (conn?.destroyed) { // The socket was destroyed. If we're still trying to write to it, // then we haven't gotten the 'close' event yet. 
return false; @@ -789,7 +789,7 @@ OutgoingMessage.prototype.getHeader = function getHeader(name) { return; const entry = headers[name.toLowerCase()]; - return entry && entry[1]; + return entry?.[1]; }; @@ -1073,7 +1073,7 @@ OutgoingMessage.prototype.addTrailers = function addTrailers(headers) { }; function onFinish(outmsg) { - if (outmsg && outmsg.socket && outmsg.socket._hadError) return; + if (outmsg?.socket?._hadError) return; outmsg.emit('finish'); } @@ -1188,7 +1188,7 @@ OutgoingMessage.prototype._finish = function _finish() { OutgoingMessage.prototype._flush = function _flush() { const socket = this[kSocket]; - if (socket && socket.writable) { + if (socket?.writable) { // There might be remaining data in this.output; write it out const ret = this._flushOutput(socket); diff --git a/lib/_http_server.js b/lib/_http_server.js index 200e7a731daf08..6a0bcd75513b5c 100644 --- a/lib/_http_server.js +++ b/lib/_http_server.js @@ -738,7 +738,7 @@ function connectionListenerInternal(server, socket) { socket.setEncoding = socketSetEncoding; // We only consume the socket if it has never been consumed before. - if (socket._handle && socket._handle.isStreamBase && + if (socket._handle?.isStreamBase && !socket._handle._consumed) { parser._consumed = true; socket._handle._consumed = true; @@ -783,7 +783,7 @@ function socketOnDrain(socket, state) { } function socketOnTimeout() { - const req = this.parser && this.parser.incoming; + const req = this.parser?.incoming; const reqTimeout = req && !req.complete && req.emit('timeout', this); const res = this._httpMessage; const resTimeout = res && res.emit('timeout', this); @@ -918,7 +918,7 @@ function onParserExecuteCommon(server, socket, parser, state, ret, d) { prepareError(ret, parser, d); debug('parse error', ret); socketOnError.call(socket, ret); - } else if (parser.incoming && parser.incoming.upgrade) { + } else if (parser.incoming?.upgrade) { // Upgrade or CONNECT const req = parser.incoming; debug('SERVER upgrade or connect', req.method); @@ -963,7 +963,7 @@ function onParserExecuteCommon(server, socket, parser, state, ret, d) { function clearIncoming(req) { req = req || this; - const parser = req.socket && req.socket.parser; + const parser = req.socket?.parser; // Reset the .incoming property so that the request object can be gc'ed. 
if (parser && parser.incoming === req) { if (req.readableEnded) { @@ -1180,7 +1180,7 @@ function onSocketResume() { } function onSocketPause() { - if (this._handle && this._handle.reading) { + if (this._handle?.reading) { this._handle.reading = false; this._handle.readStop(); } diff --git a/lib/_tls_wrap.js b/lib/_tls_wrap.js index 2273b3c91b62b4..6818515d6ab684 100644 --- a/lib/_tls_wrap.js +++ b/lib/_tls_wrap.js @@ -489,7 +489,7 @@ function initRead(tlsSocket, socket) { return; // Socket already has some buffered data - emulate receiving it - if (socket && socket.readableLength) { + if (socket?.readableLength) { let buf; while ((buf = socket.read()) !== null) tlsSocket._handle.receive(buf); @@ -1683,7 +1683,7 @@ function onConnectSecure() { if (!verifyError && !this.isSessionReused()) { const hostname = options.servername || options.host || - (options.socket && options.socket._host) || + (options.socket?._host) || 'localhost'; const cert = this.getPeerCertificate(true); verifyError = options.checkServerIdentity(hostname, cert); diff --git a/lib/assert.js b/lib/assert.js index 9e54983c8682a3..d121d2718dfe95 100644 --- a/lib/assert.js +++ b/lib/assert.js @@ -446,7 +446,7 @@ function expectedException(actual, expected, message, fn) { message = 'The error is expected to be an instance of ' + `"${expected.name}". Received `; if (isError(actual)) { - const name = (actual.constructor && actual.constructor.name) || + const name = (actual.constructor?.name) || actual.name; if (expected.name === name) { message += 'an error with identical name but a different prototype.'; @@ -569,7 +569,7 @@ function expectsError(stackStartFn, actual, error, message) { if (actual === NO_EXCEPTION_SENTINEL) { let details = ''; - if (error && error.name) { + if (error?.name) { details += ` (${error.name})`; } details += message ? `: ${message}` : '.'; @@ -627,7 +627,7 @@ function expectsNoError(stackStartFn, actual, error, message) { expected: error, operator: stackStartFn.name, message: `Got unwanted ${fnType}${details}\n` + - `Actual message: "${actual && actual.message}"`, + `Actual message: "${actual?.message}"`, stackStartFn, }); } diff --git a/lib/child_process.js b/lib/child_process.js index 580a441a803bdd..51fc6fe995d3cf 100644 --- a/lib/child_process.js +++ b/lib/child_process.js @@ -392,8 +392,7 @@ function execFile(file, args, options, callback) { let stderr; if (encoding || ( - child.stdout && - child.stdout.readableEncoding + child.stdout?.readableEncoding )) { stdout = ArrayPrototypeJoin(_stdout, ''); } else { @@ -401,8 +400,7 @@ function execFile(file, args, options, callback) { } if (encoding || ( - child.stderr && - child.stderr.readableEncoding + child.stderr?.readableEncoding )) { stderr = ArrayPrototypeJoin(_stderr, ''); } else { @@ -855,7 +853,7 @@ function spawnSync(file, args, options) { // We may want to pass data in on any given fd, ensure it is a valid buffer for (let i = 0; i < options.stdio.length; i++) { - const input = options.stdio[i] && options.stdio[i].input; + const input = options.stdio[i]?.input; if (input != null) { const pipe = options.stdio[i] = { ...options.stdio[i] }; if (isArrayBufferView(input)) { diff --git a/lib/dgram.js b/lib/dgram.js index 5f93526e301f43..84b8708f701ed0 100644 --- a/lib/dgram.js +++ b/lib/dgram.js @@ -128,8 +128,8 @@ function Socket(type, listener) { bindState: BIND_STATE_UNBOUND, connectState: CONNECT_STATE_DISCONNECTED, queue: undefined, - reuseAddr: options && options.reuseAddr, // Use UV_UDP_REUSEADDR if true. 
- ipv6Only: options && options.ipv6Only, + reuseAddr: options?.reuseAddr, // Use UV_UDP_REUSEADDR if true. + ipv6Only: options?.ipv6Only, recvBufferSize, sendBufferSize, }; diff --git a/lib/domain.js b/lib/domain.js index 7da672a3691560..03d240e98d4506 100644 --- a/lib/domain.js +++ b/lib/domain.js @@ -40,6 +40,7 @@ const { ReflectApply, SafeMap, SafeWeakMap, + StringPrototypeRepeat, Symbol, } = primordials; @@ -131,7 +132,7 @@ const domainRequireStack = new Error('require(`domain`) at this point').stack; const { setUncaughtExceptionCaptureCallback } = process; process.setUncaughtExceptionCaptureCallback = function(fn) { const err = new ERR_DOMAIN_CANNOT_SET_UNCAUGHT_EXCEPTION_CAPTURE(); - err.stack = err.stack + '\n' + '-'.repeat(40) + '\n' + domainRequireStack; + err.stack += `\n${StringPrototypeRepeat('-', 40)}\n${domainRequireStack}`; throw err; }; diff --git a/lib/eslint.config_partial.mjs b/lib/eslint.config_partial.mjs index 97e4f56181564a..b6023518d451d6 100644 --- a/lib/eslint.config_partial.mjs +++ b/lib/eslint.config_partial.mjs @@ -494,6 +494,7 @@ export default [ 'lib/internal/http.js', 'lib/internal/http2/*.js', 'lib/tls.js', + 'lib/zlib.js', ], rules: { 'no-restricted-syntax': [ diff --git a/lib/events.js b/lib/events.js index 36217a8313c134..0f4a7dde08408e 100644 --- a/lib/events.js +++ b/lib/events.js @@ -51,14 +51,13 @@ const { } = primordials; const kRejection = SymbolFor('nodejs.rejection'); -const { SymbolDispose, kEmptyObject } = require('internal/util'); +const { SymbolDispose, kEmptyObject, spliceOne } = require('internal/util'); const { inspect, identicalSequenceRange, } = require('internal/util/inspect'); -let spliceOne; let FixedQueue; let kFirstEventParam; let kResistStopPropagation; @@ -717,8 +716,6 @@ EventEmitter.prototype.removeListener = if (position === 0) list.shift(); else { - if (spliceOne === undefined) - spliceOne = require('internal/util').spliceOne; spliceOne(list, position); } diff --git a/lib/fs.js b/lib/fs.js index 1497c0935a130c..11c53abc917875 100644 --- a/lib/fs.js +++ b/lib/fs.js @@ -1414,7 +1414,7 @@ function readdirSyncRecursive(basePath, options) { for (let i = 0; i < readdirResult.length; i++) { const resultPath = pathModule.join(path, readdirResult[i]); const relativeResultPath = pathModule.relative(basePath, resultPath); - const stat = binding.internalModuleStat(resultPath); + const stat = binding.internalModuleStat(binding, resultPath); ArrayPrototypePush(readdirResults, relativeResultPath); // 1 indicates directory if (stat === 1) { diff --git a/lib/internal/README.md b/lib/internal/README.md index d44321c6a24392..f02d352f6cdd04 100644 --- a/lib/internal/README.md +++ b/lib/internal/README.md @@ -1,6 +1,13 @@ # Internal Modules -The modules in `lib/internal` are intended for internal use in Node.js core -only, and are not accessible with `require()` from user modules. These modules -can be changed at **any** time. Reliance on these modules outside of core -is **not supported** in any way. +The modules located in `lib/internal` directory are exclusively meant +for internal usage within the Node.js core. They are not intended to +be accessed via user modules `require()`. These modules may change at +any point in time. Relying on these internal modules outside the core +is not supported and can lead to unpredictable behavior. + +In certain scenarios, accessing these internal modules for debugging or +experimental purposes might be necessary. Node.js provides the `--expose-internals` +flag to expose these modules to userland code. 
This flag only exists to +assist Node.js maintainers with debugging internals. It is not meant for +use outside the project. diff --git a/lib/internal/abort_controller.js b/lib/internal/abort_controller.js index ce05571c067398..3b34ed34afe346 100644 --- a/lib/internal/abort_controller.js +++ b/lib/internal/abort_controller.js @@ -4,9 +4,11 @@ // in https://github.com/mysticatea/abort-controller (MIT license) const { + ArrayPrototypePush, ObjectAssign, ObjectDefineProperties, ObjectDefineProperty, + ObjectSetPrototypeOf, PromiseResolve, SafeFinalizationRegistry, SafeSet, @@ -66,10 +68,10 @@ const { kDeserialize, kTransfer, kTransferList, + markTransferMode, } = require('internal/worker/js_transferable'); let _MessageChannel; -let markTransferMode; const kDontThrowSymbol = Symbol('kDontThrowSymbol'); @@ -82,12 +84,6 @@ function lazyMessageChannel() { return new _MessageChannel(); } -function lazyMarkTransferMode(obj, cloneable, transferable) { - markTransferMode ??= - require('internal/worker/js_transferable').markTransferMode; - markTransferMode(obj, cloneable, transferable); -} - const clearTimeoutRegistry = new SafeFinalizationRegistry(clearTimeout); const gcPersistentSignals = new SafeSet(); @@ -168,7 +164,7 @@ class AbortSignal extends EventTarget { this[kReason] = reason; this[kComposite] = composite; if (transferable) { - lazyMarkTransferMode(this, false, true); + markTransferMode(this, false, true); } } @@ -372,18 +368,46 @@ ObjectDefineProperty(AbortSignal.prototype, SymbolToStringTag, { defineEventHandler(AbortSignal.prototype, 'abort'); +// https://dom.spec.whatwg.org/#dom-abortsignal-abort function abortSignal(signal, reason) { + // 1. If signal is aborted, then return. if (signal[kAborted]) return; + + // 2. Set signal's abort reason to reason if it is given; + // otherwise to a new "AbortError" DOMException. signal[kAborted] = true; signal[kReason] = reason; + // 3. Let dependentSignalsToAbort be a new list. + const dependentSignalsToAbort = ObjectSetPrototypeOf([], null); + // 4. For each dependentSignal of signal's dependent signals: + signal[kDependantSignals]?.forEach((s) => { + const dependentSignal = s.deref(); + // 1. If dependentSignal is not aborted, then: + if (dependentSignal && !dependentSignal[kAborted]) { + // 1. Set dependentSignal's abort reason to signal's abort reason. + dependentSignal[kReason] = reason; + dependentSignal[kAborted] = true; + // 2. Append dependentSignal to dependentSignalsToAbort. + ArrayPrototypePush(dependentSignalsToAbort, dependentSignal); + } + }); + + // 5. Run the abort steps for signal + runAbort(signal); + // 6. For each dependentSignal of dependentSignalsToAbort, + // run the abort steps for dependentSignal. 
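The rewritten abortSignal() here follows the WHATWG two-phase algorithm: every signal in the dependency graph is flagged aborted first, and only afterwards are the 'abort' events dispatched via runAbort() below. A small script showing the observable effect through AbortSignal.any(), on the assumption that listener-visible state consistency is the point of the change:

  const controller = new AbortController();
  const dependent = AbortSignal.any([controller.signal]);
  dependent.addEventListener('abort', () => {
    // Phase one already flagged the whole graph, so even the first
    // listener to run sees consistent aborted/reason state everywhere.
    console.log(controller.signal.aborted, dependent.aborted);  // true true
    console.log(dependent.reason === controller.signal.reason); // true
  });
  controller.abort(new Error('stop'));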
+ for (let i = 0; i < dependentSignalsToAbort.length; i++) { + const dependentSignal = dependentSignalsToAbort[i]; + runAbort(dependentSignal); + } +} + +// To run the abort steps for an AbortSignal signal +function runAbort(signal) { const event = new Event('abort', { [kTrustEvent]: true, }); signal.dispatchEvent(event); - signal[kDependantSignals]?.forEach((s) => { - const signalRef = s.deref(); - if (signalRef) abortSignal(signalRef, reason); - }); } class AbortController { @@ -425,7 +449,7 @@ class AbortController { function transferableAbortSignal(signal) { if (signal?.[kAborted] === undefined) throw new ERR_INVALID_ARG_TYPE('signal', 'AbortSignal', signal); - lazyMarkTransferMode(signal, false, true); + markTransferMode(signal, false, true); return signal; } diff --git a/lib/internal/assert/assertion_error.js b/lib/internal/assert/assertion_error.js index 5d8c45040af0fe..2f222a3d85ebf9 100644 --- a/lib/internal/assert/assertion_error.js +++ b/lib/internal/assert/assertion_error.js @@ -72,7 +72,11 @@ function inspectValue(val) { ); } -function createErrDiff(actual, expected, operator) { +function getErrorMessage(operator, message) { + return message || kReadableOperator[operator]; +} + +function createErrDiff(actual, expected, operator, message = '') { let other = ''; let res = ''; let end = ''; @@ -110,7 +114,7 @@ function createErrDiff(actual, expected, operator) { if ((typeof actual !== 'object' || actual === null) && (typeof expected !== 'object' || expected === null) && (actual !== 0 || expected !== 0)) { // -0 === +0 - return `${kReadableOperator[operator]}\n\n` + + return `${getErrorMessage(operator, message)}\n\n` + `${actualLines[0]} !== ${expectedLines[0]}\n`; } } else if (operator !== 'strictEqualObject') { @@ -184,8 +188,7 @@ function createErrDiff(actual, expected, operator) { let printedLines = 0; let identical = 0; - const msg = kReadableOperator[operator] + - `\n${colors.green}+ actual${colors.white} ${colors.red}- expected${colors.white}`; + const msg = `${getErrorMessage(operator, message)}\n${colors.green}+ actual${colors.white} ${colors.red}- expected${colors.white}`; const skippedMsg = ` ${colors.blue}...${colors.white} Lines skipped`; let lines = actualLines; @@ -337,7 +340,11 @@ class AssertionError extends Error { if (isErrorStackTraceLimitWritable()) Error.stackTraceLimit = 0; if (message != null) { - super(String(message)); + if (operator === 'deepStrictEqual' || operator === 'strictEqual') { + super(createErrDiff(actual, expected, operator, message)); + } else { + super(String(message)); + } } else { // Reset colors on each call to make sure we handle dynamically set environment // variables correct. 
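The assertion_error.js change above alters what a failing strictEqual/deepStrictEqual prints when the caller supplied a custom message: the message now heads the generated diff instead of replacing it. A quick repro of the behaviour the patch implies:

  const assert = require('node:assert');
  try {
    assert.deepStrictEqual({ a: 1 }, { a: 2 }, 'config objects diverged');
  } catch (err) {
    // err.message starts with 'config objects diverged' and keeps the
    // + actual / - expected diff that a custom message used to suppress.
    console.log(err.message);
  }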
diff --git a/lib/internal/blob.js b/lib/internal/blob.js index 4ff2b0e1e7051b..af26a1bb7536b4 100644 --- a/lib/internal/blob.js +++ b/lib/internal/blob.js @@ -54,6 +54,7 @@ const { lazyDOMException, } = require('internal/util'); const { inspect } = require('internal/util/inspect'); +const { convertToInt } = require('internal/webidl'); const { codes: { @@ -239,6 +240,12 @@ class Blob { slice(start = 0, end = this[kLength], contentType = '') { if (!isBlob(this)) throw new ERR_INVALID_THIS('Blob'); + + // Coerce values to int + const opts = { __proto__: null, signed: true }; + start = convertToInt('start', start, 64, opts); + end = convertToInt('end', end, 64, opts); + if (start < 0) { start = MathMax(this[kLength] + start, 0); } else { @@ -273,43 +280,21 @@ class Blob { if (!isBlob(this)) return PromiseReject(new ERR_INVALID_THIS('Blob')); - const { promise, resolve, reject } = createDeferredPromise(); - const reader = this[kHandle].getReader(); - const buffers = []; - const readNext = () => { - reader.pull((status, buffer) => { - if (status === 0) { - // EOS, concat & resolve - // buffer should be undefined here - resolve(concat(buffers)); - return; - } else if (status < 0) { - // The read could fail for many different reasons when reading - // from a non-memory resident blob part (e.g. file-backed blob). - // The error details the system error code. - const error = lazyDOMException('The blob could not be read', 'NotReadableError'); - reject(error); - return; - } - if (buffer !== undefined) - buffers.push(buffer); - queueMicrotask(() => readNext()); - }); - }; - readNext(); - return promise; + return arrayBuffer(this); } /** * @returns {Promise} */ - async text() { + text() { if (!isBlob(this)) - throw new ERR_INVALID_THIS('Blob'); + return PromiseReject(new ERR_INVALID_THIS('Blob')); dec ??= new TextDecoder(); - return dec.decode(await this.arrayBuffer()); + return PromisePrototypeThen( + arrayBuffer(this), + (buffer) => dec.decode(buffer)); } /** @@ -317,10 +302,10 @@ class Blob { */ bytes() { if (!isBlob(this)) - throw new ERR_INVALID_THIS('Blob'); + return PromiseReject(new ERR_INVALID_THIS('Blob')); return PromisePrototypeThen( - this.arrayBuffer(), + arrayBuffer(this), (buffer) => new Uint8Array(buffer)); } @@ -439,6 +424,7 @@ ObjectDefineProperties(Blob.prototype, { stream: kEnumerableProperty, text: kEnumerableProperty, arrayBuffer: kEnumerableProperty, + bytes: kEnumerableProperty, }); function resolveObjectURL(url) { @@ -490,6 +476,34 @@ function createBlobFromFilePath(path, options) { return res; } +function arrayBuffer(blob) { + const { promise, resolve, reject } = createDeferredPromise(); + const reader = blob[kHandle].getReader(); + const buffers = []; + const readNext = () => { + reader.pull((status, buffer) => { + if (status === 0) { + // EOS, concat & resolve + // buffer should be undefined here + resolve(concat(buffers)); + return; + } else if (status < 0) { + // The read could fail for many different reasons when reading + // from a non-memory resident blob part (e.g. file-backed blob). + // The error details the system error code. 
+ const error = lazyDOMException('The blob could not be read', 'NotReadableError'); + reject(error); + return; + } + if (buffer !== undefined) + buffers.push(buffer); + queueMicrotask(() => readNext()); + }); + }; + readNext(); + return promise; +} + module.exports = { Blob, createBlob, diff --git a/lib/internal/bootstrap/node.js b/lib/internal/bootstrap/node.js index f7a62364b6107a..11d585bc5b3e47 100644 --- a/lib/internal/bootstrap/node.js +++ b/lib/internal/bootstrap/node.js @@ -288,6 +288,9 @@ const features = { get cached_builtins() { return binding.hasCachedBuiltins(); }, + get require_module() { + return getOptionValue('--experimental-require-module'); + }, }; ObjectDefineProperty(process, 'features', { @@ -319,6 +322,29 @@ ObjectDefineProperty(process, 'features', { } const { emitWarning, emitWarningSync } = require('internal/process/warning'); +const { getOptionValue } = require('internal/options'); + +let kTypeStrippingMode = process.config.variables.node_use_amaro ? null : false; +// This must be a getter, as getOptionValue does not work +// before bootstrapping. +ObjectDefineProperty(process.features, 'typescript', { + __proto__: null, + get() { + if (kTypeStrippingMode === null) { + if (getOptionValue('--experimental-transform-types')) { + kTypeStrippingMode = 'transform'; + } else if (getOptionValue('--experimental-strip-types')) { + kTypeStrippingMode = 'strip'; + } else { + kTypeStrippingMode = false; + } + } + return kTypeStrippingMode; + }, + configurable: true, + enumerable: true, +}); + process.emitWarning = emitWarning; internalBinding('process_methods').setEmitWarningSync(emitWarningSync); diff --git a/lib/internal/bootstrap/switches/does_own_process_state.js b/lib/internal/bootstrap/switches/does_own_process_state.js index 21c5a0609109b1..370da66a825f65 100644 --- a/lib/internal/bootstrap/switches/does_own_process_state.js +++ b/lib/internal/bootstrap/switches/does_own_process_state.js @@ -33,6 +33,7 @@ const { parseFileMode, validateArray, validateString, + validateUint32, } = require('internal/validators'); function wrapPosixCredentialSetters(credentials) { @@ -42,9 +43,6 @@ function wrapPosixCredentialSetters(credentials) { ERR_UNKNOWN_CREDENTIAL, }, } = require('internal/errors'); - const { - validateUint32, - } = require('internal/validators'); const { initgroups: _initgroups, diff --git a/lib/internal/bootstrap/switches/is_main_thread.js b/lib/internal/bootstrap/switches/is_main_thread.js index 9d709ca743bf29..74486ba5310b78 100644 --- a/lib/internal/bootstrap/switches/is_main_thread.js +++ b/lib/internal/bootstrap/switches/is_main_thread.js @@ -251,7 +251,7 @@ function getStdin() { // `stdin` starts out life in a paused state, but node doesn't // know yet. Explicitly to readStop() it to put it in the // not-reading state. 
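Two user-visible effects of the Blob changes above, sketched under the assumption that WebIDL ConvertToInt applies its usual rounding toward zero:

  const blob = new Blob(['hello world']);
  // slice() start/end are now coerced before the negative-index math
  // runs, so a fractional -5.9 behaves like -5.
  blob.slice(-5.9).text().then(console.log); // 'world'
  // bytes() joins text() and arrayBuffer() as an enumerable prototype
  // method; all three now funnel through the shared arrayBuffer() helper.
  blob.bytes().then((u8) => console.log(u8 instanceof Uint8Array)); // true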
- if (stdin._handle && stdin._handle.readStop) { + if (stdin._handle?.readStop) { stdin._handle.reading = false; stdin._readableState.reading = false; stdin._handle.readStop(); diff --git a/lib/internal/child_process.js b/lib/internal/child_process.js index 11cac3971d4ca6..a8d3aae56b04c1 100644 --- a/lib/internal/child_process.js +++ b/lib/internal/child_process.js @@ -855,7 +855,7 @@ function setupChannel(target, channel, serializationMode) { if (handle) { if (!this._handleQueue) this._handleQueue = []; - if (obj && obj.postSend) + if (obj?.postSend) obj.postSend(message, handle, options, callback, target); } @@ -871,7 +871,7 @@ function setupChannel(target, channel, serializationMode) { } } else { // Cleanup handle on error - if (obj && obj.postSend) + if (obj?.postSend) obj.postSend(message, handle, options, callback); if (!options.swallowErrors) { @@ -1116,8 +1116,8 @@ function spawnSync(options) { } } - result.stdout = result.output && result.output[1]; - result.stderr = result.output && result.output[2]; + result.stdout = result.output?.[1]; + result.stderr = result.output?.[2]; if (result.error) { result.error = new ErrnoException(result.error, 'spawnSync ' + options.file); diff --git a/lib/internal/cluster/child.js b/lib/internal/cluster/child.js index 501fd6d739ad19..bee474567b6351 100644 --- a/lib/internal/cluster/child.js +++ b/lib/internal/cluster/child.js @@ -122,7 +122,7 @@ cluster._getServer = function(obj, options, cb) { cluster.worker.state = 'listening'; const address = obj.address(); message.act = 'listening'; - message.port = (address && address.port) || options.port; + message.port = (address?.port) || options.port; send(message); }); }; diff --git a/lib/internal/console/constructor.js b/lib/internal/console/constructor.js index c892b91919da75..d3535fa5b6a7d1 100644 --- a/lib/internal/console/constructor.js +++ b/lib/internal/console/constructor.js @@ -8,7 +8,6 @@ const { ArrayIsArray, ArrayPrototypeForEach, ArrayPrototypePush, - ArrayPrototypeSome, ArrayPrototypeUnshift, Boolean, ErrorCaptureStackTrace, @@ -60,7 +59,6 @@ const { const { CHAR_UPPERCASE_C: kTraceCount, } = require('internal/constants'); -const { styleText } = require('util'); const kCounts = Symbol('counts'); const { time, timeLog, timeEnd, kNone } = require('internal/util/debuglog'); @@ -264,7 +262,7 @@ ObjectDefineProperties(Console.prototype, { [kWriteToConsole]: { __proto__: null, ...consolePropAttributes, - value: function(streamSymbol, string, color = '') { + value: function(streamSymbol, string) { const ignoreErrors = this._ignoreErrors; const groupIndent = internalIndentationMap.get(this) || ''; @@ -279,11 +277,6 @@ ObjectDefineProperties(Console.prototype, { } string = groupIndent + string; } - - if (color) { - string = styleText(color, string); - } - string += '\n'; if (ignoreErrors === false) return stream.write(string); @@ -382,15 +375,12 @@ const consoleMethods = { log(...args) { this[kWriteToConsole](kUseStdout, this[kFormatForStdout](args)); }, + + warn(...args) { - const color = (shouldColorize(args) && 'yellow') || ''; - this[kWriteToConsole](kUseStderr, this[kFormatForStderr](args), color); + this[kWriteToConsole](kUseStderr, this[kFormatForStderr](args)); }, - error(...args) { - const color = (shouldColorize(args) && 'red') || ''; - this[kWriteToConsole](kUseStderr, this[kFormatForStderr](args), color); - }, dir(object, options) { this[kWriteToConsole](kUseStdout, inspect(object, { @@ -621,12 +611,6 @@ const iterKey = '(iteration index)'; const isArray = (v) => ArrayIsArray(v) || 
isTypedArray(v) || isBuffer(v); -// TODO: remove string type check once the styleText supports objects -// Return true if all args are type string -const shouldColorize = (args) => { - return lazyUtilColors().hasColors && !ArrayPrototypeSome(args, (arg) => typeof arg !== 'string'); -}; - function noop() {} for (const method of ReflectOwnKeys(consoleMethods)) @@ -635,6 +619,7 @@ for (const method of ReflectOwnKeys(consoleMethods)) Console.prototype.debug = Console.prototype.log; Console.prototype.info = Console.prototype.log; Console.prototype.dirxml = Console.prototype.log; +Console.prototype.error = Console.prototype.warn; Console.prototype.groupCollapsed = Console.prototype.group; function initializeGlobalConsole(globalConsole) { diff --git a/lib/internal/crypto/aes.js b/lib/internal/crypto/aes.js index bb35d2b21de261..1d5d0bdb2380d8 100644 --- a/lib/internal/crypto/aes.js +++ b/lib/internal/crypto/aes.js @@ -245,7 +245,7 @@ async function aesGenerateKey(algorithm, extractable, keyUsages) { extractable); } -async function aesImportKey( +function aesImportKey( algorithm, format, keyData, @@ -266,6 +266,11 @@ async function aesImportKey( let keyObject; let length; switch (format) { + case 'KeyObject': { + validateKeyLength(keyData.symmetricKeySize * 8); + keyObject = keyData; + break; + } case 'raw': { validateKeyLength(keyData.byteLength * 8); keyObject = createSecretKey(keyData); @@ -295,7 +300,12 @@ async function aesImportKey( } const handle = new KeyObjectHandle(); - handle.initJwk(keyData); + try { + handle.initJwk(keyData); + } catch (err) { + throw lazyDOMException( + 'Invalid keyData', { name: 'DataError', cause: err }); + } ({ length } = handle.keyDetail({ })); validateKeyLength(length); diff --git a/lib/internal/crypto/cfrg.js b/lib/internal/crypto/cfrg.js index e7141c04b3f437..7bea4ed3544f87 100644 --- a/lib/internal/crypto/cfrg.js +++ b/lib/internal/crypto/cfrg.js @@ -17,6 +17,12 @@ const { kSignJobModeVerify, } = internalBinding('crypto'); +const { + codes: { + ERR_CRYPTO_INVALID_JWK, + }, +} = require('internal/errors'); + const { getUsagesUnion, hasAnyNotIn, @@ -191,7 +197,7 @@ function cfrgExportKey(key, format) { key[kKeyObject][kHandle])); } -async function cfrgImportKey( +function cfrgImportKey( format, keyData, algorithm, @@ -202,6 +208,11 @@ async function cfrgImportKey( let keyObject; const usagesSet = new SafeSet(keyUsages); switch (format) { + case 'KeyObject': { + verifyAcceptableCfrgKeyUse(name, keyData.type === 'public', usagesSet); + keyObject = keyData; + break; + } case 'spki': { verifyAcceptableCfrgKeyUse(name, true, usagesSet); try { @@ -277,22 +288,26 @@ async function cfrgImportKey( isPublic, usagesSet); - const publicKeyObject = createCFRGRawKey( - name, - Buffer.from(keyData.x, 'base64'), - true); - - if (isPublic) { - keyObject = publicKeyObject; - } else { - keyObject = createCFRGRawKey( + try { + const publicKeyObject = createCFRGRawKey( name, - Buffer.from(keyData.d, 'base64'), - false); - - if (!createPublicKey(keyObject).equals(publicKeyObject)) { - throw lazyDOMException('Invalid JWK', 'DataError'); + Buffer.from(keyData.x, 'base64'), + true); + + if (isPublic) { + keyObject = publicKeyObject; + } else { + keyObject = createCFRGRawKey( + name, + Buffer.from(keyData.d, 'base64'), + false); + + if (!createPublicKey(keyObject).equals(publicKeyObject)) { + throw new ERR_CRYPTO_INVALID_JWK(); + } } + } catch (err) { + throw lazyDOMException('Invalid keyData', { name: 'DataError', cause: err }); } break; } diff --git a/lib/internal/crypto/ec.js 
b/lib/internal/crypto/ec.js index 9f30f6e1a0ba09..3cd5d12cdb2bc2 100644 --- a/lib/internal/crypto/ec.js +++ b/lib/internal/crypto/ec.js @@ -149,7 +149,7 @@ function ecExportKey(key, format) { key[kKeyObject][kHandle])); } -async function ecImportKey( +function ecImportKey( format, keyData, algorithm, @@ -167,6 +167,11 @@ async function ecImportKey( let keyObject; const usagesSet = new SafeSet(keyUsages); switch (format) { + case 'KeyObject': { + verifyAcceptableEcKeyUse(name, keyData.type === 'public', usagesSet); + keyObject = keyData; + break; + } case 'spki': { verifyAcceptableEcKeyUse(name, true, usagesSet); try { @@ -240,9 +245,15 @@ async function ecImportKey( } const handle = new KeyObjectHandle(); - const type = handle.initJwk(keyData, namedCurve); + let type; + try { + type = handle.initJwk(keyData, namedCurve); + } catch (err) { + throw lazyDOMException( + 'Invalid keyData', { name: 'DataError', cause: err }); + } if (type === undefined) - throw lazyDOMException('Invalid JWK', 'DataError'); + throw lazyDOMException('Invalid keyData', 'DataError'); keyObject = type === kKeyTypePrivate ? new PrivateKeyObject(handle) : new PublicKeyObject(handle); diff --git a/lib/internal/crypto/hashnames.js b/lib/internal/crypto/hashnames.js index 54cd87b6ba30a4..49729034facc7c 100644 --- a/lib/internal/crypto/hashnames.js +++ b/lib/internal/crypto/hashnames.js @@ -70,7 +70,7 @@ function normalizeHashName(name, context = kHashContextNode) { if (typeof name !== 'string') return name; name = StringPrototypeToLowerCase(name); - const alias = kHashNames[name] && kHashNames[name][context]; + const alias = kHashNames[name]?.[context]; return alias || name; } diff --git a/lib/internal/crypto/keys.js b/lib/internal/crypto/keys.js index ee1d09581696d3..4738fbbbf208d1 100644 --- a/lib/internal/crypto/keys.js +++ b/lib/internal/crypto/keys.js @@ -6,6 +6,7 @@ const { ObjectDefineProperties, ObjectDefineProperty, ObjectSetPrototypeOf, + SafeSet, Symbol, SymbolToStringTag, Uint8Array, @@ -49,6 +50,8 @@ const { kKeyObject, getArrayBufferOrView, bigIntArrayToUnsignedBigInt, + normalizeAlgorithm, + hasAnyNotIn, } = require('internal/crypto/util'); const { @@ -65,6 +68,7 @@ const { const { customInspectSymbol: kInspect, kEnumerableProperty, + lazyDOMException, } = require('internal/util'); const { inspect } = require('internal/util/inspect'); @@ -88,7 +92,7 @@ for (const m of [[kKeyEncodingPKCS1, 'pkcs1'], [kKeyEncodingPKCS8, 'pkcs8'], encodingNames[m[0]] = m[1]; // Creating the KeyObject class is a little complicated due to inheritance -// and the fact that KeyObjects should be transferrable between threads, +// and the fact that KeyObjects should be transferable between threads, // which requires the KeyObject base class to be implemented in C++. // The creation requires a callback to make sure that the NativeKeyObject // base class cannot exist without the other KeyObject implementations. 
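A recurring pattern in the crypto changes here and below: handle.initJwk() failures are no longer reported as a bare 'Invalid JWK' but as an 'Invalid keyData' DataError that may carry the low-level failure as its cause. A sketch of what a caller sees (the exact keyData needed to trip initJwk may differ):

  const { subtle } = require('node:crypto').webcrypto;
  const badJwk = { kty: 'EC', crv: 'P-256', x: '!!', y: '!!' }; // not base64url
  subtle.importKey('jwk', badJwk, { name: 'ECDSA', namedCurve: 'P-256' },
                   false, ['verify'])
    .catch((err) => {
      console.log(err.name, err.message); // DataError Invalid keyData
    });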
@@ -148,6 +152,8 @@ const { }, }); + let webidl; + class SecretKeyObject extends KeyObject { constructor(handle) { super('secret', handle); @@ -168,6 +174,51 @@ const { } return this[kHandle].export(); } + + toCryptoKey(algorithm, extractable, keyUsages) { + webidl ??= require('internal/crypto/webidl'); + algorithm = normalizeAlgorithm(webidl.converters.AlgorithmIdentifier(algorithm), 'importKey'); + extractable = webidl.converters.boolean(extractable); + keyUsages = webidl.converters['sequence'](keyUsages); + + let result; + switch (algorithm.name) { + case 'HMAC': + result = require('internal/crypto/mac') + .hmacImportKey('KeyObject', this, algorithm, extractable, keyUsages); + break; + case 'AES-CTR': + // Fall through + case 'AES-CBC': + // Fall through + case 'AES-GCM': + // Fall through + case 'AES-KW': + result = require('internal/crypto/aes') + .aesImportKey(algorithm, 'KeyObject', this, extractable, keyUsages); + break; + case 'HKDF': + // Fall through + case 'PBKDF2': + result = importGenericSecretKey( + algorithm, + 'KeyObject', + this, + extractable, + keyUsages); + break; + default: + throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); + } + + if (result.usages.length === 0) { + throw lazyDOMException( + `Usages cannot be empty when importing a ${result.type} key.`, + 'SyntaxError'); + } + + return result; + } } const kAsymmetricKeyType = Symbol('kAsymmetricKeyType'); @@ -209,6 +260,51 @@ const { return {}; } } + + toCryptoKey(algorithm, extractable, keyUsages) { + webidl ??= require('internal/crypto/webidl'); + algorithm = normalizeAlgorithm(webidl.converters.AlgorithmIdentifier(algorithm), 'importKey'); + extractable = webidl.converters.boolean(extractable); + keyUsages = webidl.converters['sequence'](keyUsages); + + let result; + switch (algorithm.name) { + case 'RSASSA-PKCS1-v1_5': + // Fall through + case 'RSA-PSS': + // Fall through + case 'RSA-OAEP': + result = require('internal/crypto/rsa') + .rsaImportKey('KeyObject', this, algorithm, extractable, keyUsages); + break; + case 'ECDSA': + // Fall through + case 'ECDH': + result = require('internal/crypto/ec') + .ecImportKey('KeyObject', this, algorithm, extractable, keyUsages); + break; + case 'Ed25519': + // Fall through + case 'Ed448': + // Fall through + case 'X25519': + // Fall through + case 'X448': + result = require('internal/crypto/cfrg') + .cfrgImportKey('KeyObject', this, algorithm, extractable, keyUsages); + break; + default: + throw lazyDOMException('Unrecognized algorithm name', 'NotSupportedError'); + } + + if (result.type === 'private' && result.usages.length === 0) { + throw lazyDOMException( + `Usages cannot be empty when importing a ${result.type} key.`, + 'SyntaxError'); + } + + return result; + } } class PublicKeyObject extends AsymmetricKeyObject { @@ -801,6 +897,68 @@ function isCryptoKey(obj) { return obj != null && obj[kKeyObject] !== undefined; } +function importGenericSecretKey( + { name, length }, + format, + keyData, + extractable, + keyUsages) { + const usagesSet = new SafeSet(keyUsages); + if (extractable) + throw lazyDOMException(`${name} keys are not extractable`, 'SyntaxError'); + + if (hasAnyNotIn(usagesSet, ['deriveKey', 'deriveBits'])) { + throw lazyDOMException( + `Unsupported key usage for a ${name} key`, + 'SyntaxError'); + } + + switch (format) { + case 'KeyObject': { + if (hasAnyNotIn(usagesSet, ['deriveKey', 'deriveBits'])) { + throw lazyDOMException( + `Unsupported key usage for a ${name} key`, + 'SyntaxError'); + } + + const checkLength = 
keyData.symmetricKeySize * 8; + + // The Web Crypto spec allows for key lengths that are not multiples of + // 8. We don't. Our check here is stricter than that defined by the spec + // in that we require that algorithm.length match keyData.length * 8 if + // algorithm.length is specified. + if (length !== undefined && length !== checkLength) { + throw lazyDOMException('Invalid key length', 'DataError'); + } + return new InternalCryptoKey(keyData, { name }, keyUsages, false); + } + case 'raw': { + if (hasAnyNotIn(usagesSet, ['deriveKey', 'deriveBits'])) { + throw lazyDOMException( + `Unsupported key usage for a ${name} key`, + 'SyntaxError'); + } + + const checkLength = keyData.byteLength * 8; + + // The Web Crypto spec allows for key lengths that are not multiples of + // 8. We don't. Our check here is stricter than that defined by the spec + // in that we require that algorithm.length match keyData.length * 8 if + // algorithm.length is specified. + if (length !== undefined && length !== checkLength) { + throw lazyDOMException('Invalid key length', 'DataError'); + } + + const keyObject = createSecretKey(keyData); + return new InternalCryptoKey(keyObject, { name }, keyUsages, false); + } + } + + throw lazyDOMException( + `Unable to import ${name} key with format ${format}`, + 'NotSupportedError'); +} + module.exports = { // Public API. createSecretKey, @@ -822,4 +980,5 @@ module.exports = { PrivateKeyObject, isKeyObject, isCryptoKey, + importGenericSecretKey, }; diff --git a/lib/internal/crypto/mac.js b/lib/internal/crypto/mac.js index 91f58a85a9f6dd..356ce5a8d79a31 100644 --- a/lib/internal/crypto/mac.js +++ b/lib/internal/crypto/mac.js @@ -82,7 +82,7 @@ function getAlgorithmName(hash) { } } -async function hmacImportKey( +function hmacImportKey( format, keyData, algorithm, @@ -96,6 +96,24 @@ async function hmacImportKey( } let keyObject; switch (format) { + case 'KeyObject': { + const checkLength = keyData.symmetricKeySize * 8; + + if (checkLength === 0 || algorithm.length === 0) + throw lazyDOMException('Zero-length key is not supported', 'DataError'); + + // The Web Crypto spec allows for key lengths that are not multiples of + // 8. We don't. Our check here is stricter than that defined by the spec + // in that we require that algorithm.length match keyData.length * 8 if + // algorithm.length is specified. 
+ if (algorithm.length !== undefined && + algorithm.length !== checkLength) { + throw lazyDOMException('Invalid key length', 'DataError'); + } + + keyObject = keyData; + break; + } case 'raw': { const checkLength = keyData.byteLength * 8; @@ -145,7 +163,12 @@ async function hmacImportKey( } const handle = new KeyObjectHandle(); - handle.initJwk(keyData); + try { + handle.initJwk(keyData); + } catch (err) { + throw lazyDOMException( + 'Invalid keyData', { name: 'DataError', cause: err }); + } keyObject = new SecretKeyObject(handle); break; } diff --git a/lib/internal/crypto/rsa.js b/lib/internal/crypto/rsa.js index 3b338ce8762135..99c12ebbe3b83f 100644 --- a/lib/internal/crypto/rsa.js +++ b/lib/internal/crypto/rsa.js @@ -200,7 +200,7 @@ function rsaExportKey(key, format) { kRsaVariants[key.algorithm.name])); } -async function rsaImportKey( +function rsaImportKey( format, keyData, algorithm, @@ -209,6 +209,11 @@ async function rsaImportKey( const usagesSet = new SafeSet(keyUsages); let keyObject; switch (format) { + case 'KeyObject': { + verifyAcceptableRsaKeyUse(algorithm.name, keyData.type === 'public', usagesSet); + keyObject = keyData; + break; + } case 'spki': { verifyAcceptableRsaKeyUse(algorithm.name, true, usagesSet); try { @@ -275,9 +280,15 @@ async function rsaImportKey( } const handle = new KeyObjectHandle(); - const type = handle.initJwk(keyData); + let type; + try { + type = handle.initJwk(keyData); + } catch (err) { + throw lazyDOMException( + 'Invalid keyData', { name: 'DataError', cause: err }); + } if (type === undefined) - throw lazyDOMException('Invalid JWK', 'DataError'); + throw lazyDOMException('Invalid keyData', 'DataError'); keyObject = type === kKeyTypePrivate ? new PrivateKeyObject(handle) : diff --git a/lib/internal/crypto/webcrypto.js b/lib/internal/crypto/webcrypto.js index 8cd27717532344..4758c2c1c7133a 100644 --- a/lib/internal/crypto/webcrypto.js +++ b/lib/internal/crypto/webcrypto.js @@ -8,7 +8,6 @@ const { ObjectDefineProperty, ReflectApply, ReflectConstruct, - SafeSet, StringPrototypeRepeat, SymbolToStringTag, } = primordials; @@ -36,8 +35,7 @@ const { const { CryptoKey, - InternalCryptoKey, - createSecretKey, + importGenericSecretKey, } = require('internal/crypto/keys'); const { @@ -46,7 +44,6 @@ const { const { getBlockSize, - hasAnyNotIn, normalizeAlgorithm, normalizeHashName, validateMaxBufferLength, @@ -526,50 +523,6 @@ async function exportKey(format, key) { 'Export format is unsupported', 'NotSupportedError'); } -async function importGenericSecretKey( - { name, length }, - format, - keyData, - extractable, - keyUsages) { - const usagesSet = new SafeSet(keyUsages); - if (extractable) - throw lazyDOMException(`${name} keys are not extractable`, 'SyntaxError'); - - if (hasAnyNotIn(usagesSet, ['deriveKey', 'deriveBits'])) { - throw lazyDOMException( - `Unsupported key usage for a ${name} key`, - 'SyntaxError'); - } - - switch (format) { - case 'raw': { - if (hasAnyNotIn(usagesSet, ['deriveKey', 'deriveBits'])) { - throw lazyDOMException( - `Unsupported key usage for a ${name} key`, - 'SyntaxError'); - } - - const checkLength = keyData.byteLength * 8; - - // The Web Crypto spec allows for key lengths that are not multiples of - // 8. We don't. Our check here is stricter than that defined by the spec - // in that we require that algorithm.length match keyData.length * 8 if - // algorithm.length is specified. 
- if (length !== undefined && length !== checkLength) { - throw lazyDOMException('Invalid key length', 'DataError'); - } - - const keyObject = createSecretKey(keyData); - return new InternalCryptoKey(keyObject, { name }, keyUsages, false); - } - } - - throw lazyDOMException( - `Unable to import ${name} key with format ${format}`, - 'NotSupportedError'); -} - async function importKey( format, keyData, @@ -611,13 +564,13 @@ async function importKey( case 'RSA-PSS': // Fall through case 'RSA-OAEP': - result = await require('internal/crypto/rsa') + result = require('internal/crypto/rsa') .rsaImportKey(format, keyData, algorithm, extractable, keyUsages); break; case 'ECDSA': // Fall through case 'ECDH': - result = await require('internal/crypto/ec') + result = require('internal/crypto/ec') .ecImportKey(format, keyData, algorithm, extractable, keyUsages); break; case 'Ed25519': @@ -627,11 +580,11 @@ async function importKey( case 'X25519': // Fall through case 'X448': - result = await require('internal/crypto/cfrg') + result = require('internal/crypto/cfrg') .cfrgImportKey(format, keyData, algorithm, extractable, keyUsages); break; case 'HMAC': - result = await require('internal/crypto/mac') + result = require('internal/crypto/mac') .hmacImportKey(format, keyData, algorithm, extractable, keyUsages); break; case 'AES-CTR': @@ -641,13 +594,13 @@ async function importKey( case 'AES-GCM': // Fall through case 'AES-KW': - result = await require('internal/crypto/aes') + result = require('internal/crypto/aes') .aesImportKey(algorithm, format, keyData, extractable, keyUsages); break; case 'HKDF': // Fall through case 'PBKDF2': - result = await importGenericSecretKey( + result = importGenericSecretKey( algorithm, format, keyData, diff --git a/lib/internal/crypto/webidl.js b/lib/internal/crypto/webidl.js index ee9b7b69b73077..394333033a0b25 100644 --- a/lib/internal/crypto/webidl.js +++ b/lib/internal/crypto/webidl.js @@ -80,7 +80,7 @@ function type(V) { case 'object': // Fall through case 'function': // Fall through default: - // Per ES spec, typeof returns an implemention-defined value that is not + // Per ES spec, typeof returns an implementation-defined value that is not // any of the existing ones for uncallable non-standard exotic objects. // Yet Type() which the Web IDL spec depends on returns Object for such // cases. So treat the default case as an object. 
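The new 'KeyObject' format cases above exist to serve KeyObject.prototype.toCryptoKey(), added earlier in this patch, which routes a live KeyObject through the same per-algorithm import helpers that subtle.importKey() uses, now synchronously. A sketch of the resulting API, assuming the method lands as shown:

  const { createSecretKey } = require('node:crypto');
  const keyObject = createSecretKey(Buffer.alloc(32));
  // Synchronous: returns a CryptoKey directly, no Promise, because the
  // per-algorithm import helpers are no longer async functions.
  const cryptoKey = keyObject.toCryptoKey(
    { name: 'HMAC', hash: 'SHA-256' }, false, ['sign', 'verify']);
  console.log(cryptoKey.algorithm.name, cryptoKey.usages);
  // HMAC [ 'sign', 'verify' ]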
diff --git a/lib/internal/crypto/x509.js b/lib/internal/crypto/x509.js index 30005390a4571e..5bc88d7b41c9ab 100644 --- a/lib/internal/crypto/x509.js +++ b/lib/internal/crypto/x509.js @@ -135,6 +135,8 @@ class X509Certificate { infoAccess: this.infoAccess, validFrom: this.validFrom, validTo: this.validTo, + validFromDate: this.validFromDate, + validToDate: this.validToDate, fingerprint: this.fingerprint, fingerprint256: this.fingerprint256, fingerprint512: this.fingerprint512, @@ -220,6 +222,24 @@ class X509Certificate { return value; } + get validFromDate() { + let value = this[kInternalState].get('validFromDate'); + if (value === undefined) { + value = this[kHandle].validFromDate(); + this[kInternalState].set('validFromDate', value); + } + return value; + } + + get validToDate() { + let value = this[kInternalState].get('validToDate'); + if (value === undefined) { + value = this[kHandle].validToDate(); + this[kInternalState].set('validToDate', value); + } + return value; + } + get fingerprint() { let value = this[kInternalState].get('fingerprint'); if (value === undefined) { diff --git a/lib/internal/debugger/inspect_repl.js b/lib/internal/debugger/inspect_repl.js index b4f454152dc438..8dc5e9823cae6e 100644 --- a/lib/internal/debugger/inspect_repl.js +++ b/lib/internal/debugger/inspect_repl.js @@ -694,7 +694,7 @@ function createRepl(inspector) { function handleBreakpointResolved({ breakpointId, location }) { const script = knownScripts[location.scriptId]; - const scriptUrl = script && script.url; + const scriptUrl = script?.url; if (scriptUrl) { ObjectAssign(location, { scriptUrl }); } @@ -733,7 +733,7 @@ function createRepl(inspector) { function setBreakpoint(script, line, condition, silent) { function registerBreakpoint({ breakpointId, actualLocation }) { handleBreakpointResolved({ breakpointId, location: actualLocation }); - if (actualLocation && actualLocation.scriptId) { + if (actualLocation?.scriptId) { if (!silent) return getSourceSnippet(actualLocation, 5); } else { print(`Warning: script '${script}' was not loaded yet.`); diff --git a/lib/internal/dns/callback_resolver.js b/lib/internal/dns/callback_resolver.js index c7bf1ce1d69cfd..fdafd310ad10a5 100644 --- a/lib/internal/dns/callback_resolver.js +++ b/lib/internal/dns/callback_resolver.js @@ -67,7 +67,7 @@ function resolver(bindingName) { req.callback = callback; req.hostname = name; req.oncomplete = onresolve; - req.ttl = !!(options && options.ttl); + req.ttl = !!(options?.ttl); const err = this._handle[bindingName](req, name); if (err) throw new DNSException(err, bindingName, name); if (hasObserver('dns')) { diff --git a/lib/internal/dns/promises.js b/lib/internal/dns/promises.js index 59e5252f7e14d5..443d21dc2798ac 100644 --- a/lib/internal/dns/promises.js +++ b/lib/internal/dns/promises.js @@ -336,7 +336,7 @@ function resolver(bindingName) { function query(name, options) { validateString(name, 'name'); - const ttl = !!(options && options.ttl); + const ttl = !!(options?.ttl); return createResolverPromise(this, bindingName, name, ttl); } diff --git a/lib/internal/dns/utils.js b/lib/internal/dns/utils.js index e8c8accc501e0d..7d9e22d1c2458f 100644 --- a/lib/internal/dns/utils.js +++ b/lib/internal/dns/utils.js @@ -59,7 +59,7 @@ const kSerializeResolver = Symbol('dns:resolver:serialize'); const kDeserializeResolver = Symbol('dns:resolver:deserialize'); const kSnapshotStates = Symbol('dns:resolver:config'); const kInitializeHandle = Symbol('dns:resolver:initializeHandle'); -const kSetServersInteral = 
Symbol('dns:resolver:setServers'); +const kSetServersInternal = Symbol('dns:resolver:setServers'); // Resolver instances correspond 1:1 to c-ares channels. @@ -139,10 +139,10 @@ class ResolverBase { throw new ERR_INVALID_IP_ADDRESS(serv); }); - this[kSetServersInteral](newSet, servers); + this[kSetServersInternal](newSet, servers); } - [kSetServersInteral](newSet, servers) { + [kSetServersInternal](newSet, servers) { const orig = ArrayPrototypeMap(this._handle.getServers() || [], (val) => { val.unshift(isIP(val[0])); return val; @@ -194,7 +194,7 @@ class ResolverBase { this._handle.setLocalAddress(ipv4, ipv6); } if (servers) { - this[kSetServersInteral](servers, servers); + this[kSetServersInternal](servers, servers); } } } diff --git a/lib/internal/error_serdes.js b/lib/internal/error_serdes.js index 89f24eab9b1989..e53291c6b770fc 100644 --- a/lib/internal/error_serdes.js +++ b/lib/internal/error_serdes.js @@ -85,7 +85,7 @@ function GetConstructors(object) { current !== null; current = ObjectGetPrototypeOf(current)) { const desc = ObjectGetOwnPropertyDescriptor(current, 'constructor'); - if (desc && desc.value) { + if (desc?.value) { ObjectDefineProperty(constructors, constructors.length, { __proto__: null, value: desc.value, enumerable: true, @@ -98,7 +98,7 @@ function GetConstructors(object) { function GetName(object) { const desc = ObjectGetOwnPropertyDescriptor(object, 'name'); - return desc && desc.value; + return desc?.value; } let internalUtilInspect; diff --git a/lib/internal/errors.js b/lib/internal/errors.js index 855ba65c4bb100..a14c0f9511251e 100644 --- a/lib/internal/errors.js +++ b/lib/internal/errors.js @@ -1528,6 +1528,7 @@ E('ERR_INVALID_SYNC_FORK_INPUT', TypeError); E('ERR_INVALID_THIS', 'Value of "this" must be of type %s', TypeError); E('ERR_INVALID_TUPLE', '%s must be an iterable %s tuple', TypeError); +E('ERR_INVALID_TYPESCRIPT_SYNTAX', '%s', SyntaxError); E('ERR_INVALID_URI', 'URI malformed', URIError); E('ERR_INVALID_URL', function(input, base = null) { this.input = input; @@ -1648,6 +1649,9 @@ E('ERR_PARSE_ARGS_UNKNOWN_OPTION', (option, allowPositionals) => { E('ERR_PERFORMANCE_INVALID_TIMESTAMP', '%d is not a valid timestamp', TypeError); E('ERR_PERFORMANCE_MEASURE_INVALID_OPTIONS', '%s', TypeError); +E('ERR_QUIC_CONNECTION_FAILED', 'QUIC connection failed', Error); +E('ERR_QUIC_ENDPOINT_CLOSED', 'QUIC endpoint closed: %s (%d)', Error); +E('ERR_QUIC_OPEN_STREAM_FAILED', 'Failed to open QUIC stream', Error); E('ERR_REQUIRE_CYCLE_MODULE', '%s', Error); E('ERR_REQUIRE_ESM', function(filename, hasEsmSyntax, parentPath = null, packageJsonPath = null) { @@ -1708,6 +1712,8 @@ E('ERR_SOCKET_CONNECTION_TIMEOUT', E('ERR_SOCKET_DGRAM_IS_CONNECTED', 'Already connected', Error); E('ERR_SOCKET_DGRAM_NOT_CONNECTED', 'Not connected', Error); E('ERR_SOCKET_DGRAM_NOT_RUNNING', 'Not running', Error); +E('ERR_SOURCE_MAP_CORRUPT', `The source map for '%s' does not exist or is corrupt.`, Error); +E('ERR_SOURCE_MAP_MISSING_SOURCE', `Cannot find '%s' imported from the source map for '%s'`, Error); E('ERR_SRI_PARSE', 'Subresource Integrity string %j had an unexpected %j at position %d', SyntaxError); @@ -1889,4 +1895,3 @@ E('ERR_WORKER_UNSERIALIZABLE_ERROR', 'Serializing an uncaught exception failed', Error); E('ERR_WORKER_UNSUPPORTED_OPERATION', '%s is not supported in workers', TypeError); -E('ERR_ZLIB_INITIALIZATION_FAILED', 'Initialization failed', Error); diff --git a/lib/internal/event_target.js b/lib/internal/event_target.js index ecdc1bbba054a3..aa0203b930fbe8 100644 --- 
a/lib/internal/event_target.js +++ b/lib/internal/event_target.js @@ -111,14 +111,14 @@ class Event { * composed?: boolean, * }} [options] */ - constructor(type, options = kEmptyObject) { + constructor(type, options = undefined) { if (arguments.length === 0) throw new ERR_MISSING_ARGS('type'); - validateObject(options, 'options'); - const { bubbles, cancelable, composed } = options; - this.#cancelable = !!cancelable; - this.#bubbles = !!bubbles; - this.#composed = !!composed; + if (options != null) + validateObject(options, 'options'); + this.#bubbles = !!options?.bubbles; + this.#cancelable = !!options?.cancelable; + this.#composed = !!options?.composed; this[kType] = `${type}`; if (options?.[kTrustEvent]) { @@ -196,7 +196,7 @@ class Event { get currentTarget() { if (!isEvent(this)) throw new ERR_INVALID_THIS('Event'); - return this[kTarget]; + return this[kIsBeingDispatched] ? this[kTarget] : null; } /** diff --git a/lib/internal/fs/glob.js b/lib/internal/fs/glob.js index be3c48da755520..d4b73a0ffca46d 100644 --- a/lib/internal/fs/glob.js +++ b/lib/internal/fs/glob.js @@ -337,7 +337,7 @@ class Glob { const subPatterns = new SafeSet(); const nSymlinks = new SafeSet(); for (const index of pattern.indexes) { - // For each child, chek potential patterns + // For each child, check potential patterns if (this.#cache.seen(entryPath, pattern, index) || this.#cache.seen(entryPath, pattern, index + 1)) { return; } @@ -365,7 +365,7 @@ class Glob { // If next pattern is the last one, add to results this.#results.add(entryPath); } else if (nextMatches && entry.isDirectory()) { - // Pattern mached, meaning two patterns forward + // Pattern matched, meaning two patterns forward // are also potential patterns // e.g **/b/c when entry is a/b - add c to potential patterns subPatterns.add(index + 2); @@ -532,7 +532,7 @@ class Glob { const subPatterns = new SafeSet(); const nSymlinks = new SafeSet(); for (const index of pattern.indexes) { - // For each child, chek potential patterns + // For each child, check potential patterns if (this.#cache.seen(entryPath, pattern, index) || this.#cache.seen(entryPath, pattern, index + 1)) { return; } @@ -566,7 +566,7 @@ class Glob { yield this.#withFileTypes ? 
entry : entryPath; } } else if (nextMatches && entry.isDirectory()) { - // Pattern mached, meaning two patterns forward + // Pattern matched, meaning two patterns forward // are also potential patterns // e.g **/b/c when entry is a/b - add c to potential patterns subPatterns.add(index + 2); diff --git a/lib/internal/fs/promises.js b/lib/internal/fs/promises.js index 88473afc1a5220..2f37c8d7f8b6f9 100644 --- a/lib/internal/fs/promises.js +++ b/lib/internal/fs/promises.js @@ -914,7 +914,7 @@ async function readdirRecursive(originalPath, options) { const { 0: path, 1: readdir } = ArrayPrototypePop(queue); for (const ent of readdir) { const direntPath = pathModule.join(path, ent); - const stat = binding.internalModuleStat(direntPath); + const stat = binding.internalModuleStat(binding, direntPath); ArrayPrototypePush( result, pathModule.relative(originalPath, direntPath), diff --git a/lib/internal/http2/util.js b/lib/internal/http2/util.js index 77267a52fdc4bc..d1fea4a49f0a18 100644 --- a/lib/internal/http2/util.js +++ b/lib/internal/http2/util.js @@ -216,7 +216,9 @@ const IDX_OPTIONS_MAX_OUTSTANDING_PINGS = 6; const IDX_OPTIONS_MAX_OUTSTANDING_SETTINGS = 7; const IDX_OPTIONS_MAX_SESSION_MEMORY = 8; const IDX_OPTIONS_MAX_SETTINGS = 9; -const IDX_OPTIONS_FLAGS = 10; +const IDX_OPTIONS_STREAM_RESET_RATE = 10; +const IDX_OPTIONS_STREAM_RESET_BURST = 11; +const IDX_OPTIONS_FLAGS = 12; function updateOptionsBuffer(options) { let flags = 0; @@ -270,6 +272,16 @@ function updateOptionsBuffer(options) { optionsBuffer[IDX_OPTIONS_MAX_SETTINGS] = MathMax(1, options.maxSettings); } + if (typeof options.streamResetRate === 'number') { + flags |= (1 << IDX_OPTIONS_STREAM_RESET_RATE); + optionsBuffer[IDX_OPTIONS_STREAM_RESET_RATE] = + MathMax(1, options.streamResetRate); + } + if (typeof options.streamResetBurst === 'number') { + flags |= (1 << IDX_OPTIONS_STREAM_RESET_BURST); + optionsBuffer[IDX_OPTIONS_STREAM_RESET_BURST] = + MathMax(1, options.streamResetBurst); + } optionsBuffer[IDX_OPTIONS_FLAGS] = flags; } diff --git a/lib/internal/idna.js b/lib/internal/idna.js deleted file mode 100644 index ad37bb48efc605..00000000000000 --- a/lib/internal/idna.js +++ /dev/null @@ -1,4 +0,0 @@ -'use strict'; - -const { toASCII, toUnicode } = internalBinding('encoding_binding'); -module.exports = { toASCII, toUnicode }; diff --git a/lib/internal/main/mksnapshot.js b/lib/internal/main/mksnapshot.js index 0d00acd6a168ba..63df8c50087aad 100644 --- a/lib/internal/main/mksnapshot.js +++ b/lib/internal/main/mksnapshot.js @@ -23,10 +23,10 @@ const { emitWarningSync } = require('internal/process/warning'); const { initializeCallbacks, namespace: { - addSerializeCallback, addDeserializeCallback, isBuildingSnapshot, }, + addAfterUserSerializeCallback, } = require('internal/v8/startup_snapshot'); const { @@ -34,6 +34,7 @@ const { } = require('internal/process/pre_execution'); const path = require('path'); +const { getOptionValue } = require('internal/options'); const supportedModules = new SafeSet(new SafeArrayIterator([ // '_http_agent', @@ -140,22 +141,56 @@ function main() { prepareMainThreadExecution(false, false); initializeCallbacks(); - let stackTraceLimitDesc; - addDeserializeCallback(() => { - if (stackTraceLimitDesc !== undefined) { - ObjectDefineProperty(Error, 'stackTraceLimit', stackTraceLimitDesc); + // In a context created for building snapshots, V8 does not install Error.stackTraceLimit and as + // a result, if an error is created during the snapshot building process, error.stack would be + // undefined. 
To prevent users from tripping over this, install Error.stackTraceLimit based on + // --stack-trace-limit by ourselves (which defaults to 10). + // See https://chromium-review.googlesource.com/c/v8/v8/+/3319481 + const initialStackTraceLimitDesc = { + value: getOptionValue('--stack-trace-limit'), + configurable: true, + writable: true, + enumerable: true, + __proto__: null, + }; + ObjectDefineProperty(Error, 'stackTraceLimit', initialStackTraceLimitDesc); + + let stackTraceLimitDescToRestore; + // Error.stackTraceLimit needs to be removed during serialization, because when V8 deserializes + // the snapshot, it expects Error.stackTraceLimit to be unset so that it can install it as a new property + // using the value of --stack-trace-limit. + addAfterUserSerializeCallback(() => { + const desc = ObjectGetOwnPropertyDescriptor(Error, 'stackTraceLimit'); + + // If it's modified by users, emit a warning. + if (desc && ( + desc.value !== initialStackTraceLimitDesc.value || + desc.configurable !== initialStackTraceLimitDesc.configurable || + desc.writable !== initialStackTraceLimitDesc.writable || + desc.enumerable !== initialStackTraceLimitDesc.enumerable + )) { + process._rawDebug('Error.stackTraceLimit has been modified by the snapshot builder script.'); + // We want to use null-prototype objects to not rely on globally mutable + // %Object.prototype%. + if (desc.configurable) { + stackTraceLimitDescToRestore = desc; + ObjectSetPrototypeOf(stackTraceLimitDescToRestore, null); + process._rawDebug('It will be preserved after snapshot deserialization and override ' + + '--stack-trace-limit passed into the deserialized application.\n' + + 'To allow --stack-trace-limit override in the deserialized application, ' + + 'delete Error.stackTraceLimit.'); + } else { + process._rawDebug('It is not configurable and will crash the application upon deserialization.\n' + + 'To fix the error, make Error.stackTraceLimit configurable.'); + } } + + delete Error.stackTraceLimit; }); - addSerializeCallback(() => { - stackTraceLimitDesc = ObjectGetOwnPropertyDescriptor(Error, 'stackTraceLimit'); - if (stackTraceLimitDesc !== undefined) { - // We want to use null-prototype objects to not rely on globally mutable - // %Object.prototype%. - ObjectSetPrototypeOf(stackTraceLimitDesc, null); - process._rawDebug('Deleting Error.stackTraceLimit from the snapshot. ' + - 'It will be re-installed after deserialization'); - delete Error.stackTraceLimit; + addDeserializeCallback(() => { + if (stackTraceLimitDescToRestore) { + ObjectDefineProperty(Error, 'stackTraceLimit', stackTraceLimitDescToRestore); } }); diff --git a/lib/internal/main/run_main_module.js b/lib/internal/main/run_main_module.js index ccab1595685ada..9b79410bdad88d 100644 --- a/lib/internal/main/run_main_module.js +++ b/lib/internal/main/run_main_module.js @@ -9,14 +9,20 @@ const { markBootstrapComplete, } = require('internal/process/pre_execution'); const { getOptionValue } = require('internal/options'); +const { emitExperimentalWarning } = require('internal/util'); -const mainEntry = prepareMainThreadExecution(true); +const isEntryURL = getOptionValue('--entry-url'); +const mainEntry = prepareMainThreadExecution(!isEntryURL); markBootstrapComplete(); // Necessary to reset RegExp statics before user code runs. 
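For context on the comment above: V8 consults Error.stackTraceLimit at Error construction time, and when the property is absent (as in a snapshot-building context) error.stack carries no frames. A standalone illustration of the property the snapshot code now pre-installs:

  Error.stackTraceLimit = 2;
  const err = new Error('boom');
  // One message line plus at most two stack frames.
  console.log(err.stack.split('\n').length); // 3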
RegExpPrototypeExec(/^/, ''); +if (isEntryURL) { + emitExperimentalWarning('--entry-url'); +} + if (getOptionValue('--experimental-default-type') === 'module') { require('internal/modules/run_main').executeUserEntryPoint(mainEntry); } else { diff --git a/lib/internal/main/test_runner.js b/lib/internal/main/test_runner.js index b1f69b07771ac6..87c7dca048b72d 100644 --- a/lib/internal/main/test_runner.js +++ b/lib/internal/main/test_runner.js @@ -31,8 +31,8 @@ if (isUsingInspector() && options.isolation === 'process') { options.globPatterns = ArrayPrototypeSlice(process.argv, 1); debug('test runner configuration:', options); -run(options).on('test:fail', (data) => { - if (data.todo === undefined || data.todo === false) { +run(options).on('test:summary', (data) => { + if (!data.success) { process.exitCode = kGenericUserError; } }); diff --git a/lib/internal/main/watch_mode.js b/lib/internal/main/watch_mode.js index 9be47ed114ae2b..2fb689bd7d5abe 100644 --- a/lib/internal/main/watch_mode.js +++ b/lib/internal/main/watch_mode.js @@ -113,7 +113,10 @@ function reportGracefulTermination() { return () => { clearTimeout(graceTimer); if (reported) { - process.stdout.write(`${clear}${green}Gracefully restarted ${kCommandStr}${white}\n`); + if (!kPreserveOutput) { + process.stdout.write(clear); + } + process.stdout.write(`${green}Gracefully restarted ${kCommandStr}${white}\n`); } }; } diff --git a/lib/internal/main/worker_thread.js b/lib/internal/main/worker_thread.js index fe516ae38297ce..f8c410b5b25cb0 100644 --- a/lib/internal/main/worker_thread.js +++ b/lib/internal/main/worker_thread.js @@ -48,6 +48,8 @@ const { setupMainThreadPort } = require('internal/worker/messaging'); const { onGlobalUncaughtException, + evalScript, + evalModuleEntryPoint, } = require('internal/process/execution'); let debug = require('internal/util/debuglog').debuglog('worker', (fn) => { @@ -55,6 +57,7 @@ let debug = require('internal/util/debuglog').debuglog('worker', (fn) => { }); const assert = require('internal/assert'); +const { getOptionValue } = require('internal/options'); const { exitCodes: { kGenericUserError } } = internalBinding('errors'); prepareWorkerThreadExecution(); @@ -152,8 +155,7 @@ port.on('message', (message) => { break; } - case 'classic': { - const { evalScript } = require('internal/process/execution'); + case 'classic': if (getOptionValue('--input-type') !== 'module') { const name = '[worker eval]'; // This is necessary for CJS module compilation. // TODO: pass this with something really internal. 
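The test_runner change above switches the exit-code decision from counting individual non-todo test:fail events to the run-level summary, which also accounts for failures (in hooks, for example) that never emit a test:fail of their own. A sketch using the public run() API, assuming the public stream mirrors the internal one; the file name is a placeholder:

  const { run } = require('node:test');
  run({ files: ['./example.test.js'] }) // hypothetical test file
    .on('test:summary', (data) => {
      // One event summarises the entire run.
      if (!data.success) process.exitCode = 1;
    });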
@@ -168,8 +170,8 @@ port.on('message', (message) => { break; } + // eslint-disable-next-line no-fallthrough case 'module': { - const { evalModuleEntryPoint } = require('internal/process/execution'); PromisePrototypeThen(evalModuleEntryPoint(filename), undefined, (e) => { workerOnGlobalUncaughtException(e, true); }); diff --git a/lib/internal/modules/cjs/loader.js b/lib/internal/modules/cjs/loader.js index 451b7c2195e7ad..b0210e4d96348c 100644 --- a/lib/internal/modules/cjs/loader.js +++ b/lib/internal/modules/cjs/loader.js @@ -152,6 +152,7 @@ const { setHasStartedUserCJSExecution, stripBOM, toRealPath, + stripTypeScriptTypes, } = require('internal/modules/helpers'); const packageJsonReader = require('internal/modules/package_json_reader'); const { getOptionValue, getEmbedderOptions } = require('internal/options'); @@ -234,9 +235,9 @@ function stat(filename) { const result = statCache.get(filename); if (result !== undefined) { return result; } } - const result = internalFsBinding.internalModuleStat(filename); + const result = internalFsBinding.internalModuleStat(internalFsBinding, filename); if (statCache !== null && result >= 0) { - // Only set cache when `internalModuleStat(filename)` succeeds. + // Only set cache when `internalModuleStat(internalFsBinding, filename)` succeeds. statCache.set(filename, result); } return result; @@ -1362,7 +1363,6 @@ function loadESMFromCJS(mod, filename) { if (isUnderNodeModules(filename)) { throw new ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING(filename); } - const { stripTypeScriptTypes } = require('internal/modules/helpers'); source = stripTypeScriptTypes(source, filename); } const cascadedLoader = require('internal/modules/esm/loader').getOrInitializeCascadedLoader(); @@ -1370,7 +1370,7 @@ function loadESMFromCJS(mod, filename) { if (isMain) { require('internal/modules/run_main').runEntryPointWithESMLoader((cascadedLoader) => { const mainURL = pathToFileURL(filename).href; - cascadedLoader.import(mainURL, undefined, { __proto__: null }, true); + return cascadedLoader.import(mainURL, undefined, { __proto__: null }, true); }); // ESM won't be accessible via process.mainModule. setOwnProperty(process, 'mainModule', undefined); @@ -1576,7 +1576,6 @@ function loadCTS(module, filename) { throw new ERR_UNSUPPORTED_NODE_MODULES_TYPE_STRIPPING(filename); } const source = getMaybeCachedSource(module, filename); - const { stripTypeScriptTypes } = require('internal/modules/helpers'); const code = stripTypeScriptTypes(source, filename); module._compile(code, filename, 'commonjs'); } @@ -1592,7 +1591,6 @@ function loadTS(module, filename) { } // If already analyzed the source, then it will be cached. const source = getMaybeCachedSource(module, filename); - const { stripTypeScriptTypes } = require('internal/modules/helpers'); const content = stripTypeScriptTypes(source, filename); let format; const pkg = packageJsonReader.getNearestParentPackageJSON(filename); diff --git a/lib/internal/modules/esm/fetch_module.js b/lib/internal/modules/esm/fetch_module.js index b6fd294eb2b624..07904e685becf8 100644 --- a/lib/internal/modules/esm/fetch_module.js +++ b/lib/internal/modules/esm/fetch_module.js @@ -41,14 +41,14 @@ const cacheForGET = new SafeMap(); // [1] The V8 snapshot doesn't like some C++ APIs to be loaded eagerly. Do it // lazily/at runtime and not top level of an internal module. 
-// [2] Creating a new agent instead of using the gloabl agent improves +// [2] Creating a new agent instead of using the global agent improves // performance and precludes the agent becoming tainted. /** @type {import('https').Agent} The Cached HTTP Agent for **secure** HTTP requests. */ let HTTPSAgent; /** * Make a HTTPs GET request (handling agent setup if needed, caching the agent to avoid - * redudant instantiations). + * redundant instantiations). * @param {Parameters[0]} input - The URI to fetch. * @param {Parameters[1]} options - See https.get() options. */ @@ -67,7 +67,7 @@ function HTTPSGet(input, options) { let HTTPAgent; /** * Make a HTTP GET request (handling agent setup if needed, caching the agent to avoid - * redudant instantiations). + * redundant instantiations). * @param {Parameters[0]} input - The URI to fetch. * @param {Parameters[1]} options - See http.get() options. */ diff --git a/lib/internal/modules/esm/get_format.js b/lib/internal/modules/esm/get_format.js index a89446df710a94..26d0bace6cdd39 100644 --- a/lib/internal/modules/esm/get_format.js +++ b/lib/internal/modules/esm/get_format.js @@ -91,7 +91,7 @@ function underNodeModules(url) { let typelessPackageJsonFilesWarnedAbout; function warnTypelessPackageJsonFile(pjsonPath, url) { typelessPackageJsonFilesWarnedAbout ??= new SafeSet(); - if (!typelessPackageJsonFilesWarnedAbout.has(pjsonPath)) { + if (!underNodeModules(url) && !typelessPackageJsonFilesWarnedAbout.has(pjsonPath)) { const warning = `Module type of ${url} is not specified and it doesn't parse as CommonJS.\n` + 'Reparsing as ES module because module syntax was detected. This incurs a performance overhead.\n' + `To eliminate this warning, add "type": "module" to ${pjsonPath}.`; @@ -161,14 +161,14 @@ function getFileProtocolModuleFormat(url, context = { __proto__: null }, ignoreE default: { // The user did not pass `--experimental-default-type`. // `source` is undefined when this is called from `defaultResolve`; // but this gets called again from `defaultLoad`/`defaultLoadSync`. - let parsedSource; - if (source) { - const { stripTypeScriptTypes } = require('internal/modules/helpers'); - parsedSource = stripTypeScriptTypes(source, url); - } + // Since experimental-strip-types depends on detect-module, we always return null + // if source is undefined. + if (!source) { return null; } + const { stripTypeScriptTypes, stringify } = require('internal/modules/helpers'); + const stringifiedSource = stringify(source); + const parsedSource = stripTypeScriptTypes(stringifiedSource, fileURLToPath(url)); const detectedFormat = detectModuleFormat(parsedSource, url); - // When source is undefined, default to module-typescript. - const format = detectedFormat ? `${detectedFormat}-typescript` : 'module-typescript'; + const format = `${detectedFormat}-typescript`; if (format === 'module-typescript' && foundPackageJson) { // This module has a .js extension, a package.json with no `type` field, and ESM syntax. // Warn about the missing `type` field so that the user can avoid the performance penalty of detection. 
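Two behavior changes in the get_format.js hunks above are easy to miss: the missing-`type` warning is now suppressed for files under `node_modules`, and when `source` is unavailable the default case returns `null` early instead of assuming `module-typescript`. A rough standalone sketch of the warning guard, with the message text taken from the hunk and the `node_modules` check simplified to a substring test:

```js
// Warn at most once per package.json path, and never for dependencies.
const warnedPackageJsonFiles = new Set();

function maybeWarnTypelessPackageJson(pjsonPath, url) {
  if (url.includes('/node_modules/') || warnedPackageJsonFiles.has(pjsonPath)) {
    return;
  }
  warnedPackageJsonFiles.add(pjsonPath);
  process.emitWarning(
    `Module type of ${url} is not specified and it doesn't parse as CommonJS.\n` +
    'Reparsing as ES module because module syntax was detected. This incurs a performance overhead.\n' +
    `To eliminate this warning, add "type": "module" to ${pjsonPath}.`,
  );
}
```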
diff --git a/lib/internal/modules/esm/loader.js b/lib/internal/modules/esm/loader.js index 0ac3ed855a6278..0337d0ceaa732b 100644 --- a/lib/internal/modules/esm/loader.js +++ b/lib/internal/modules/esm/loader.js @@ -34,6 +34,7 @@ const { kEmptyObject } = require('internal/util'); const { compileSourceTextModule, getDefaultConditions, + forceDefaultLoader, } = require('internal/modules/esm/utils'); const { kImplicitTypeAttribute } = require('internal/modules/esm/assert'); const { ModuleWrap, kEvaluating, kEvaluated } = internalBinding('module_wrap'); @@ -206,13 +207,11 @@ class ModuleLoader { } async eval(source, url, isEntryPoint = false) { - const evalInstance = (url) => { - return compileSourceTextModule(url, source, this); - }; const { ModuleJob } = require('internal/modules/esm/module_job'); + const wrap = compileSourceTextModule(url, source, this); const module = await onImport.tracePromise(async () => { const job = new ModuleJob( - this, url, undefined, evalInstance, false, false); + this, url, undefined, wrap, false, false); this.loadCache.set(url, undefined, job); const { module } = await job.run(isEntryPoint); return module; @@ -230,40 +229,49 @@ class ModuleLoader { } /** - * Get a (possibly still pending) module job from the cache, - * or create one and return its Promise. - * @param {string} specifier The string after `from` in an `import` statement, - * or the first parameter of an `import()` - * expression - * @param {string | undefined} parentURL The URL of the module importing this - * one, unless this is the Node.js entry - * point. - * @param {Record<string, string>} importAttributes Validations for the - * module import. - * @returns {Promise<ModuleJobBase>} The (possibly pending) module job + * Get a (possibly not yet fully linked) module job from the cache, or create one and return its Promise. + * @param {string} specifier The module request of the module to be resolved. Typically, what's + * requested by `import '<specifier>'` or `import('<specifier>')`. + * @param {string} [parentURL] The URL of the module where the module request is initiated. + * It's undefined if it's from the root module. + * @param {ImportAttributes} importAttributes Attributes from the import statement or expression. + * @returns {Promise<ModuleJobBase>} - * @param {Record<string, string>} importAttributes Validations for the - * module import. - * @param {string} [parentURL] The absolute URL of the module importing this - * one, unless this is the Node.js entry point - * @param {string} [format] The format hint possibly returned by the - * `resolve` hook - * @returns {Promise<ModuleJobBase>} The (possibly pending) module job + * Translate a loaded module source into a ModuleWrap. This is run synchronously, + * but the translator may return the ModuleWrap in a Promise. + * @param {string} url URL of the module to be translated. + * @param {string} format Format of the module to be translated. This is used to find + * matching translators. + * @param {ModuleSource} source Source of the module to be translated. + * @param {boolean} isMain Whether the module to be translated is the entry point. + * @returns {ModuleWrap | Promise<ModuleWrap>} + */ + #translate(url, format, source, isMain) { + this.validateLoadResult(url, format); + const translator = getTranslators().get(format); + + if (!translator) { + throw new ERR_UNKNOWN_MODULE_FORMAT(format, url); + } + + return FunctionPrototypeCall(translator, this, url, source, isMain); + } + + /** + * Load a module and translate it into a ModuleWrap for require() in imported CJS. + * This is run synchronously, and the translator always returns a ModuleWrap synchronously.
+ * @param {string} url URL of the module to be translated. + * @param {object} loadContext See {@link load} + * @param {boolean} isMain Whether the module to be translated is the entry point. + * @returns {ModuleWrap} */ - #createModuleJob(url, importAttributes, parentURL, format, sync) { - const callTranslator = ({ format: finalFormat, responseURL, source }, isMain) => { - const translator = getTranslators().get(finalFormat); + loadAndTranslateForRequireInImportedCJS(url, loadContext, isMain) { + const { format: formatFromLoad, source } = this.#loadSync(url, loadContext); + + if (formatFromLoad === 'wasm') { // require(wasm) is not supported. + throw new ERR_UNKNOWN_MODULE_FORMAT(formatFromLoad, url); + } - if (!translator) { - throw new ERR_UNKNOWN_MODULE_FORMAT(finalFormat, responseURL); + if (formatFromLoad === 'module' || formatFromLoad === 'module-typescript') { + if (!getOptionValue('--experimental-require-module')) { + throw new ERR_REQUIRE_ESM(url, true); } + } - return FunctionPrototypeCall(translator, this, responseURL, source, isMain); - }; - const context = { format, importAttributes }; + let finalFormat = formatFromLoad; + if (formatFromLoad === 'commonjs') { + finalFormat = 'require-commonjs'; + } + if (formatFromLoad === 'commonjs-typescript') { + finalFormat = 'require-commonjs-typescript'; + } - const moduleProvider = sync ? - (url, isMain) => callTranslator(this.loadSync(url, context), isMain) : - async (url, isMain) => callTranslator(await this.load(url, context), isMain); + const wrap = this.#translate(url, finalFormat, source, isMain); + assert(wrap instanceof ModuleWrap, `Translator used for require(${url}) should not be async`); + return wrap; + } + + /** + * Load a module and translate it into a ModuleWrap for ordinary imported ESM. + * This is run asynchronously. + * @param {string} url URL of the module to be translated. + * @param {object} loadContext See {@link load} + * @param {boolean} isMain Whether the module to be translated is the entry point. + * @returns {Promise} + */ + async loadAndTranslate(url, loadContext, isMain) { + const { format, source } = await this.load(url, loadContext); + return this.#translate(url, format, source, isMain); + } + + /** + * Load a module and translate it into a ModuleWrap, and create a ModuleJob from it. + * This runs synchronously. If isForRequireInImportedCJS is true, the module should be linked + * by the time this returns. Otherwise it may still have pending module requests. + * @param {string} url The URL that was resolved for this module. + * @param {ImportAttributes} importAttributes See {@link getModuleJobForImport} + * @param {string} [parentURL] See {@link getModuleJobForImport} + * @param {string} [format] The format hint possibly returned by the `resolve` hook + * @param {boolean} isForRequireInImportedCJS Whether this module job is created for require() + * in imported CJS. 
+ * @returns {ModuleJobBase} The (possibly pending) module job + */ + #createModuleJob(url, importAttributes, parentURL, format, isForRequireInImportedCJS) { + const context = { format, importAttributes }; const isMain = parentURL === undefined; + let moduleOrModulePromise; + if (isForRequireInImportedCJS) { + moduleOrModulePromise = this.loadAndTranslateForRequireInImportedCJS(url, context, isMain); + } else { + moduleOrModulePromise = this.loadAndTranslate(url, context, isMain); + } + const inspectBrk = ( isMain && getOptionValue('--inspect-brk') @@ -457,10 +517,10 @@ class ModuleLoader { this, url, importAttributes, - moduleProvider, + moduleOrModulePromise, isMain, inspectBrk, - sync, + isForRequireInImportedCJS, ); this.loadCache.set(url, importAttributes.type, job); @@ -479,7 +539,7 @@ class ModuleLoader { */ async import(specifier, parentURL, importAttributes, isEntryPoint = false) { return onImport.tracePromise(async () => { - const moduleJob = await this.getModuleJob(specifier, parentURL, importAttributes); + const moduleJob = await this.getModuleJobForImport(specifier, parentURL, importAttributes); const { module } = await moduleJob.run(isEntryPoint); return module.getNamespace(); }, { @@ -504,39 +564,72 @@ class ModuleLoader { } /** - * Resolve the location of the module. - * @param {string} originalSpecifier The specified URL path of the module to - * be resolved. - * @param {string} [parentURL] The URL path of the module's parent. - * @param {ImportAttributes} importAttributes Attributes from the import - * statement or expression. - * @returns {{ format: string, url: URL['href'] }} + * Resolve a module request to a URL identifying the location of the module. Handles customization hooks, + * if any. + * @param {string|URL} specifier The module request of the module to be resolved. Typically, what's + * requested by `import specifier`, `import(specifier)` or + * `import.meta.resolve(specifier)`. + * @param {string} [parentURL] The URL of the module where the module request is initiated. + * It's undefined if it's from the root module. + * @param {ImportAttributes} importAttributes Attributes from the import statement or expression. + * @returns {Promise<{format: string, url: string}>} */ - resolve(originalSpecifier, parentURL, importAttributes) { - originalSpecifier = `${originalSpecifier}`; - if (this.#customizations) { - return this.#customizations.resolve(originalSpecifier, parentURL, importAttributes); + resolve(specifier, parentURL, importAttributes) { + specifier = `${specifier}`; + if (this.#customizations) { // Only has module.register hooks. + return this.#customizations.resolve(specifier, parentURL, importAttributes); } - const requestKey = this.#resolveCache.serializeKey(originalSpecifier, importAttributes); + return this.#cachedDefaultResolve(specifier, parentURL, importAttributes); + } + + /** + * Either return a cached resolution, or perform the default resolution which is synchronous, and + * cache the result. + * @param {string} specifier See {@link resolve}. + * @param {string} [parentURL] See {@link resolve}. + * @param {ImportAttributes} importAttributes See {@link resolve}. 
+ * @returns {{ format: string, url: string }} + */ + #cachedDefaultResolve(specifier, parentURL, importAttributes) { + const requestKey = this.#resolveCache.serializeKey(specifier, importAttributes); const cachedResult = this.#resolveCache.get(requestKey, parentURL); if (cachedResult != null) { return cachedResult; } - const result = this.defaultResolve(originalSpecifier, parentURL, importAttributes); + const result = this.defaultResolve(specifier, parentURL, importAttributes); this.#resolveCache.set(requestKey, parentURL, result); return result; } /** - * Just like `resolve` except synchronous. This is here specifically to support - * `import.meta.resolve` which must happen synchronously. + * This is the default resolve step for future synchronous hooks, which incorporates asynchronous hooks + * from module.register(), run in a blocking fashion so that the step as a whole stays synchronous. + * @param {string|URL} specifier See {@link resolveSync}. + * @param {{ parentURL?: string, importAttributes: ImportAttributes}} context See {@link resolveSync}. + * @returns {{ format: string, url: string }} */ - resolveSync(originalSpecifier, parentURL, importAttributes) { - originalSpecifier = `${originalSpecifier}`; + #resolveAndMaybeBlockOnLoaderThread(specifier, context) { if (this.#customizations) { - return this.#customizations.resolveSync(originalSpecifier, parentURL, importAttributes); + return this.#customizations.resolveSync(specifier, context.parentURL, context.importAttributes); } - return this.defaultResolve(originalSpecifier, parentURL, importAttributes); + return this.#cachedDefaultResolve(specifier, context.parentURL, context.importAttributes); + } + + /** + * Similar to {@link resolve}, but the results are always synchronously returned. If there are any + * asynchronous resolve hooks from module.register(), it will block until the results are returned + * from the loader thread for this to be synchronous. + * This is here to support `import.meta.resolve()`, `require()` in imported CJS, and + * future synchronous hooks. + * + * TODO(joyeecheung): consolidate the cache behavior and use this in require(esm). + * @param {string|URL} specifier See {@link resolve}. + * @param {string} [parentURL] See {@link resolve}. + * @param {ImportAttributes} [importAttributes] See {@link resolve}. + * @returns {{ format: string, url: string }} + */ + resolveSync(specifier, parentURL, importAttributes = { __proto__: null }) { + return this.#resolveAndMaybeBlockOnLoaderThread(`${specifier}`, { parentURL, importAttributes }); } /** @@ -558,41 +651,49 @@ class ModuleLoader { } /** - * Provide source that is understood by one of Node's translators. - * @param {URL['href']} url The URL/path of the module to be loaded - * @param {object} [context] Metadata about the module + * Provide source that is understood by one of Node's translators. Handles customization hooks, + * if any. + * @param {string} url The URL of the module to be loaded. + * @param {object} context Metadata about the module * @returns {Promise<{ format: ModuleFormat, source: ModuleSource }>} */ async load(url, context) { + if (this.#customizations) { + return this.#customizations.load(url, context); + } + defaultLoad ??= require('internal/modules/esm/load').defaultLoad; - const result = this.#customizations ?
- await this.#customizations.load(url, context) : - await defaultLoad(url, context); - this.validateLoadResult(url, result?.format); - return result; + return defaultLoad(url, context); } - loadSync(url, context) { - defaultLoadSync ??= require('internal/modules/esm/load').defaultLoadSync; - - let result = this.#customizations ? - this.#customizations.loadSync(url, context) : - defaultLoadSync(url, context); - let format = result?.format; - if (format === 'module' || format === 'module-typescript') { - throw new ERR_REQUIRE_ESM(url, true); - } - if (format === 'commonjs') { - format = 'require-commonjs'; - result = { __proto__: result, format }; - } - if (format === 'commonjs-typescript') { - format = 'require-commonjs-typescript'; - result = { __proto__: result, format }; + /** + * This is the default load step for future synchronous hooks, which incorporates asynchronous hooks + * from module.register() which are run in a blocking fashion for it to be synchronous. + * @param {string} url See {@link load} + * @param {object} context See {@link load} + * @returns {{ format: ModuleFormat, source: ModuleSource }} + */ + #loadAndMaybeBlockOnLoaderThread(url, context) { + if (this.#customizations) { + return this.#customizations.loadSync(url, context); } + defaultLoadSync ??= require('internal/modules/esm/load').defaultLoadSync; + return defaultLoadSync(url, context); + } - this.validateLoadResult(url, format); - return result; + /** + * Similar to {@link load} but this is always run synchronously. If there are asynchronous hooks + * from module.register(), this blocks on the loader thread for it to return synchronously. + * + * This is here to support `require()` in imported CJS and future synchronous hooks. + * + * TODO(joyeecheung): consolidate the cache behavior and use this in require(esm). + * @param {string} url See {@link load} + * @param {object} [context] See {@link load} + * @returns {{ format: ModuleFormat, source: ModuleSource }} + */ + #loadSync(url, context) { + return this.#loadAndMaybeBlockOnLoaderThread(url, context); } validateLoadResult(url, format) { @@ -698,7 +799,7 @@ function createModuleLoader() { // Don't spawn a new worker if custom loaders are disabled. For instance, if // we're already in a worker thread created by instantiating // CustomizedModuleLoader; doing so would cause an infinite loop. - if (!require('internal/modules/esm/utils').forceDefaultLoader()) { + if (!forceDefaultLoader()) { const userLoaderPaths = getOptionValue('--experimental-loader'); if (userLoaderPaths.length > 0) { if (!emittedLoaderFlagWarning) { diff --git a/lib/internal/modules/esm/module_job.js b/lib/internal/modules/esm/module_job.js index 2f42909e0c6f82..62206fcc44c2d1 100644 --- a/lib/internal/modules/esm/module_job.js +++ b/lib/internal/modules/esm/module_job.js @@ -8,7 +8,6 @@ const { ObjectSetPrototypeOf, PromisePrototypeThen, PromiseResolve, - ReflectApply, RegExpPrototypeExec, RegExpPrototypeSymbolReplace, SafePromiseAllReturnArrayLike, @@ -56,13 +55,12 @@ const isCommonJSGlobalLikeNotDefinedError = (errorMessage) => ); class ModuleJobBase { - constructor(url, importAttributes, moduleWrapMaybePromise, isMain, inspectBrk) { + constructor(url, importAttributes, isMain, inspectBrk) { this.importAttributes = importAttributes; this.isMain = isMain; this.inspectBrk = inspectBrk; this.url = url; - this.module = moduleWrapMaybePromise; } } @@ -70,21 +68,29 @@ class ModuleJobBase { * its dependencies, over time. 
*/ class ModuleJob extends ModuleJobBase { #loader = null; - // `loader` is the Loader instance used for loading dependencies. + + /** + * @param {ModuleLoader} loader The ESM loader. + * @param {string} url URL of the module to be wrapped in ModuleJob. + * @param {ImportAttributes} importAttributes Import attributes from the import statement. + * @param {ModuleWrap|Promise} moduleOrModulePromise Translated ModuleWrap for the module. + * @param {boolean} isMain Whether the module is the entry point. + * @param {boolean} inspectBrk Whether this module should be evaluated with the + * first line paused in the debugger (because --inspect-brk is passed). + * @param {boolean} isForRequireInImportedCJS Whether this is created for require() in imported CJS. + */ constructor(loader, url, importAttributes = { __proto__: null }, - moduleProvider, isMain, inspectBrk, sync = false) { - const modulePromise = ReflectApply(moduleProvider, loader, [url, isMain]); - super(url, importAttributes, modulePromise, isMain, inspectBrk); + moduleOrModulePromise, isMain, inspectBrk, isForRequireInImportedCJS = false) { + super(url, importAttributes, isMain, inspectBrk); this.#loader = loader; - // Expose the promise to the ModuleWrap directly for linking below. - // `this.module` is also filled in below. - this.modulePromise = modulePromise; - if (sync) { - this.module = this.modulePromise; + // Expose the promise to the ModuleWrap directly for linking below. + if (isForRequireInImportedCJS) { + this.module = moduleOrModulePromise; + assert(this.module instanceof ModuleWrap); this.modulePromise = PromiseResolve(this.module); } else { - this.modulePromise = PromiseResolve(this.modulePromise); + this.modulePromise = moduleOrModulePromise; } // Promise for the list of all dependencyJobs. @@ -123,7 +129,7 @@ class ModuleJob extends ModuleJobBase { for (let idx = 0; idx < moduleRequests.length; idx++) { const { specifier, attributes } = moduleRequests[idx]; - const dependencyJobPromise = this.#loader.getModuleJob( + const dependencyJobPromise = this.#loader.getModuleJobForImport( specifier, this.url, attributes, ); const modulePromise = PromisePrototypeThen(dependencyJobPromise, (job) => { @@ -288,14 +294,33 @@ class ModuleJob extends ModuleJobBase { } } -// This is a fully synchronous job and does not spawn additional threads in any way. -// All the steps are ensured to be synchronous and it throws on instantiating -// an asynchronous graph. +/** + * This is a fully synchronous job and does not spawn additional threads in any way. + * All the steps are ensured to be synchronous and it throws on instantiating + * an asynchronous graph. It also disallows CJS <-> ESM cycles. + * + * This is used for ES modules loaded via require(esm). Modules loaded by require() in + * imported CJS are handled by ModuleJob with the isForRequireInImportedCJS set to true instead. + * The two currently have different caching behaviors. + * TODO(joyeecheung): consolidate this with the isForRequireInImportedCJS variant of ModuleJob. + */ class ModuleJobSync extends ModuleJobBase { #loader = null; + + /** + * @param {ModuleLoader} loader The ESM loader. + * @param {string} url URL of the module to be wrapped in ModuleJob. + * @param {ImportAttributes} importAttributes Import attributes from the import statement. + * @param {ModuleWrap} moduleWrap Translated ModuleWrap for the module. + * @param {boolean} isMain Whether the module is the entry point. 
+ * @param {boolean} inspectBrk Whether this module should be evaluated with the + * first line paused in the debugger (because --inspect-brk is passed). + */ constructor(loader, url, importAttributes, moduleWrap, isMain, inspectBrk) { - super(url, importAttributes, moduleWrap, isMain, inspectBrk, true); + super(url, importAttributes, isMain, inspectBrk, true); + this.#loader = loader; + this.module = moduleWrap; assert(this.module instanceof ModuleWrap); // Store itself into the cache first before linking in case there are circular diff --git a/lib/internal/modules/esm/resolve.js b/lib/internal/modules/esm/resolve.js index 1fbbb6773c9479..35925ef0817273 100644 --- a/lib/internal/modules/esm/resolve.js +++ b/lib/internal/modules/esm/resolve.js @@ -22,6 +22,7 @@ const { StringPrototypeStartsWith, encodeURIComponent, } = primordials; +const assert = require('internal/assert'); const internalFS = require('internal/fs/utils'); const { BuiltinModule } = require('internal/bootstrap/realm'); const { realpathSync } = require('fs'); @@ -117,18 +118,17 @@ function emitInvalidSegmentDeprecation(target, request, match, pjsonUrl, interna * Emits a deprecation warning if the given URL is a module and * the package.json file does not define a "main" or "exports" field. * @param {URL} url - The URL of the module being resolved. - * @param {URL} packageJSONUrl - The URL of the package.json file for the module. + * @param {string} path - The path of the module being resolved. + * @param {string} pkgPath - The path of the parent dir of the package.json file for the module. * @param {string | URL} [base] - The base URL for the module being resolved. * @param {string} [main] - The "main" field from the package.json file. */ -function emitLegacyIndexDeprecation(url, packageJSONUrl, base, main) { +function emitLegacyIndexDeprecation(url, path, pkgPath, base, main) { if (process.noDeprecation) { return; } const format = defaultGetFormatWithoutErrors(url); if (format !== 'module') { return; } - const path = fileURLToPath(url); - const pkgPath = fileURLToPath(new URL('.', packageJSONUrl)); const basePath = fileURLToPath(base); if (!main) { process.emitWarning( @@ -196,20 +196,19 @@ const legacyMainResolveExtensionsIndexes = { * @returns {URL} */ function legacyMainResolve(packageJSONUrl, packageConfig, base) { - const packageJsonUrlString = packageJSONUrl.href; - - if (typeof packageJsonUrlString !== 'string') { - throw new ERR_INVALID_ARG_TYPE('packageJSONUrl', ['URL'], packageJSONUrl); - } + assert(isURL(packageJSONUrl)); + const pkgPath = fileURLToPath(new URL('.', packageJSONUrl)); const baseStringified = isURL(base) ? base.href : base; - const resolvedOption = FSLegacyMainResolve(packageJsonUrlString, packageConfig.main, baseStringified); + const resolvedOption = FSLegacyMainResolve(pkgPath, packageConfig.main, baseStringified); - const baseUrl = resolvedOption <= legacyMainResolveExtensionsIndexes.kResolvedByMainIndexNode ? `./${packageConfig.main}` : ''; - const resolvedUrl = new URL(baseUrl + legacyMainResolveExtensions[resolvedOption], packageJSONUrl); + const maybeMain = resolvedOption <= legacyMainResolveExtensionsIndexes.kResolvedByMainIndexNode ? 
+ packageConfig.main || './' : ''; + const resolvedPath = resolve(pkgPath, maybeMain + legacyMainResolveExtensions[resolvedOption]); + const resolvedUrl = pathToFileURL(resolvedPath); - emitLegacyIndexDeprecation(resolvedUrl, packageJSONUrl, base, packageConfig.main); + emitLegacyIndexDeprecation(resolvedUrl, resolvedPath, pkgPath, base, packageConfig.main); return resolvedUrl; } @@ -237,14 +236,15 @@ function finalizeResolution(resolved, base, preserveSymlinks) { try { path = fileURLToPath(resolved); } catch (err) { - const { setOwnProperty } = require('internal/util'); setOwnProperty(err, 'input', `${resolved}`); setOwnProperty(err, 'module', `${base}`); throw err; } - const stats = internalFsBinding.internalModuleStat(StringPrototypeEndsWith(path, '/') ? - StringPrototypeSlice(path, -1) : path); + const stats = internalFsBinding.internalModuleStat( + internalFsBinding, + StringPrototypeEndsWith(path, '/') ? StringPrototypeSlice(path, -1) : path, + ); // Check for stats.isDirectory() if (stats === 1) { @@ -791,8 +791,8 @@ function packageResolve(specifier, base, conditions) { // ResolveSelf const packageConfig = packageJsonReader.getPackageScopeConfig(base); if (packageConfig.exists) { - const packageJSONUrl = pathToFileURL(packageConfig.pjsonPath); if (packageConfig.exports != null && packageConfig.name === packageName) { + const packageJSONUrl = pathToFileURL(packageConfig.pjsonPath); return packageExportsResolve( packageJSONUrl, packageSubpath, packageConfig, base, conditions); } @@ -804,6 +804,7 @@ function packageResolve(specifier, base, conditions) { let lastPath; do { const stat = internalFsBinding.internalModuleStat( + internalFsBinding, StringPrototypeSlice(packageJSONPath, 0, packageJSONPath.length - 13), ); // Check for !stat.isDirectory() @@ -834,8 +835,6 @@ function packageResolve(specifier, base, conditions) { // Cross-platform root check. } while (packageJSONPath.length !== lastPath.length); - // eslint can't handle the above code. - // eslint-disable-next-line no-unreachable throw new ERR_MODULE_NOT_FOUND(packageName, fileURLToPath(base), null); } diff --git a/lib/internal/modules/esm/translators.js b/lib/internal/modules/esm/translators.js index 8f88214f558c52..9b89c3e1d52a0f 100644 --- a/lib/internal/modules/esm/translators.js +++ b/lib/internal/modules/esm/translators.js @@ -18,16 +18,6 @@ const { globalThis: { WebAssembly }, } = primordials; -/** @type {import('internal/util/types')} */ -let _TYPES = null; -/** - * Lazily loads and returns the internal/util/types module.
- */ -function lazyTypes() { - if (_TYPES !== null) { return _TYPES; } - return _TYPES = require('internal/util/types'); -} - const { compileFunctionForCJSLoader, } = internalBinding('contextify'); @@ -37,7 +27,9 @@ const assert = require('internal/assert'); const { readFileSync } = require('fs'); const { dirname, extname, isAbsolute } = require('path'); const { + assertBufferSource, loadBuiltinModule, + stringify, stripTypeScriptTypes, stripBOM, urlToFilename, @@ -57,7 +49,6 @@ let debug = require('internal/util/debuglog').debuglog('esm', (fn) => { const { emitExperimentalWarning, kEmptyObject, setOwnProperty, isWindows } = require('internal/util'); const { ERR_UNKNOWN_BUILTIN_MODULE, - ERR_INVALID_RETURN_PROPERTY_VALUE, } = require('internal/errors').codes; const { maybeCacheSourceMap } = require('internal/source_map/source_map_cache'); const moduleWrap = internalBinding('module_wrap'); @@ -77,28 +68,11 @@ function getSource(url) { /** @type {import('deps/cjs-module-lexer/lexer.js').parse} */ let cjsParse; /** - * Initializes the CommonJS module lexer parser. - * If WebAssembly is available, it uses the optimized version from the dist folder. - * Otherwise, it falls back to the JavaScript version from the lexer folder. + * Initializes the CommonJS module lexer parser using the JavaScript version. + * TODO(joyeecheung): Use `require('internal/deps/cjs-module-lexer/dist/lexer').initSync()` + * when cjs-module-lexer 1.4.0 is rolled in. */ -async function initCJSParse() { - if (typeof WebAssembly === 'undefined') { - initCJSParseSync(); - } else { - const { parse, init } = - require('internal/deps/cjs-module-lexer/dist/lexer'); - try { - await init(); - cjsParse = parse; - } catch { - initCJSParseSync(); - } - } -} - function initCJSParseSync() { - // TODO(joyeecheung): implement a binding that directly compiles using - // v8::WasmModuleObject::Compile() synchronously. if (cjsParse === undefined) { cjsParse = require('internal/deps/cjs-module-lexer/lexer').parse; } @@ -107,44 +81,6 @@ function initCJSParseSync() { const translators = new SafeMap(); exports.translators = translators; -let DECODER = null; -/** - * Asserts that the given body is a buffer source (either a string, array buffer, or typed array). - * Throws an error if the body is not a buffer source. - * @param {string | ArrayBufferView | ArrayBuffer} body - The body to check. - * @param {boolean} allowString - Whether or not to allow a string as a valid buffer source. - * @param {string} hookName - The name of the hook being called. - * @throws {ERR_INVALID_RETURN_PROPERTY_VALUE} If the body is not a buffer source. - */ -function assertBufferSource(body, allowString, hookName) { - if (allowString && typeof body === 'string') { - return; - } - const { isArrayBufferView, isAnyArrayBuffer } = lazyTypes(); - if (isArrayBufferView(body) || isAnyArrayBuffer(body)) { - return; - } - throw new ERR_INVALID_RETURN_PROPERTY_VALUE( - `${allowString ? 'string, ' : ''}array buffer, or typed array`, - hookName, - 'source', - body, - ); -} - -/** - * Converts a buffer or buffer-like object to a string. - * @param {string | ArrayBuffer | ArrayBufferView} body - The buffer or buffer-like object to convert to a string. - * @returns {string} The resulting string. - */ -function stringify(body) { - if (typeof body === 'string') { return body; } - assertBufferSource(body, false, 'load'); - const { TextDecoder } = require('internal/encoding'); - DECODER = DECODER === null ? 
new TextDecoder() : DECODER; - return DECODER.decode(body); -} - /** * Converts a URL to a file path if the URL protocol is 'file:'. * @param {string} url - The URL to convert. @@ -206,7 +142,7 @@ function loadCJSModule(module, source, url, filename, isMain) { } specifier = `${pathToFileURL(path)}`; } - const job = cascadedLoader.getModuleJobSync(specifier, url, importAttributes); + const job = cascadedLoader.getModuleJobForRequireInImportedCJS(specifier, url, importAttributes); job.runSync(); return cjsCache.get(job.url).exports; }; @@ -287,11 +223,9 @@ function createCJSModuleWrap(url, source, isMain, loadCJS = loadCJSModule) { translators.set('commonjs-sync', function requireCommonJS(url, source, isMain) { initCJSParseSync(); - assert(!isMain); // This is only used by imported CJS modules. return createCJSModuleWrap(url, source, isMain, (module, source, url, filename, isMain) => { assert(module === CJSModule._cache[filename]); - assert(!isMain); wrapModuleLoad(filename, null, isMain); }); }); @@ -299,6 +233,7 @@ translators.set('commonjs-sync', function requireCommonJS(url, source, isMain) { // Handle CommonJS modules referenced by `require` calls. // This translator function must be sync, as `require` is sync. translators.set('require-commonjs', (url, source, isMain) => { + initCJSParseSync(); assert(cjsParse); return createCJSModuleWrap(url, source); @@ -315,10 +250,9 @@ translators.set('require-commonjs-typescript', (url, source, isMain) => { // Handle CommonJS modules referenced by `import` statements or expressions, // or as the initial entry point when the ESM loader handles a CommonJS entry. -translators.set('commonjs', async function commonjsStrategy(url, source, - isMain) { +translators.set('commonjs', function commonjsStrategy(url, source, isMain) { if (!cjsParse) { - await initCJSParse(); + initCJSParseSync(); } // For backward-compatibility, it's possible to return a nullish value for @@ -336,7 +270,6 @@ translators.set('commonjs', async function commonjsStrategy(url, source, // Continue regardless of error. } return createCJSModuleWrap(url, source, isMain, cjsLoader); - }); /** @@ -442,7 +375,7 @@ translators.set('json', function jsonStrategy(url, source) { modulePath = isWindows ? StringPrototypeReplaceAll(pathname, '/', '\\') : pathname; module = CJSModule._cache[modulePath]; - if (module && module.loaded) { + if (module?.loaded) { const exports = module.exports; return new ModuleWrap(url, undefined, ['default'], function() { this.setExport('default', exports); @@ -456,7 +389,7 @@ translators.set('json', function jsonStrategy(url, source) { // export, we have to check again if the module already exists or not. // TODO: remove CJS loader from here as well. module = CJSModule._cache[modulePath]; - if (module && module.loaded) { + if (module?.loaded) { const exports = module.exports; return new ModuleWrap(url, undefined, ['default'], function() { this.setExport('default', exports); @@ -497,8 +430,9 @@ translators.set('wasm', async function(url, source) { let compiled; try { - // TODO(joyeecheung): implement a binding that directly compiles using - // v8::WasmModuleObject::Compile() synchronously. + // TODO(joyeecheung): implement a translator that just uses + // compiled = new WebAssembly.Module(source) to compile it + // synchronously. 
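// A hedged sketch of the synchronous path this TODO describes: the standard
// WebAssembly.Module constructor compiles on the calling thread, so the
// translator could keep the same error-decorating shape, roughly:
//
//   let compiled;
//   try {
//     compiled = new WebAssembly.Module(source);
//   } catch (err) {
//     err.message = errPath(url) + ': ' + err.message;
//     throw err;
//   }
//
// Until that lands, the translator stays async and awaits the promise-based
// WebAssembly.compile(), as below: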
compiled = await WebAssembly.compile(source); } catch (err) { err.message = errPath(url) + ': ' + err.message; @@ -525,7 +459,7 @@ translators.set('wasm', async function(url, source) { // Strategy for loading a commonjs TypeScript module translators.set('commonjs-typescript', function(url, source) { emitExperimentalWarning('Type Stripping'); - assertBufferSource(source, false, 'load'); + assertBufferSource(source, true, 'load'); const code = stripTypeScriptTypes(stringify(source), url); debug(`Translating TypeScript ${url}`); return FunctionPrototypeCall(translators.get('commonjs'), this, url, code, false); @@ -534,7 +468,7 @@ translators.set('commonjs-typescript', function(url, source) { // Strategy for loading an esm TypeScript module translators.set('module-typescript', function(url, source) { emitExperimentalWarning('Type Stripping'); - assertBufferSource(source, false, 'load'); + assertBufferSource(source, true, 'load'); const code = stripTypeScriptTypes(stringify(source), url); debug(`Translating TypeScript ${url}`); return FunctionPrototypeCall(translators.get('module'), this, url, code, false); diff --git a/lib/internal/modules/esm/utils.js b/lib/internal/modules/esm/utils.js index d393d4336a0c1e..2799af0f8dd492 100644 --- a/lib/internal/modules/esm/utils.js +++ b/lib/internal/modules/esm/utils.js @@ -83,6 +83,9 @@ function initializeDefaultConditions() { ...userConditions, ]); defaultConditionsSet = new SafeSet(defaultConditions); + if (getOptionValue('--experimental-require-module')) { + defaultConditionsSet.add('module-sync'); + } } /** diff --git a/lib/internal/modules/helpers.js b/lib/internal/modules/helpers.js index 9d361c0f6c861c..1add21bb541cf4 100644 --- a/lib/internal/modules/helpers.js +++ b/lib/internal/modules/helpers.js @@ -16,6 +16,8 @@ const { } = primordials; const { ERR_INVALID_ARG_TYPE, + ERR_INVALID_RETURN_PROPERTY_VALUE, + ERR_INVALID_TYPESCRIPT_SYNTAX, } = require('internal/errors').codes; const { BuiltinModule } = require('internal/bootstrap/realm'); @@ -39,6 +41,7 @@ const { enableCompileCache: _enableCompileCache, getCompileCacheDir: _getCompileCacheDir, compileCacheStatus: _compileCacheStatus, + flushCompileCache, } = internalBinding('modules'); let debug = require('internal/util/debuglog').debuglog('module', (fn) => { @@ -80,6 +83,9 @@ function initializeCjsConditions() { ...addonConditions, ...userConditions, ]); + if (getOptionValue('--experimental-require-module')) { + cjsConditions.add('module-sync'); + } } /** @@ -310,44 +316,37 @@ function getBuiltinModule(id) { return normalizedId ? require(normalizedId) : undefined; } -/** - * TypeScript parsing function, by default Amaro.transformSync. - * @type {Function} - */ -let typeScriptParser; /** * The TypeScript parsing mode, either 'strip-only' or 'transform'. * @type {string} */ -let typeScriptParsingMode; -/** - * Whether source maps are enabled for TypeScript parsing. - * @type {boolean} - */ -let sourceMapEnabled; +const getTypeScriptParsingMode = getLazy(() => + (getOptionValue('--experimental-transform-types') ? 'transform' : 'strip-only'), +); /** * Load the TypeScript parser. - * @param {Function} parser - A function that takes a string of TypeScript code * and returns an object with a `code` property. * @returns {Function} The TypeScript parser function. 
*/ -function loadTypeScriptParser(parser) { - if (typeScriptParser) { - return typeScriptParser; - } +const loadTypeScriptParser = getLazy(() => { + const amaro = require('internal/deps/amaro/dist/index'); + return amaro.transformSync; +}); - if (parser) { - typeScriptParser = parser; - } else { - const amaro = require('internal/deps/amaro/dist/index'); - // Default option for Amaro is to perform Type Stripping only. - typeScriptParsingMode = getOptionValue('--experimental-transform-types') ? 'transform' : 'strip-only'; - sourceMapEnabled = getOptionValue('--enable-source-maps'); - // Curry the transformSync function with the default options. - typeScriptParser = amaro.transformSync; +/** + * + * @param {string} source the source code + * @param {object} options the options to pass to the parser + * @returns {TransformOutput} an object with a `code` property. + */ +function parseTypeScript(source, options) { + const parse = loadTypeScriptParser(); + try { + return parse(source, options); + } catch (error) { + throw new ERR_INVALID_TYPESCRIPT_SYNTAX(error); } - return typeScriptParser; } /** @@ -362,14 +361,13 @@ function loadTypeScriptParser(parser) { */ function stripTypeScriptTypes(source, filename) { assert(typeof source === 'string'); - const parse = loadTypeScriptParser(); const options = { __proto__: null, - mode: typeScriptParsingMode, - sourceMap: sourceMapEnabled, + mode: getTypeScriptParsingMode(), + sourceMap: getOptionValue('--enable-source-maps'), filename, }; - const { code, map } = parse(source, options); + const { code, map } = parseTypeScript(source, options); if (map) { // TODO(@marco-ippolito) When Buffer.transcode supports utf8 to // base64 transformation, we should change this line. @@ -429,10 +427,61 @@ function getCompileCacheDir() { return _getCompileCacheDir() || undefined; } +/** @type {import('internal/util/types')} */ +let _TYPES = null; +/** + * Lazily loads and returns the internal/util/types module. + */ +function lazyTypes() { + if (_TYPES !== null) { return _TYPES; } + return _TYPES = require('internal/util/types'); +} + +/** + * Asserts that the given body is a buffer source (either a string, array buffer, or typed array). + * Throws an error if the body is not a buffer source. + * @param {string | ArrayBufferView | ArrayBuffer} body - The body to check. + * @param {boolean} allowString - Whether or not to allow a string as a valid buffer source. + * @param {string} hookName - The name of the hook being called. + * @throws {ERR_INVALID_RETURN_PROPERTY_VALUE} If the body is not a buffer source. + */ +function assertBufferSource(body, allowString, hookName) { + if (allowString && typeof body === 'string') { + return; + } + const { isArrayBufferView, isAnyArrayBuffer } = lazyTypes(); + if (isArrayBufferView(body) || isAnyArrayBuffer(body)) { + return; + } + throw new ERR_INVALID_RETURN_PROPERTY_VALUE( + `${allowString ? 'string, ' : ''}array buffer, or typed array`, + hookName, + 'source', + body, + ); +} + +let DECODER = null; + +/** + * Converts a buffer or buffer-like object to a string. + * @param {string | ArrayBuffer | ArrayBufferView} body - The buffer or buffer-like object to convert to a string. + * @returns {string} The resulting string. + */ +function stringify(body) { + if (typeof body === 'string') { return body; } + assertBufferSource(body, false, 'load'); + const { TextDecoder } = require('internal/encoding'); + DECODER = DECODER === null ? 
new TextDecoder() : DECODER; + return DECODER.decode(body); +} + module.exports = { addBuiltinLibsToObject, constants, enableCompileCache, + assertBufferSource, + flushCompileCache, getBuiltinModule, getCjsConditions, getCompileCacheDir, @@ -442,6 +491,7 @@ module.exports = { makeRequireFunction, normalizeReferrerURL, stripTypeScriptTypes, + stringify, stripBOM, toRealPath, hasStartedUserCJSExecution() { diff --git a/lib/internal/modules/run_main.js b/lib/internal/modules/run_main.js index 1e1a1ea46fc6c1..ab4783a7982b9f 100644 --- a/lib/internal/modules/run_main.js +++ b/lib/internal/modules/run_main.js @@ -8,7 +8,7 @@ const { const { getNearestParentPackageJSONType } = internalBinding('modules'); const { getOptionValue } = require('internal/options'); const path = require('path'); -const { pathToFileURL } = require('internal/url'); +const { pathToFileURL, URL } = require('internal/url'); const { kEmptyObject, getCWDURL } = require('internal/util'); const { hasUncaughtExceptionCaptureCallback, @@ -48,7 +48,6 @@ function resolveMainPath(main) { } catch (err) { if (defaultType === 'module' && err?.code === 'ENOENT') { const { decorateErrorWithCommonJSHints } = require('internal/modules/esm/resolve'); - const { getCWDURL } = require('internal/util'); decorateErrorWithCommonJSHints(err, mainPath, getCWDURL()); } throw err; @@ -155,9 +154,14 @@ function runEntryPointWithESMLoader(callback) { * @param {string} main - First positional CLI argument, such as `'entry.js'` from `node entry.js` */ function executeUserEntryPoint(main = process.argv[1]) { - const resolvedMain = resolveMainPath(main); - const useESMLoader = shouldUseESMLoader(resolvedMain); - let mainURL; + let useESMLoader; + let resolvedMain; + if (getOptionValue('--entry-url')) { + useESMLoader = true; + } else { + resolvedMain = resolveMainPath(main); + useESMLoader = shouldUseESMLoader(resolvedMain); + } // Unless we know we should use the ESM loader to handle the entry point per the checks in `shouldUseESMLoader`, first // try to run the entry point via the CommonJS loader; and if that fails under certain conditions, retry as ESM. if (!useESMLoader) { @@ -166,9 +170,7 @@ function executeUserEntryPoint(main = process.argv[1]) { wrapModuleLoad(main, null, true); } else { const mainPath = resolvedMain || main; - if (mainURL === undefined) { - mainURL = pathToFileURL(mainPath).href; - } + const mainURL = getOptionValue('--entry-url') ? new URL(mainPath, getCWDURL()) : pathToFileURL(mainPath); runEntryPointWithESMLoader((cascadedLoader) => { // Note that if the graph contains unsettled TLA, this may never resolve diff --git a/lib/internal/options.js b/lib/internal/options.js index 68acdb2a7b378e..1192b46c9ede82 100644 --- a/lib/internal/options.js +++ b/lib/internal/options.js @@ -17,24 +17,15 @@ let embedderOptions; // complete so that we don't accidentally include runtime-dependent // states into a runtime-independent snapshot. 
function getCLIOptionsFromBinding() { - if (!optionsDict) { - optionsDict = getCLIOptionsValues(); - } - return optionsDict; + return optionsDict ??= getCLIOptionsValues(); } function getCLIOptionsInfoFromBinding() { - if (!cliInfo) { - cliInfo = getCLIOptionsInfo(); - } - return cliInfo; + return cliInfo ??= getCLIOptionsInfo(); } function getEmbedderOptions() { - if (!embedderOptions) { - embedderOptions = getEmbedderOptionsFromBinding(); - } - return embedderOptions; + return embedderOptions ??= getEmbedderOptionsFromBinding(); } function refreshOptions() { @@ -42,8 +33,7 @@ function refreshOptions() { } function getOptionValue(optionName) { - const optionsDict = getCLIOptionsFromBinding(); - return optionsDict[optionName]; + return getCLIOptionsFromBinding()[optionName]; } function getAllowUnauthorized() { diff --git a/lib/internal/perf/performance.js b/lib/internal/perf/performance.js index 836dcaa87d8b31..9e08bc5db7ea49 100644 --- a/lib/internal/perf/performance.js +++ b/lib/internal/perf/performance.js @@ -97,13 +97,16 @@ class Performance extends EventTarget { return filterBufferMapByNameAndType(); } - getEntriesByName(name) { + getEntriesByName(name, type = undefined) { validateInternalField(this, kPerformanceBrand, 'Performance'); if (arguments.length === 0) { throw new ERR_MISSING_ARGS('name'); } name = `${name}`; + if (type !== undefined) { + type = `${type}`; + } - return filterBufferMapByNameAndType(name, undefined); + return filterBufferMapByNameAndType(name, type); } getEntriesByType(type) { diff --git a/lib/internal/process/finalization.js b/lib/internal/process/finalization.js index 74b1cbdacd151d..e5f748c37642fd 100644 --- a/lib/internal/process/finalization.js +++ b/lib/internal/process/finalization.js @@ -64,7 +64,7 @@ function createFinalization() { const fn = ref.fn; // This should always happen, however GC is - // undeterministic so it might not happen. + // nondeterministic so it might not happen. /* istanbul ignore else */ if (obj !== undefined) { fn(obj, event); diff --git a/lib/internal/process/pre_execution.js b/lib/internal/process/pre_execution.js index cb96fd1bc4fcdc..848ff7f142fc70 100644 --- a/lib/internal/process/pre_execution.js +++ b/lib/internal/process/pre_execution.js @@ -33,6 +33,7 @@ const { emitExperimentalWarning, SymbolAsyncDispose, SymbolDispose, + deprecate, } = require('internal/util'); const { @@ -47,6 +48,7 @@ const { addSerializeCallback, isBuildingSnapshot, }, + runDeserializeCallbacks, } = require('internal/v8/startup_snapshot'); function prepareMainThreadExecution(expandArgv1 = false, initializeModules = true) { @@ -147,7 +149,7 @@ function prepareExecution(options) { initializeClusterIPC(); // TODO(joyeecheung): do this for worker threads as well. - require('internal/v8/startup_snapshot').runDeserializeCallbacks(); + runDeserializeCallbacks(); } else { assert(!internalBinding('worker').isMainThread); // The setup should be called in LOAD_SCRIPT message handler. @@ -524,7 +526,6 @@ function setupNetworkInspection() { // this is used to deprecate APIs implemented in C++ where the deprecation // utilities are not easily accessible.
function initializeDeprecations() { - const { deprecate } = require('internal/util'); const pendingDeprecation = getOptionValue('--pending-deprecation'); // DEP0103: access to `process.binding('util').isX` type checkers @@ -586,8 +587,6 @@ function initializeDeprecations() { function setupChildProcessIpcChannel() { if (process.env.NODE_CHANNEL_FD) { - const assert = require('internal/assert'); - const fd = NumberParseInt(process.env.NODE_CHANNEL_FD, 10); assert(fd >= 0); diff --git a/lib/internal/process/warning.js b/lib/internal/process/warning.js index 162d0ca8153d1c..364e88d27769e5 100644 --- a/lib/internal/process/warning.js +++ b/lib/internal/process/warning.js @@ -26,7 +26,6 @@ const { validateString } = require('internal/validators'); let fs; let fd; let warningFile; -let options; let traceWarningHelperShown = false; function resetForSerialization() { @@ -42,15 +41,9 @@ function lazyOption() { // This will load `warningFile` only once. If the flag is not set, // `warningFile` will be set to an empty string. if (warningFile === undefined) { - options = require('internal/options'); - if (options.getOptionValue('--diagnostic-dir') !== '') { - warningFile = options.getOptionValue('--diagnostic-dir'); - } - if (options.getOptionValue('--redirect-warnings') !== '') { - warningFile = options.getOptionValue('--redirect-warnings'); - } else { - warningFile = ''; - } + const diagnosticDir = getOptionValue('--diagnostic-dir'); + const redirectWarnings = getOptionValue('--redirect-warnings'); + warningFile = diagnosticDir || redirectWarnings || ''; } return warningFile; } diff --git a/lib/internal/quic/quic.js b/lib/internal/quic/quic.js new file mode 100644 index 00000000000000..0d5d0bbb56deb2 --- /dev/null +++ b/lib/internal/quic/quic.js @@ -0,0 +1,2548 @@ +'use strict'; + +// TODO(@jasnell) Temporarily ignoring c8 coverage for this file while tests +// are still being developed. +/* c8 ignore start */ + +const { + ArrayBuffer, + ArrayIsArray, + ArrayPrototypePush, + BigUint64Array, + DataView, + DataViewPrototypeGetBigInt64, + DataViewPrototypeGetBigUint64, + DataViewPrototypeGetUint8, + DataViewPrototypeSetUint8, + ObjectDefineProperties, + Symbol, + Uint8Array, +} = primordials; + +const { + assertCrypto, + customInspectSymbol: kInspect, + SymbolAsyncDispose, +} = require('internal/util'); +const { inspect } = require('internal/util/inspect'); +assertCrypto(); + +const { + Endpoint: Endpoint_, + setCallbacks, + + // The constants to be exposed to end users for various options. + CC_ALGO_RENO, + CC_ALGO_CUBIC, + CC_ALGO_BBR, + CC_ALGO_RENO_STR, + CC_ALGO_CUBIC_STR, + CC_ALGO_BBR_STR, + PREFERRED_ADDRESS_IGNORE, + PREFERRED_ADDRESS_USE, + DEFAULT_PREFERRED_ADDRESS_POLICY, + DEFAULT_CIPHERS, + DEFAULT_GROUPS, + STREAM_DIRECTION_BIDIRECTIONAL, + STREAM_DIRECTION_UNIDIRECTIONAL, + + // Internal constants for use by the implementation. + // These are not exposed to end users. + CLOSECONTEXT_CLOSE: kCloseContextClose, + CLOSECONTEXT_BIND_FAILURE: kCloseContextBindFailure, + CLOSECONTEXT_LISTEN_FAILURE: kCloseContextListenFailure, + CLOSECONTEXT_RECEIVE_FAILURE: kCloseContextReceiveFailure, + CLOSECONTEXT_SEND_FAILURE: kCloseContextSendFailure, + CLOSECONTEXT_START_FAILURE: kCloseContextStartFailure, + + // All of the IDX_STATS_* constants are the index positions of the stats + // fields in the relevant BigUint64Arrays that underlie the *Stats objects. + // These are not exposed to end users.
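// (For orientation: each *Stats object is backed by one of those
// BigUint64Arrays, presumably shared with the native side, so reading a stat
// is, roughly, plain element access such as
// stats[IDX_STATS_ENDPOINT_BYTES_SENT], with one IDX_* slot per field listed
// below. A hedged reading of the layout described in the comment above.)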
+ IDX_STATS_ENDPOINT_CREATED_AT, + IDX_STATS_ENDPOINT_DESTROYED_AT, + IDX_STATS_ENDPOINT_BYTES_RECEIVED, + IDX_STATS_ENDPOINT_BYTES_SENT, + IDX_STATS_ENDPOINT_PACKETS_RECEIVED, + IDX_STATS_ENDPOINT_PACKETS_SENT, + IDX_STATS_ENDPOINT_SERVER_SESSIONS, + IDX_STATS_ENDPOINT_CLIENT_SESSIONS, + IDX_STATS_ENDPOINT_SERVER_BUSY_COUNT, + IDX_STATS_ENDPOINT_RETRY_COUNT, + IDX_STATS_ENDPOINT_VERSION_NEGOTIATION_COUNT, + IDX_STATS_ENDPOINT_STATELESS_RESET_COUNT, + IDX_STATS_ENDPOINT_IMMEDIATE_CLOSE_COUNT, + + IDX_STATS_SESSION_CREATED_AT, + IDX_STATS_SESSION_CLOSING_AT, + IDX_STATS_SESSION_DESTROYED_AT, + IDX_STATS_SESSION_HANDSHAKE_COMPLETED_AT, + IDX_STATS_SESSION_HANDSHAKE_CONFIRMED_AT, + IDX_STATS_SESSION_GRACEFUL_CLOSING_AT, + IDX_STATS_SESSION_BYTES_RECEIVED, + IDX_STATS_SESSION_BYTES_SENT, + IDX_STATS_SESSION_BIDI_IN_STREAM_COUNT, + IDX_STATS_SESSION_BIDI_OUT_STREAM_COUNT, + IDX_STATS_SESSION_UNI_IN_STREAM_COUNT, + IDX_STATS_SESSION_UNI_OUT_STREAM_COUNT, + IDX_STATS_SESSION_LOSS_RETRANSMIT_COUNT, + IDX_STATS_SESSION_MAX_BYTES_IN_FLIGHT, + IDX_STATS_SESSION_BYTES_IN_FLIGHT, + IDX_STATS_SESSION_BLOCK_COUNT, + IDX_STATS_SESSION_CWND, + IDX_STATS_SESSION_LATEST_RTT, + IDX_STATS_SESSION_MIN_RTT, + IDX_STATS_SESSION_RTTVAR, + IDX_STATS_SESSION_SMOOTHED_RTT, + IDX_STATS_SESSION_SSTHRESH, + IDX_STATS_SESSION_DATAGRAMS_RECEIVED, + IDX_STATS_SESSION_DATAGRAMS_SENT, + IDX_STATS_SESSION_DATAGRAMS_ACKNOWLEDGED, + IDX_STATS_SESSION_DATAGRAMS_LOST, + + IDX_STATS_STREAM_CREATED_AT, + IDX_STATS_STREAM_RECEIVED_AT, + IDX_STATS_STREAM_ACKED_AT, + IDX_STATS_STREAM_CLOSING_AT, + IDX_STATS_STREAM_DESTROYED_AT, + IDX_STATS_STREAM_BYTES_RECEIVED, + IDX_STATS_STREAM_BYTES_SENT, + IDX_STATS_STREAM_MAX_OFFSET, + IDX_STATS_STREAM_MAX_OFFSET_ACK, + IDX_STATS_STREAM_MAX_OFFSET_RECV, + IDX_STATS_STREAM_FINAL_SIZE, + + IDX_STATE_SESSION_PATH_VALIDATION, + IDX_STATE_SESSION_VERSION_NEGOTIATION, + IDX_STATE_SESSION_DATAGRAM, + IDX_STATE_SESSION_SESSION_TICKET, + IDX_STATE_SESSION_CLOSING, + IDX_STATE_SESSION_GRACEFUL_CLOSE, + IDX_STATE_SESSION_SILENT_CLOSE, + IDX_STATE_SESSION_STATELESS_RESET, + IDX_STATE_SESSION_DESTROYED, + IDX_STATE_SESSION_HANDSHAKE_COMPLETED, + IDX_STATE_SESSION_HANDSHAKE_CONFIRMED, + IDX_STATE_SESSION_STREAM_OPEN_ALLOWED, + IDX_STATE_SESSION_PRIORITY_SUPPORTED, + IDX_STATE_SESSION_WRAPPED, + IDX_STATE_SESSION_LAST_DATAGRAM_ID, + + IDX_STATE_ENDPOINT_BOUND, + IDX_STATE_ENDPOINT_RECEIVING, + IDX_STATE_ENDPOINT_LISTENING, + IDX_STATE_ENDPOINT_CLOSING, + IDX_STATE_ENDPOINT_BUSY, + IDX_STATE_ENDPOINT_PENDING_CALLBACKS, + + IDX_STATE_STREAM_ID, + IDX_STATE_STREAM_FIN_SENT, + IDX_STATE_STREAM_FIN_RECEIVED, + IDX_STATE_STREAM_READ_ENDED, + IDX_STATE_STREAM_WRITE_ENDED, + IDX_STATE_STREAM_DESTROYED, + IDX_STATE_STREAM_PAUSED, + IDX_STATE_STREAM_RESET, + IDX_STATE_STREAM_HAS_READER, + IDX_STATE_STREAM_WANTS_BLOCK, + IDX_STATE_STREAM_WANTS_HEADERS, + IDX_STATE_STREAM_WANTS_RESET, + IDX_STATE_STREAM_WANTS_TRAILERS, +} = internalBinding('quic'); + +const { + isArrayBuffer, + isArrayBufferView, +} = require('util/types'); + +const { + Buffer, +} = require('buffer'); + +const { + codes: { + ERR_INVALID_ARG_TYPE, + ERR_INVALID_ARG_VALUE, + ERR_INVALID_STATE, + ERR_QUIC_CONNECTION_FAILED, + ERR_QUIC_ENDPOINT_CLOSED, + ERR_QUIC_OPEN_STREAM_FAILED, + }, +} = require('internal/errors'); + +const { + InternalSocketAddress, + SocketAddress, + kHandle: kSocketAddressHandle, +} = require('internal/socketaddress'); + +const { + isKeyObject, + isCryptoKey, +} = require('internal/crypto/keys'); + +const { + kHandle: 
kKeyObjectHandle, + kKeyObject: kKeyObjectInner, +} = require('internal/crypto/util'); + +const { + validateFunction, + validateObject, +} = require('internal/validators'); + +const kEmptyObject = { __proto__: null }; +const kEnumerable = { __proto__: null, enumerable: true }; + +const kBlocked = Symbol('kBlocked'); +const kDatagram = Symbol('kDatagram'); +const kDatagramStatus = Symbol('kDatagramStatus'); +const kError = Symbol('kError'); +const kFinishClose = Symbol('kFinishClose'); +const kHandshake = Symbol('kHandshake'); +const kHeaders = Symbol('kHeaders'); +const kOwner = Symbol('kOwner'); +const kNewSession = Symbol('kNewSession'); +const kNewStream = Symbol('kNewStream'); +const kPathValidation = Symbol('kPathValidation'); +const kReset = Symbol('kReset'); +const kSessionTicket = Symbol('kSessionTicket'); +const kTrailers = Symbol('kTrailers'); +const kVersionNegotiation = Symbol('kVersionNegotiation'); + +/** + * @typedef {import('../socketaddress.js').SocketAddress} SocketAddress + * @typedef {import('../crypto/keys.js').KeyObject} KeyObject + * @typedef {import('../crypto/keys.js').CryptoKey} CryptoKey + */ + +/** + * @typedef {object} EndpointOptions + * @property {SocketAddress} [address] The local address to bind to + * @property {bigint|number} [retryTokenExpiration] The retry token expiration + * @property {bigint|number} [tokenExpiration] The token expiration + * @property {bigint|number} [maxConnectionsPerHost] The maximum number of connections per host + * @property {bigint|number} [maxConnectionsTotal] The maximum number of total connections + * @property {bigint|number} [maxStatelessResetsPerHost] The maximum number of stateless resets per host + * @property {bigint|number} [addressLRUSize] The size of the address LRU cache + * @property {bigint|number} [maxRetries] The maximum number of retries + * @property {bigint|number} [maxPayloadSize] The maximum payload size + * @property {bigint|number} [unacknowledgedPacketThreshold] The unacknowledged packet threshold + * @property {bigint|number} [handshakeTimeout] The handshake timeout + * @property {bigint|number} [maxStreamWindow] The maximum stream window + * @property {bigint|number} [maxWindow] The maximum window + * @property {number} [rxDiagnosticLoss] The receive diagnostic loss (range 0.0-1.0) + * @property {number} [txDiagnosticLoss] The transmit diagnostic loss (range 0.0-1.0) + * @property {number} [udpReceiveBufferSize] The UDP receive buffer size + * @property {number} [udpSendBufferSize] The UDP send buffer size + * @property {number} [udpTTL] The UDP TTL + * @property {boolean} [noUdpPayloadSizeShaping] Disable UDP payload size shaping + * @property {boolean} [validateAddress] Validate the address + * @property {boolean} [disableActiveMigration] Disable active migration + * @property {boolean} [ipv6Only] Use IPv6 only + * @property {'reno'|'cubic'|'bbr'|number} [cc] The congestion control algorithm + * @property {ArrayBufferView} [resetTokenSecret] The reset token secret + * @property {ArrayBufferView} [tokenSecret] The token secret + */ + +/** + * @typedef {object} TlsOptions + * @property {string} [sni] The server name indication + * @property {string} [alpn] The application layer protocol negotiation + * @property {string} [ciphers] The ciphers + * @property {string} [groups] The groups + * @property {boolean} [keylog] Enable key logging + * @property {boolean} [verifyClient] Verify the client + * @property {boolean} [tlsTrace] Enable TLS tracing + * @property {boolean} [verifyPrivateKey] Verify the 
private key + * @property {KeyObject|CryptoKey|Array} [keys] The keys + * @property {ArrayBuffer|ArrayBufferView|Array} [certs] The certificates + * @property {ArrayBuffer|ArrayBufferView|Array} [ca] The certificate authority + * @property {ArrayBuffer|ArrayBufferView|Array} [crl] The certificate revocation list + */ + +/** + * @typedef {object} TransportParams + * @property {SocketAddress} [preferredAddressIpv4] The preferred IPv4 address + * @property {SocketAddress} [preferredAddressIpv6] The preferred IPv6 address + * @property {bigint|number} [initialMaxStreamDataBidiLocal] The initial maximum stream data bidirectional local + * @property {bigint|number} [initialMaxStreamDataBidiRemote] The initial maximum stream data bidirectional remote + * @property {bigint|number} [initialMaxStreamDataUni] The initial maximum stream data unidirectional + * @property {bigint|number} [initialMaxData] The initial maximum data + * @property {bigint|number} [initialMaxStreamsBidi] The initial maximum streams bidirectional + * @property {bigint|number} [initialMaxStreamsUni] The initial maximum streams unidirectional + * @property {bigint|number} [maxIdleTimeout] The maximum idle timeout + * @property {bigint|number} [activeConnectionIDLimit] The active connection ID limit + * @property {bigint|number} [ackDelayExponent] The acknowledgment delay exponent + * @property {bigint|number} [maxAckDelay] The maximum acknowledgment delay + * @property {bigint|number} [maxDatagramFrameSize] The maximum datagram frame size + * @property {boolean} [disableActiveMigration] Disable active migration + */ + +/** + * @typedef {object} ApplicationOptions + * @property {bigint|number} [maxHeaderPairs] The maximum header pairs + * @property {bigint|number} [maxHeaderLength] The maximum header length + * @property {bigint|number} [maxFieldSectionSize] The maximum field section size + * @property {bigint|number} [qpackMaxDTableCapacity] The qpack maximum dynamic table capacity + * @property {bigint|number} [qpackEncoderMaxDTableCapacity] The qpack encoder maximum dynamic table capacity + * @property {bigint|number} [qpackBlockedStreams] The qpack blocked streams + * @property {boolean} [enableConnectProtocol] Enable the connect protocol + * @property {boolean} [enableDatagrams] Enable datagrams + */ + +/** + * @typedef {object} SessionOptions + * @property {number} [version] The version + * @property {number} [minVersion] The minimum version + * @property {'use'|'ignore'|'default'} [preferredAddressPolicy] The preferred address policy + * @property {ApplicationOptions} [application] The application options + * @property {TransportParams} [transportParams] The transport parameters + * @property {TlsOptions} [tls] The TLS options + * @property {boolean} [qlog] Enable qlog + * @property {ArrayBufferView} [sessionTicket] The session ticket + */ + +/** + * @typedef {object} Datagrams + * @property {ReadableStream} readable The readable stream + * @property {WritableStream} writable The writable stream + */ + +/** + * @typedef {object} Path + * @property {SocketAddress} local The local address + * @property {SocketAddress} remote The remote address + */ + +/** + * Called when the Endpoint receives a new server-side Session. 
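+ * A minimal handler might look like this (sketch only; the handler name
+ * and body are hypothetical):
+ *
+ *   function onsession(session, endpoint) {
+ *     session.closed.then(() => { endpoint.close(); });
+ *   }
+ *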
+ * @callback OnSessionCallback
+ * @param {Session} session
+ * @param {Endpoint} endpoint
+ * @returns {void}
+ */
+
+/**
+ * @callback OnStreamCallback
+ * @param {QuicStream} stream
+ * @param {Session} session
+ * @returns {void}
+ */
+
+/**
+ * @callback OnDatagramCallback
+ * @param {Uint8Array} datagram
+ * @param {Session} session
+ * @param {boolean} early
+ * @returns {void}
+ */
+
+/**
+ * @callback OnDatagramStatusCallback
+ * @param {bigint} id
+ * @param {'lost'|'acknowledged'} status
+ * @param {Session} session
+ * @returns {void}
+ */
+
+/**
+ * @callback OnPathValidationCallback
+ * @param {'aborted'|'failure'|'success'} result
+ * @param {SocketAddress} newLocalAddress
+ * @param {SocketAddress} newRemoteAddress
+ * @param {SocketAddress} oldLocalAddress
+ * @param {SocketAddress} oldRemoteAddress
+ * @param {boolean} preferredAddress
+ * @param {Session} session
+ * @returns {void}
+ */
+
+/**
+ * @callback OnSessionTicketCallback
+ * @param {object} ticket
+ * @param {Session} session
+ * @returns {void}
+ */
+
+/**
+ * @callback OnVersionNegotiationCallback
+ * @param {number} version
+ * @param {number[]} requestedVersions
+ * @param {number[]} supportedVersions
+ * @param {Session} session
+ * @returns {void}
+ */
+
+/**
+ * @callback OnHandshakeCallback
+ * @param {string} sni
+ * @param {string} alpn
+ * @param {string} cipher
+ * @param {string} cipherVersion
+ * @param {string} validationErrorReason
+ * @param {number} validationErrorCode
+ * @param {boolean} earlyDataAccepted
+ * @param {Session} session
+ * @returns {void}
+ */
+
+/**
+ * @callback OnBlockedCallback
+ * @param {QuicStream} stream
+ * @returns {void}
+ */
+
+/**
+ * @callback OnStreamErrorCallback
+ * @param {any} error
+ * @param {QuicStream} stream
+ * @returns {void}
+ */
+
+/**
+ * @callback OnHeadersCallback
+ * @param {object} headers
+ * @param {string} kind
+ * @param {QuicStream} stream
+ * @returns {void}
+ */
+
+/**
+ * @callback OnTrailersCallback
+ * @param {QuicStream} stream
+ * @returns {void}
+ */
+
+/**
+ * Provides the callback configuration for Sessions.
+ * @typedef {object} SessionCallbackConfiguration
+ * @property {OnStreamCallback} onstream The stream callback
+ * @property {OnDatagramCallback} [ondatagram] The datagram callback
+ * @property {OnDatagramStatusCallback} [ondatagramstatus] The datagram status callback
+ * @property {OnPathValidationCallback} [onpathvalidation] The path validation callback
+ * @property {OnSessionTicketCallback} [onsessionticket] The session ticket callback
+ * @property {OnVersionNegotiationCallback} [onversionnegotiation] The version negotiation callback
+ * @property {OnHandshakeCallback} [onhandshake] The handshake callback
+ */
+
+/**
+ * @typedef {object} StreamCallbackConfiguration
+ * @property {OnStreamErrorCallback} onerror The error callback
+ * @property {OnBlockedCallback} [onblocked] The blocked callback
+ * @property {OnStreamErrorCallback} [onreset] The reset callback
+ * @property {OnHeadersCallback} [onheaders] The headers callback
+ * @property {OnTrailersCallback} [ontrailers] The trailers callback
+ */
+
+/**
+ * Provides the callback configuration for the Endpoint.
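+ *
+ * A hedged wiring sketch (handler names and address values are
+ * illustrative; per the validators below, `onsession` is required here,
+ * while `session.onstream`, `session.onhandshake`, and `stream.onerror`
+ * are required once sessions and streams are actually created):
+ *
+ *   const endpoint = new Endpoint({
+ *     onsession(session) { ... },
+ *     session: { onstream(stream) { ... }, onhandshake() { ... } },
+ *     stream: { onerror(error) { ... } },
+ *   }, { address: { address: '127.0.0.1', port: 0 } });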
+ * @typedef {object} EndpointCallbackConfiguration
+ * @property {OnSessionCallback} onsession The session callback
+ * @property {SessionCallbackConfiguration} session The session callback configuration
+ * @property {StreamCallbackConfiguration} stream The stream callback configuration
+ */
+
+setCallbacks({
+  onEndpointClose(context, status) {
+    this[kOwner][kFinishClose](context, status);
+  },
+  onSessionNew(session) {
+    this[kOwner][kNewSession](session);
+  },
+  onSessionClose(errorType, code, reason) {
+    this[kOwner][kFinishClose](errorType, code, reason);
+  },
+  onSessionDatagram(uint8Array, early) {
+    this[kOwner][kDatagram](uint8Array, early);
+  },
+  onSessionDatagramStatus(id, status) {
+    this[kOwner][kDatagramStatus](id, status);
+  },
+  onSessionHandshake(sni, alpn, cipher, cipherVersion,
+                     validationErrorReason,
+                     validationErrorCode,
+                     earlyDataAccepted) {
+    this[kOwner][kHandshake](sni, alpn, cipher, cipherVersion,
+                             validationErrorReason, validationErrorCode,
+                             earlyDataAccepted);
+  },
+  onSessionPathValidation(...args) {
+    this[kOwner][kPathValidation](...args);
+  },
+  onSessionTicket(ticket) {
+    this[kOwner][kSessionTicket](ticket);
+  },
+  onSessionVersionNegotiation(version,
+                              requestedVersions,
+                              supportedVersions) {
+    this[kOwner][kVersionNegotiation](version, requestedVersions, supportedVersions);
+    // Note that immediately following a version negotiation event, the
+    // session will be destroyed.
+  },
+  onStreamCreated(stream, direction) {
+    const session = this[kOwner];
+    // The event is ignored if the session has been destroyed.
+    if (session.destroyed) {
+      stream.destroy();
+      return;
+    }
+    session[kNewStream](stream, direction);
+  },
+  onStreamBlocked() {
+    this[kOwner][kBlocked]();
+  },
+  onStreamClose(error) {
+    this[kOwner][kError](error);
+  },
+  onStreamReset(error) {
+    this[kOwner][kReset](error);
+  },
+  onStreamHeaders(headers, kind) {
+    this[kOwner][kHeaders](headers, kind);
+  },
+  onStreamTrailers() {
+    this[kOwner][kTrailers]();
+  },
+});
+
+/**
+ * @param {'use'|'ignore'|'default'} policy
+ * @returns {number}
+ */
+function getPreferredAddressPolicy(policy = 'default') {
+  switch (policy) {
+    case 'use': return PREFERRED_ADDRESS_USE;
+    case 'ignore': return PREFERRED_ADDRESS_IGNORE;
+    case 'default': return DEFAULT_PREFERRED_ADDRESS_POLICY;
+  }
+  throw new ERR_INVALID_ARG_VALUE('options.preferredAddressPolicy', policy);
+}
+
+/**
+ * @param {TlsOptions} tls
+ */
+function processTlsOptions(tls) {
+  validateObject(tls, 'options.tls');
+  const {
+    sni,
+    alpn,
+    ciphers,
+    groups,
+    keylog,
+    verifyClient,
+    tlsTrace,
+    verifyPrivateKey,
+    keys,
+    certs,
+    ca,
+    crl,
+  } = tls;
+
+  const keyHandles = [];
+  if (keys !== undefined) {
+    const keyInputs = ArrayIsArray(keys) ?
keys : [keys]; + for (const key of keyInputs) { + if (isKeyObject(key)) { + if (key.type !== 'private') { + throw new ERR_INVALID_ARG_VALUE('options.tls.keys', key, 'must be a private key'); + } + ArrayPrototypePush(keyHandles, key[kKeyObjectHandle]); + } else if (isCryptoKey(key)) { + if (key.type !== 'private') { + throw new ERR_INVALID_ARG_VALUE('options.tls.keys', key, 'must be a private key'); + } + ArrayPrototypePush(keyHandles, key[kKeyObjectInner][kKeyObjectHandle]); + } else { + throw new ERR_INVALID_ARG_TYPE('options.tls.keys', ['KeyObject', 'CryptoKey'], key); + } + } + } + + return { + sni, + alpn, + ciphers, + groups, + keylog, + verifyClient, + tlsTrace, + verifyPrivateKey, + keys: keyHandles, + certs, + ca, + crl, + }; +} + +class QuicStreamStats { + /** @type {BigUint64Array} */ + #handle; + + /** + * @param {ArrayBuffer} buffer + */ + constructor(buffer) { + if (!isArrayBuffer(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', ['ArrayBuffer'], buffer); + } + this.#handle = new BigUint64Array(buffer); + } + + /** @type {bigint} */ + get createdAt() { + return this.#handle[IDX_STATS_STREAM_CREATED_AT]; + } + + /** @type {bigint} */ + get receivedAt() { + return this.#handle[IDX_STATS_STREAM_RECEIVED_AT]; + } + + /** @type {bigint} */ + get ackedAt() { + return this.#handle[IDX_STATS_STREAM_ACKED_AT]; + } + + /** @type {bigint} */ + get closingAt() { + return this.#handle[IDX_STATS_STREAM_CLOSING_AT]; + } + + /** @type {bigint} */ + get destroyedAt() { + return this.#handle[IDX_STATS_STREAM_DESTROYED_AT]; + } + + /** @type {bigint} */ + get bytesReceived() { + return this.#handle[IDX_STATS_STREAM_BYTES_RECEIVED]; + } + + /** @type {bigint} */ + get bytesSent() { + return this.#handle[IDX_STATS_STREAM_BYTES_SENT]; + } + + /** @type {bigint} */ + get maxOffset() { + return this.#handle[IDX_STATS_STREAM_MAX_OFFSET]; + } + + /** @type {bigint} */ + get maxOffsetAcknowledged() { + return this.#handle[IDX_STATS_STREAM_MAX_OFFSET_ACK]; + } + + /** @type {bigint} */ + get maxOffsetReceived() { + return this.#handle[IDX_STATS_STREAM_MAX_OFFSET_RECV]; + } + + /** @type {bigint} */ + get finalSize() { + return this.#handle[IDX_STATS_STREAM_FINAL_SIZE]; + } + + toJSON() { + return { + __proto__: null, + createdAt: `${this.createdAt}`, + receivedAt: `${this.receivedAt}`, + ackedAt: `${this.ackedAt}`, + closingAt: `${this.closingAt}`, + destroyedAt: `${this.destroyedAt}`, + bytesReceived: `${this.bytesReceived}`, + bytesSent: `${this.bytesSent}`, + maxOffset: `${this.maxOffset}`, + maxOffsetAcknowledged: `${this.maxOffsetAcknowledged}`, + maxOffsetReceived: `${this.maxOffsetReceived}`, + finalSize: `${this.finalSize}`, + }; + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? null : options.depth - 1, + }; + + return `StreamStats ${inspect({ + createdAt: this.createdAt, + receivedAt: this.receivedAt, + ackedAt: this.ackedAt, + closingAt: this.closingAt, + destroyedAt: this.destroyedAt, + bytesReceived: this.bytesReceived, + bytesSent: this.bytesSent, + maxOffset: this.maxOffset, + maxOffsetAcknowledged: this.maxOffsetAcknowledged, + maxOffsetReceived: this.maxOffsetReceived, + finalSize: this.finalSize, + }, opts)}`; + } + + [kFinishClose]() { + // Snapshot the stats into a new BigUint64Array since the underlying + // buffer will be destroyed. 
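+    // (Passing the old typed array to the BigUint64Array constructor copies
+    // its contents into newly allocated memory, so the snapshot no longer
+    // references the soon-to-be-destroyed native buffer.)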
+ this.#handle = new BigUint64Array(this.#handle); + } +} + +class QuicStreamState { + /** @type {DataView} */ + #handle; + + /** + * @param {ArrayBuffer} buffer + */ + constructor(buffer) { + if (!isArrayBuffer(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', ['ArrayBuffer'], buffer); + } + this.#handle = new DataView(buffer); + } + + /** @type {bigint} */ + get id() { + return DataViewPrototypeGetBigInt64(this.#handle, IDX_STATE_STREAM_ID); + } + + /** @type {boolean} */ + get finSent() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_FIN_SENT); + } + + /** @type {boolean} */ + get finReceived() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_FIN_RECEIVED); + } + + /** @type {boolean} */ + get readEnded() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_READ_ENDED); + } + + /** @type {boolean} */ + get writeEnded() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_WRITE_ENDED); + } + + /** @type {boolean} */ + get destroyed() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_DESTROYED); + } + + /** @type {boolean} */ + get paused() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_PAUSED); + } + + /** @type {boolean} */ + get reset() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_RESET); + } + + /** @type {boolean} */ + get hasReader() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_HAS_READER); + } + + /** @type {boolean} */ + get wantsBlock() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_WANTS_BLOCK); + } + + /** @type {boolean} */ + set wantsBlock(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_STREAM_WANTS_BLOCK, val ? 1 : 0); + } + + /** @type {boolean} */ + get wantsHeaders() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_WANTS_HEADERS); + } + + /** @type {boolean} */ + set wantsHeaders(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_STREAM_WANTS_HEADERS, val ? 1 : 0); + } + + /** @type {boolean} */ + get wantsReset() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_WANTS_RESET); + } + + /** @type {boolean} */ + set wantsReset(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_STREAM_WANTS_RESET, val ? 1 : 0); + } + + /** @type {boolean} */ + get wantsTrailers() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_STREAM_WANTS_TRAILERS); + } + + /** @type {boolean} */ + set wantsTrailers(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_STREAM_WANTS_TRAILERS, val ? 1 : 0); + } + + toJSON() { + return { + __proto__: null, + id: `${this.id}`, + finSent: this.finSent, + finReceived: this.finReceived, + readEnded: this.readEnded, + writeEnded: this.writeEnded, + destroyed: this.destroyed, + paused: this.paused, + reset: this.reset, + hasReader: this.hasReader, + wantsBlock: this.wantsBlock, + wantsHeaders: this.wantsHeaders, + wantsReset: this.wantsReset, + wantsTrailers: this.wantsTrailers, + }; + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? 
      null : options.depth - 1,
+    };
+
+    return `StreamState ${inspect({
+      id: this.id,
+      finSent: this.finSent,
+      finReceived: this.finReceived,
+      readEnded: this.readEnded,
+      writeEnded: this.writeEnded,
+      destroyed: this.destroyed,
+      paused: this.paused,
+      reset: this.reset,
+      hasReader: this.hasReader,
+      wantsBlock: this.wantsBlock,
+      wantsHeaders: this.wantsHeaders,
+      wantsReset: this.wantsReset,
+      wantsTrailers: this.wantsTrailers,
+    }, opts)}`;
+  }
+
+  [kFinishClose]() {
+    // When the stream is destroyed, the state is no longer available.
+    this.#handle = new DataView(new ArrayBuffer(0));
+  }
+}
+
+class QuicStream {
+  /** @type {object} */
+  #handle;
+  /** @type {Session} */
+  #session;
+  /** @type {QuicStreamStats} */
+  #stats;
+  /** @type {QuicStreamState} */
+  #state;
+  /** @type {number} */
+  #direction;
+  /** @type {OnStreamErrorCallback} */
+  #onerror;
+  /** @type {OnBlockedCallback|undefined} */
+  #onblocked;
+  /** @type {OnStreamErrorCallback|undefined} */
+  #onreset;
+  /** @type {OnHeadersCallback|undefined} */
+  #onheaders;
+  /** @type {OnTrailersCallback|undefined} */
+  #ontrailers;
+
+  /**
+   * @param {StreamCallbackConfiguration} config
+   * @param {object} handle
+   * @param {Session} session
+   * @param {number} direction
+   */
+  constructor(config, handle, session, direction) {
+    validateObject(config, 'config');
+    this.#stats = new QuicStreamStats(handle.stats);
+    this.#state = new QuicStreamState(handle.state);
+    const {
+      onblocked,
+      onerror,
+      onreset,
+      onheaders,
+      ontrailers,
+    } = config;
+    if (onblocked !== undefined) {
+      validateFunction(onblocked, 'config.onblocked');
+      this.#state.wantsBlock = true;
+      this.#onblocked = onblocked;
+    }
+    if (onreset !== undefined) {
+      validateFunction(onreset, 'config.onreset');
+      this.#state.wantsReset = true;
+      this.#onreset = onreset;
+    }
+    if (onheaders !== undefined) {
+      validateFunction(onheaders, 'config.onheaders');
+      this.#state.wantsHeaders = true;
+      this.#onheaders = onheaders;
+    }
+    if (ontrailers !== undefined) {
+      validateFunction(ontrailers, 'config.ontrailers');
+      this.#state.wantsTrailers = true;
+      this.#ontrailers = ontrailers;
+    }
+    validateFunction(onerror, 'config.onerror');
+    this.#onerror = onerror;
+    this.#handle = handle;
+    this.#session = session;
+    this.#direction = direction;
+    this.#handle[kOwner] = this;
+  }
+
+  /** @type {QuicStreamStats} */
+  get stats() { return this.#stats; }
+
+  /** @type {QuicStreamState} */
+  get state() { return this.#state; }
+
+  /** @type {Session} */
+  get session() { return this.#session; }
+
+  /** @type {bigint} */
+  get id() { return this.#state.id; }
+
+  /** @type {'bidi'|'uni'} */
+  get direction() {
+    return this.#direction === 0 ? 'bidi' : 'uni';
+  }
+
+  [kBlocked]() {
+    this.#onblocked(this);
+  }
+
+  [kError](error) {
+    this.#onerror(error, this);
+  }
+
+  [kReset](error) {
+    this.#onreset(error, this);
+  }
+
+  [kHeaders](headers, kind) {
+    this.#onheaders(headers, kind, this);
+  }
+
+  [kTrailers]() {
+    this.#ontrailers(this);
+  }
+
+  [kInspect](depth, options) {
+    if (depth < 0)
+      return this;
+
+    const opts = {
+      ...options,
+      depth: options.depth == null ? null : options.depth - 1,
+    };
+
+    return `Stream ${inspect({
+      id: this.id,
+      direction: this.direction,
+      stats: this.stats,
+      state: this.state,
+      session: this.session,
+    }, opts)}`;
+  }
+}
+
+class SessionStats {
+  /** @type {BigUint64Array} */
+  #handle;
+
+  /**
+   * @param {ArrayBuffer} buffer
+   */
+  constructor(buffer) {
+    if (!isArrayBuffer(buffer)) {
+      throw new ERR_INVALID_ARG_TYPE('buffer', ['ArrayBuffer'], buffer);
+    }
+    this.#handle = new BigUint64Array(buffer);
+  }
+
+  /** @type {bigint} */
+  get createdAt() {
+    return this.#handle[IDX_STATS_SESSION_CREATED_AT];
+  }
+
+  /** @type {bigint} */
+  get closingAt() {
+    return this.#handle[IDX_STATS_SESSION_CLOSING_AT];
+  }
+
+  /** @type {bigint} */
+  get destroyedAt() {
+    return this.#handle[IDX_STATS_SESSION_DESTROYED_AT];
+  }
+
+  /** @type {bigint} */
+  get handshakeCompletedAt() {
+    return this.#handle[IDX_STATS_SESSION_HANDSHAKE_COMPLETED_AT];
+  }
+
+  /** @type {bigint} */
+  get handshakeConfirmedAt() {
+    return this.#handle[IDX_STATS_SESSION_HANDSHAKE_CONFIRMED_AT];
+  }
+
+  /** @type {bigint} */
+  get gracefulClosingAt() {
+    return this.#handle[IDX_STATS_SESSION_GRACEFUL_CLOSING_AT];
+  }
+
+  /** @type {bigint} */
+  get bytesReceived() {
+    return this.#handle[IDX_STATS_SESSION_BYTES_RECEIVED];
+  }
+
+  /** @type {bigint} */
+  get bytesSent() {
+    return this.#handle[IDX_STATS_SESSION_BYTES_SENT];
+  }
+
+  /** @type {bigint} */
+  get bidiInStreamCount() {
+    return this.#handle[IDX_STATS_SESSION_BIDI_IN_STREAM_COUNT];
+  }
+
+  /** @type {bigint} */
+  get bidiOutStreamCount() {
+    return this.#handle[IDX_STATS_SESSION_BIDI_OUT_STREAM_COUNT];
+  }
+
+  /** @type {bigint} */
+  get uniInStreamCount() {
+    return this.#handle[IDX_STATS_SESSION_UNI_IN_STREAM_COUNT];
+  }
+
+  /** @type {bigint} */
+  get uniOutStreamCount() {
+    return this.#handle[IDX_STATS_SESSION_UNI_OUT_STREAM_COUNT];
+  }
+
+  /** @type {bigint} */
+  get lossRetransmitCount() {
+    return this.#handle[IDX_STATS_SESSION_LOSS_RETRANSMIT_COUNT];
+  }
+
+  /** @type {bigint} */
+  get maxBytesInFlights() {
+    return this.#handle[IDX_STATS_SESSION_MAX_BYTES_IN_FLIGHT];
+  }
+
+  /** @type {bigint} */
+  get bytesInFlight() {
+    return this.#handle[IDX_STATS_SESSION_BYTES_IN_FLIGHT];
+  }
+
+  /** @type {bigint} */
+  get blockCount() {
+    return this.#handle[IDX_STATS_SESSION_BLOCK_COUNT];
+  }
+
+  /** @type {bigint} */
+  get cwnd() {
+    return this.#handle[IDX_STATS_SESSION_CWND];
+  }
+
+  /** @type {bigint} */
+  get latestRtt() {
+    return this.#handle[IDX_STATS_SESSION_LATEST_RTT];
+  }
+
+  /** @type {bigint} */
+  get minRtt() {
+    return this.#handle[IDX_STATS_SESSION_MIN_RTT];
+  }
+
+  /** @type {bigint} */
+  get rttVar() {
+    return this.#handle[IDX_STATS_SESSION_RTTVAR];
+  }
+
+  /** @type {bigint} */
+  get smoothedRtt() {
+    return this.#handle[IDX_STATS_SESSION_SMOOTHED_RTT];
+  }
+
+  /** @type {bigint} */
+  get ssthresh() {
+    return this.#handle[IDX_STATS_SESSION_SSTHRESH];
+  }
+
+  /** @type {bigint} */
+  get datagramsReceived() {
+    return this.#handle[IDX_STATS_SESSION_DATAGRAMS_RECEIVED];
+  }
+
+  /** @type {bigint} */
+  get datagramsSent() {
+    return this.#handle[IDX_STATS_SESSION_DATAGRAMS_SENT];
+  }
+
+  /** @type {bigint} */
+  get datagramsAcknowledged() {
+    return this.#handle[IDX_STATS_SESSION_DATAGRAMS_ACKNOWLEDGED];
+  }
+
+  /** @type {bigint} */
+  get datagramsLost() {
+    return this.#handle[IDX_STATS_SESSION_DATAGRAMS_LOST];
+  }
+
+  toJSON() {
+    return {
+      __proto__: null,
+      createdAt: `${this.createdAt}`,
+      closingAt: `${this.closingAt}`,
+      destroyedAt: `${this.destroyedAt}`,
+
handshakeCompletedAt: `${this.handshakeCompletedAt}`, + handshakeConfirmedAt: `${this.handshakeConfirmedAt}`, + gracefulClosingAt: `${this.gracefulClosingAt}`, + bytesReceived: `${this.bytesReceived}`, + bytesSent: `${this.bytesSent}`, + bidiInStreamCount: `${this.bidiInStreamCount}`, + bidiOutStreamCount: `${this.bidiOutStreamCount}`, + uniInStreamCount: `${this.uniInStreamCount}`, + uniOutStreamCount: `${this.uniOutStreamCount}`, + lossRetransmitCount: `${this.lossRetransmitCount}`, + maxBytesInFlights: `${this.maxBytesInFlights}`, + bytesInFlight: `${this.bytesInFlight}`, + blockCount: `${this.blockCount}`, + cwnd: `${this.cwnd}`, + latestRtt: `${this.latestRtt}`, + minRtt: `${this.minRtt}`, + rttVar: `${this.rttVar}`, + smoothedRtt: `${this.smoothedRtt}`, + ssthresh: `${this.ssthresh}`, + datagramsReceived: `${this.datagramsReceived}`, + datagramsSent: `${this.datagramsSent}`, + datagramsAcknowledged: `${this.datagramsAcknowledged}`, + datagramsLost: `${this.datagramsLost}`, + }; + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? null : options.depth - 1, + }; + + return `SessionStats ${inspect({ + createdAt: this.createdAt, + closingAt: this.closingAt, + destroyedAt: this.destroyedAt, + handshakeCompletedAt: this.handshakeCompletedAt, + handshakeConfirmedAt: this.handshakeConfirmedAt, + gracefulClosingAt: this.gracefulClosingAt, + bytesReceived: this.bytesReceived, + bytesSent: this.bytesSent, + bidiInStreamCount: this.bidiInStreamCount, + bidiOutStreamCount: this.bidiOutStreamCount, + uniInStreamCount: this.uniInStreamCount, + uniOutStreamCount: this.uniOutStreamCount, + lossRetransmitCount: this.lossRetransmitCount, + maxBytesInFlights: this.maxBytesInFlights, + bytesInFlight: this.bytesInFlight, + blockCount: this.blockCount, + cwnd: this.cwnd, + latestRtt: this.latestRtt, + minRtt: this.minRtt, + rttVar: this.rttVar, + smoothedRtt: this.smoothedRtt, + ssthresh: this.ssthresh, + datagramsReceived: this.datagramsReceived, + datagramsSent: this.datagramsSent, + datagramsAcknowledged: this.datagramsAcknowledged, + datagramsLost: this.datagramsLost, + }, opts)}`; + } + + [kFinishClose]() { + // Snapshot the stats into a new BigUint64Array since the underlying + // buffer will be destroyed. + this.#handle = new BigUint64Array(this.#handle); + } +} + +class SessionState { + /** @type {DataView} */ + #handle; + + /** + * @param {ArrayBuffer} buffer + */ + constructor(buffer) { + if (!isArrayBuffer(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', ['ArrayBuffer'], buffer); + } + this.#handle = new DataView(buffer); + } + + /** @type {boolean} */ + get hasPathValidationListener() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_PATH_VALIDATION); + } + + /** @type {boolean} */ + set hasPathValidationListener(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_SESSION_PATH_VALIDATION, val ? 1 : 0); + } + + /** @type {boolean} */ + get hasVersionNegotiationListener() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_VERSION_NEGOTIATION); + } + + /** @type {boolean} */ + set hasVersionNegotiationListener(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_SESSION_VERSION_NEGOTIATION, val ? 
1 : 0); + } + + /** @type {boolean} */ + get hasDatagramListener() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_DATAGRAM); + } + + /** @type {boolean} */ + set hasDatagramListener(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_SESSION_DATAGRAM, val ? 1 : 0); + } + + /** @type {boolean} */ + get hasSessionTicketListener() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_SESSION_TICKET); + } + + /** @type {boolean} */ + set hasSessionTicketListener(val) { + DataViewPrototypeSetUint8(this.#handle, IDX_STATE_SESSION_SESSION_TICKET, val ? 1 : 0); + } + + /** @type {boolean} */ + get isClosing() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_CLOSING); + } + + /** @type {boolean} */ + get isGracefulClose() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_GRACEFUL_CLOSE); + } + + /** @type {boolean} */ + get isSilentClose() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_SILENT_CLOSE); + } + + /** @type {boolean} */ + get isStatelessReset() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_STATELESS_RESET); + } + + /** @type {boolean} */ + get isDestroyed() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_DESTROYED); + } + + /** @type {boolean} */ + get isHandshakeCompleted() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_HANDSHAKE_COMPLETED); + } + + /** @type {boolean} */ + get isHandshakeConfirmed() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_HANDSHAKE_CONFIRMED); + } + + /** @type {boolean} */ + get isStreamOpenAllowed() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_STREAM_OPEN_ALLOWED); + } + + /** @type {boolean} */ + get isPrioritySupported() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_PRIORITY_SUPPORTED); + } + + /** @type {boolean} */ + get isWrapped() { + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_SESSION_WRAPPED); + } + + /** @type {bigint} */ + get lastDatagramId() { + return DataViewPrototypeGetBigUint64(this.#handle, IDX_STATE_SESSION_LAST_DATAGRAM_ID); + } + + toJSON() { + return { + __proto__: null, + hasPathValidationListener: this.hasPathValidationListener, + hasVersionNegotiationListener: this.hasVersionNegotiationListener, + hasDatagramListener: this.hasDatagramListener, + hasSessionTicketListener: this.hasSessionTicketListener, + isClosing: this.isClosing, + isGracefulClose: this.isGracefulClose, + isSilentClose: this.isSilentClose, + isStatelessReset: this.isStatelessReset, + isDestroyed: this.isDestroyed, + isHandshakeCompleted: this.isHandshakeCompleted, + isHandshakeConfirmed: this.isHandshakeConfirmed, + isStreamOpenAllowed: this.isStreamOpenAllowed, + isPrioritySupported: this.isPrioritySupported, + isWrapped: this.isWrapped, + lastDatagramId: `${this.lastDatagramId}`, + }; + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? 
null : options.depth - 1, + }; + + return `SessionState ${inspect({ + hasPathValidationListener: this.hasPathValidationListener, + hasVersionNegotiationListener: this.hasVersionNegotiationListener, + hasDatagramListener: this.hasDatagramListener, + hasSessionTicketListener: this.hasSessionTicketListener, + isClosing: this.isClosing, + isGracefulClose: this.isGracefulClose, + isSilentClose: this.isSilentClose, + isStatelessReset: this.isStatelessReset, + isDestroyed: this.isDestroyed, + isHandshakeCompleted: this.isHandshakeCompleted, + isHandshakeConfirmed: this.isHandshakeConfirmed, + isStreamOpenAllowed: this.isStreamOpenAllowed, + isPrioritySupported: this.isPrioritySupported, + isWrapped: this.isWrapped, + lastDatagramId: this.lastDatagramId, + }, opts)}`; + } + + [kFinishClose]() { + // Snapshot the state into a new DataView since the underlying + // buffer will be destroyed. + this.#handle = new DataView(new ArrayBuffer(0)); + } +} + +class Session { + /** @type {Endpoint} */ + #endpoint = undefined; + /** @type {boolean} */ + #isPendingClose = false; + /** @type {object|undefined} */ + #handle; + /** @type {PromiseWithResolvers} */ + #pendingClose; + /** @type {SocketAddress|undefined} */ + #remoteAddress = undefined; + /** @type {SessionState} */ + #state; + /** @type {SessionStats} */ + #stats; + /** @type {QuicStream[]} */ + #streams = []; + /** @type {OnStreamCallback} */ + #onstream; + /** @type {OnDatagramCallback|undefined} */ + #ondatagram; + /** @type {OnDatagramStatusCallback|undefined} */ + #ondatagramstatus; + /** @type {OnPathValidationCallback|undefined} */ + #onpathvalidation; + /** @type {OnSessionTicketCallback|undefined} */ + #onsessionticket; + /** @type {OnVersionNegotiationCallback|undefined} */ + #onversionnegotiation; + /** @type {OnHandshakeCallback} */ + #onhandshake; + /** @type {StreamCallbackConfiguration} */ + #streamConfig; + + /** + * @param {SessionCallbackConfiguration} config + * @param {StreamCallbackConfiguration} streamConfig + * @param {object} [handle] + * @param {Endpoint} [endpoint] + */ + constructor(config, streamConfig, handle, endpoint) { + validateObject(config, 'config'); + this.#stats = new SessionStats(handle.stats); + this.#state = new SessionState(handle.state); + const { + ondatagram, + ondatagramstatus, + onhandshake, + onpathvalidation, + onsessionticket, + onstream, + onversionnegotiation, + } = config; + if (ondatagram !== undefined) { + validateFunction(ondatagram, 'config.ondatagram'); + validateFunction(ondatagramstatus, 'config.ondatagramstatus'); + this.#state.hasDatagramListener = true; + this.#ondatagram = ondatagram; + this.#ondatagramstatus = ondatagramstatus; + } + validateFunction(onhandshake, 'config.onhandshake'); + if (onpathvalidation !== undefined) { + validateFunction(onpathvalidation, 'config.onpathvalidation'); + this.#state.hasPathValidationListener = true; + this.#onpathvalidation = onpathvalidation; + } + if (onsessionticket !== undefined) { + validateFunction(onsessionticket, 'config.onsessionticket'); + this.#state.hasSessionTicketListener = true; + this.#onsessionticket = onsessionticket; + } + validateFunction(onstream, 'config.onstream'); + if (onversionnegotiation !== undefined) { + validateFunction(onversionnegotiation, 'config.onversionnegotiation'); + this.#state.hasVersionNegotiationListener = true; + this.#onversionnegotiation = onversionnegotiation; + } + this.#onhandshake = onhandshake; + this.#onstream = onstream; + this.#handle = handle; + this.#endpoint = endpoint; + this.#pendingClose = 
      Promise.withResolvers(); // eslint-disable-line node-core/prefer-primordials
+    this.#streamConfig = streamConfig;
+    this.#handle[kOwner] = this;
+  }
+
+  /** @type {boolean} */
+  get #isClosedOrClosing() {
+    return this.#handle === undefined || this.#isPendingClose;
+  }
+
+  /** @type {SessionStats} */
+  get stats() { return this.#stats; }
+
+  /** @type {SessionState} */
+  get state() { return this.#state; }
+
+  /** @type {Endpoint} */
+  get endpoint() { return this.#endpoint; }
+
+  /** @type {Path} */
+  get path() {
+    if (this.#isClosedOrClosing) return undefined;
+    if (this.#remoteAddress === undefined) {
+      const addr = this.#handle.getRemoteAddress();
+      if (addr !== undefined) {
+        this.#remoteAddress = new InternalSocketAddress(addr);
+      }
+    }
+    return {
+      local: this.#endpoint.address,
+      remote: this.#remoteAddress,
+    };
+  }
+
+  /**
+   * @returns {QuicStream}
+   */
+  openBidirectionalStream() {
+    if (this.#isClosedOrClosing) {
+      throw new ERR_INVALID_STATE('Session is closed');
+    }
+    if (!this.state.isStreamOpenAllowed) {
+      throw new ERR_QUIC_OPEN_STREAM_FAILED();
+    }
+    const handle = this.#handle.openStream(STREAM_DIRECTION_BIDIRECTIONAL);
+    if (handle === undefined) {
+      throw new ERR_QUIC_OPEN_STREAM_FAILED();
+    }
+    const stream = new QuicStream(this.#streamConfig, handle, this, 0);
+    ArrayPrototypePush(this.#streams, stream);
+    return stream;
+  }
+
+  /**
+   * @returns {QuicStream}
+   */
+  openUnidirectionalStream() {
+    if (this.#isClosedOrClosing) {
+      throw new ERR_INVALID_STATE('Session is closed');
+    }
+    if (!this.state.isStreamOpenAllowed) {
+      throw new ERR_QUIC_OPEN_STREAM_FAILED();
+    }
+    const handle = this.#handle.openStream(STREAM_DIRECTION_UNIDIRECTIONAL);
+    if (handle === undefined) {
+      throw new ERR_QUIC_OPEN_STREAM_FAILED();
+    }
+    const stream = new QuicStream(this.#streamConfig, handle, this, 1);
+    ArrayPrototypePush(this.#streams, stream);
+    return stream;
+  }
+
+  /**
+   * Initiate a key update.
+   */
+  updateKey() {
+    if (this.#isClosedOrClosing) {
+      throw new ERR_INVALID_STATE('Session is closed');
+    }
+    this.#handle.updateKey();
+  }
+
+  /**
+   * Gracefully closes the session. Any streams created on the session will be
+   * allowed to complete gracefully and any datagrams that have already been
+   * queued for sending will be allowed to complete. Once all streams have been
+   * completed and all datagrams have been sent, the session will be closed.
+   * New streams will not be allowed to be created. The returned promise will
+   * be resolved when the session closes, or will be rejected if the session
+   * closes abruptly due to an error.
+   * @returns {Promise}
+   */
+  close() {
+    if (!this.#isClosedOrClosing) {
+      this.#isPendingClose = true;
+      this.#handle?.gracefulClose();
+    }
+    return this.closed;
+  }
+
+  /**
+   * A promise that is resolved when the session is closed, or is rejected if
+   * the session is closed abruptly due to an error.
+   * @type {Promise}
+   */
+  get closed() { return this.#pendingClose.promise; }
+
+  /** @type {boolean} */
+  get destroyed() { return this.#handle === undefined; }
+
+  /**
+   * Forcefully closes the session abruptly without waiting for streams to be
+   * completed naturally. Any streams that are still open will be immediately
+   * destroyed and any queued datagrams will be dropped. If an error is given,
+   * the closed promise will be rejected with that error. If no error is given,
+   * the closed promise will be resolved.
+   * @param {any} error
+   */
+  destroy(error) {
+    // TODO(@jasnell): Implement.
+  }
+
+  /**
+   * Send a datagram. The id of the sent datagram will be returned.
The status + * of the sent datagram will be reported via the datagram-status event if + * possible. + * + * If a string is given it will be encoded as UTF-8. + * + * If an ArrayBufferView is given, the view will be copied. + * @param {ArrayBufferView|string} datagram The datagram payload + * @returns {bigint} The datagram ID + */ + sendDatagram(datagram) { + if (this.#isClosedOrClosing) { + throw new ERR_INVALID_STATE('Session is closed'); + } + if (typeof datagram === 'string') { + datagram = Buffer.from(datagram, 'utf8'); + } else { + if (!isArrayBufferView(datagram)) { + throw new ERR_INVALID_ARG_TYPE('datagram', + ['ArrayBufferView', 'string'], + datagram); + } + datagram = new Uint8Array(datagram.buffer.transfer(), + datagram.byteOffset, + datagram.byteLength); + } + return this.#handle.sendDatagram(datagram); + } + + /** + * @param {Uint8Array} u8 + * @param {boolean} early + */ + [kDatagram](u8, early) { + if (this.destroyed) return; + this.#ondatagram(u8, this, early); + } + + /** + * @param {bigint} id + * @param {'lost'|'acknowledged'} status + */ + [kDatagramStatus](id, status) { + if (this.destroyed) return; + this.#ondatagramstatus(id, status, this); + } + + /** + * @param {'aborted'|'failure'|'success'} result + * @param {SocketAddress} newLocalAddress + * @param {SocketAddress} newRemoteAddress + * @param {SocketAddress} oldLocalAddress + * @param {SocketAddress} oldRemoteAddress + * @param {boolean} preferredAddress + */ + [kPathValidation](result, newLocalAddress, newRemoteAddress, oldLocalAddress, + oldRemoteAddress, preferredAddress) { + if (this.destroyed) return; + this.#onpathvalidation(result, newLocalAddress, newRemoteAddress, + oldLocalAddress, oldRemoteAddress, preferredAddress, + this); + } + + /** + * @param {object} ticket + */ + [kSessionTicket](ticket) { + if (this.destroyed) return; + this.#onsessionticket(ticket, this); + } + + /** + * @param {number} version + * @param {number[]} requestedVersions + * @param {number[]} supportedVersions + */ + [kVersionNegotiation](version, requestedVersions, supportedVersions) { + if (this.destroyed) return; + this.#onversionnegotiation(version, requestedVersions, supportedVersions, this); + } + + /** + * @param {string} sni + * @param {string} alpn + * @param {string} cipher + * @param {string} cipherVersion + * @param {string} validationErrorReason + * @param {number} validationErrorCode + * @param {boolean} earlyDataAccepted + */ + [kHandshake](sni, alpn, cipher, cipherVersion, validationErrorReason, + validationErrorCode, earlyDataAccepted) { + if (this.destroyed) return; + this.#onhandshake(sni, alpn, cipher, cipherVersion, validationErrorReason, + validationErrorCode, earlyDataAccepted, this); + } + + /** + * @param {object} handle + * @param {number} direction + */ + [kNewStream](handle, direction) { + const stream = new QuicStream(this.#streamConfig, handle, this, direction); + ArrayPrototypePush(this.#streams, stream); + this.#onstream(stream, this); + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? 
null : options.depth - 1, + }; + + return `Session ${inspect({ + closed: this.closed, + closing: this.#isPendingClose, + destroyed: this.destroyed, + endpoint: this.endpoint, + path: this.path, + state: this.state, + stats: this.stats, + streams: this.#streams, + }, opts)}`; + } + + [kFinishClose](errorType, code, reason) { + // TODO(@jasnell): Finish the implementation + } + + async [SymbolAsyncDispose]() { await this.close(); } +} + +class EndpointStats { + /** @type {BigUint64Array} */ + #handle; + + /** + * @param {ArrayBuffer} buffer + */ + constructor(buffer) { + if (!isArrayBuffer(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', ['ArrayBuffer'], buffer); + } + this.#handle = new BigUint64Array(buffer); + } + + /** @type {bigint} */ + get createdAt() { + return this.#handle[IDX_STATS_ENDPOINT_CREATED_AT]; + } + + /** @type {bigint} */ + get destroyedAt() { + return this.#handle[IDX_STATS_ENDPOINT_DESTROYED_AT]; + } + + /** @type {bigint} */ + get bytesReceived() { + return this.#handle[IDX_STATS_ENDPOINT_BYTES_RECEIVED]; + } + + /** @type {bigint} */ + get bytesSent() { + return this.#handle[IDX_STATS_ENDPOINT_BYTES_SENT]; + } + + /** @type {bigint} */ + get packetsReceived() { + return this.#handle[IDX_STATS_ENDPOINT_PACKETS_RECEIVED]; + } + + /** @type {bigint} */ + get packetsSent() { + return this.#handle[IDX_STATS_ENDPOINT_PACKETS_SENT]; + } + + /** @type {bigint} */ + get serverSessions() { + return this.#handle[IDX_STATS_ENDPOINT_SERVER_SESSIONS]; + } + + /** @type {bigint} */ + get clientSessions() { + return this.#handle[IDX_STATS_ENDPOINT_CLIENT_SESSIONS]; + } + + /** @type {bigint} */ + get serverBusyCount() { + return this.#handle[IDX_STATS_ENDPOINT_SERVER_BUSY_COUNT]; + } + + /** @type {bigint} */ + get retryCount() { + return this.#handle[IDX_STATS_ENDPOINT_RETRY_COUNT]; + } + + /** @type {bigint} */ + get versionNegotiationCount() { + return this.#handle[IDX_STATS_ENDPOINT_VERSION_NEGOTIATION_COUNT]; + } + + /** @type {bigint} */ + get statelessResetCount() { + return this.#handle[IDX_STATS_ENDPOINT_STATELESS_RESET_COUNT]; + } + + /** @type {bigint} */ + get immediateCloseCount() { + return this.#handle[IDX_STATS_ENDPOINT_IMMEDIATE_CLOSE_COUNT]; + } + + toJSON() { + return { + __proto__: null, + createdAt: `${this.createdAt}`, + destroyedAt: `${this.destroyedAt}`, + bytesReceived: `${this.bytesReceived}`, + bytesSent: `${this.bytesSent}`, + packetsReceived: `${this.packetsReceived}`, + packetsSent: `${this.packetsSent}`, + serverSessions: `${this.serverSessions}`, + clientSessions: `${this.clientSessions}`, + serverBusyCount: `${this.serverBusyCount}`, + retryCount: `${this.retryCount}`, + versionNegotiationCount: `${this.versionNegotiationCount}`, + statelessResetCount: `${this.statelessResetCount}`, + immediateCloseCount: `${this.immediateCloseCount}`, + }; + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ? 
null : options.depth - 1, + }; + + return `EndpointStats ${inspect({ + createdAt: this.createdAt, + destroyedAt: this.destroyedAt, + bytesReceived: this.bytesReceived, + bytesSent: this.bytesSent, + packetsReceived: this.packetsReceived, + packetsSent: this.packetsSent, + serverSessions: this.serverSessions, + clientSessions: this.clientSessions, + serverBusyCount: this.serverBusyCount, + retryCount: this.retryCount, + versionNegotiationCount: this.versionNegotiationCount, + statelessResetCount: this.statelessResetCount, + immediateCloseCount: this.immediateCloseCount, + }, opts)}`; + } + + [kFinishClose]() { + // Snapshot the stats into a new BigUint64Array since the underlying + // buffer will be destroyed. + this.#handle = new BigUint64Array(this.#handle); + } +} + +class EndpointState { + /** @type {DataView} */ + #handle; + + /** + * @param {ArrayBuffer} buffer + */ + constructor(buffer) { + if (!isArrayBuffer(buffer)) { + throw new ERR_INVALID_ARG_TYPE('buffer', ['ArrayBuffer'], buffer); + } + this.#handle = new DataView(buffer); + } + + #assertNotClosed() { + if (this.#handle.byteLength === 0) { + throw new ERR_INVALID_STATE('Endpoint is closed'); + } + } + + /** @type {boolean} */ + get isBound() { + this.#assertNotClosed(); + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_ENDPOINT_BOUND); + } + + /** @type {boolean} */ + get isReceiving() { + this.#assertNotClosed(); + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_ENDPOINT_RECEIVING); + } + + /** @type {boolean} */ + get isListening() { + this.#assertNotClosed(); + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_ENDPOINT_LISTENING); + } + + /** @type {boolean} */ + get isClosing() { + this.#assertNotClosed(); + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_ENDPOINT_CLOSING); + } + + /** @type {boolean} */ + get isBusy() { + this.#assertNotClosed(); + return !!DataViewPrototypeGetUint8(this.#handle, IDX_STATE_ENDPOINT_BUSY); + } + + /** + * The number of underlying callbacks that are pending. If the session + * is closing, these are the number of callbacks that the session is + * waiting on before it can be closed. + * @type {bigint} + */ + get pendingCallbacks() { + this.#assertNotClosed(); + return DataViewPrototypeGetBigUint64(this.#handle, IDX_STATE_ENDPOINT_PENDING_CALLBACKS); + } + + toJSON() { + if (this.#handle.byteLength === 0) return {}; + return { + __proto__: null, + isBound: this.isBound, + isReceiving: this.isReceiving, + isListening: this.isListening, + isClosing: this.isClosing, + isBusy: this.isBusy, + pendingCallbacks: `${this.pendingCallbacks}`, + }; + } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + if (this.#handle.byteLength === 0) { + return 'EndpointState { }'; + } + + const opts = { + ...options, + depth: options.depth == null ? null : options.depth - 1, + }; + + return `EndpointState ${inspect({ + isBound: this.isBound, + isReceiving: this.isReceiving, + isListening: this.isListening, + isClosing: this.isClosing, + isBusy: this.isBusy, + pendingCallbacks: this.pendingCallbacks, + }, opts)}`; + } + + [kFinishClose]() { + // Snapshot the state into a new DataView since the underlying + // buffer will be destroyed. 
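+    // In practice the replacement view is empty: the flags have no meaning
+    // once the endpoint is gone, so toJSON() and the custom inspector
+    // report an empty object from then on.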
+    if (this.#handle.byteLength === 0) return;
+    this.#handle = new DataView(new ArrayBuffer(0));
+  }
+}
+
+function validateStreamConfig(config, name = 'config') {
+  validateObject(config, name);
+  if (config.onerror !== undefined)
+    validateFunction(config.onerror, `${name}.onerror`);
+  if (config.onblocked !== undefined)
+    validateFunction(config.onblocked, `${name}.onblocked`);
+  if (config.onreset !== undefined)
+    validateFunction(config.onreset, `${name}.onreset`);
+  if (config.onheaders !== undefined)
+    validateFunction(config.onheaders, `${name}.onheaders`);
+  if (config.ontrailers !== undefined)
+    validateFunction(config.ontrailers, `${name}.ontrailers`);
+  return config;
+}
+
+function validateSessionConfig(config, name = 'config') {
+  validateObject(config, name);
+  if (config.onstream !== undefined)
+    validateFunction(config.onstream, `${name}.onstream`);
+  if (config.ondatagram !== undefined)
+    validateFunction(config.ondatagram, `${name}.ondatagram`);
+  if (config.ondatagramstatus !== undefined)
+    validateFunction(config.ondatagramstatus, `${name}.ondatagramstatus`);
+  if (config.onpathvalidation !== undefined)
+    validateFunction(config.onpathvalidation, `${name}.onpathvalidation`);
+  if (config.onsessionticket !== undefined)
+    validateFunction(config.onsessionticket, `${name}.onsessionticket`);
+  if (config.onversionnegotiation !== undefined)
+    validateFunction(config.onversionnegotiation, `${name}.onversionnegotiation`);
+  if (config.onhandshake !== undefined)
+    validateFunction(config.onhandshake, `${name}.onhandshake`);
+  return config;
+}
+
+function validateEndpointConfig(config) {
+  validateObject(config, 'config');
+  validateFunction(config.onsession, 'config.onsession');
+  if (config.session !== undefined)
+    validateSessionConfig(config.session, 'config.session');
+  if (config.stream !== undefined)
+    validateStreamConfig(config.stream, 'config.stream');
+  return config;
+}
+
+class Endpoint {
+  /** @type {SocketAddress|undefined} */
+  #address = undefined;
+  /** @type {boolean} */
+  #busy = false;
+  /** @type {object} */
+  #handle;
+  /** @type {boolean} */
+  #isPendingClose = false;
+  /** @type {boolean} */
+  #listening = false;
+  /** @type {PromiseWithResolvers} */
+  #pendingClose;
+  /** @type {any} */
+  #pendingError = undefined;
+  /** @type {Session[]} */
+  #sessions = [];
+  /** @type {EndpointState} */
+  #state;
+  /** @type {EndpointStats} */
+  #stats;
+  /** @type {OnSessionCallback} */
+  #onsession;
+  /** @type {SessionCallbackConfiguration} */
+  #sessionConfig;
+  /** @type {StreamCallbackConfiguration} */
+  #streamConfig;
+
+  /**
+   * @param {EndpointCallbackConfiguration} config
+   * @param {EndpointOptions} [options]
+   */
+  constructor(config, options = kEmptyObject) {
+    validateEndpointConfig(config);
+    this.#onsession = config.onsession;
+    this.#sessionConfig = config.session;
+    this.#streamConfig = config.stream;
+
+    validateObject(options, 'options');
+    let { address } = options;
+    const {
+      retryTokenExpiration,
+      tokenExpiration,
+      maxConnectionsPerHost,
+      maxConnectionsTotal,
+      maxStatelessResetsPerHost,
+      addressLRUSize,
+      maxRetries,
+      maxPayloadSize,
+      unacknowledgedPacketThreshold,
+      handshakeTimeout,
+      maxStreamWindow,
+      maxWindow,
+      rxDiagnosticLoss,
+      txDiagnosticLoss,
+      udpReceiveBufferSize,
+      udpSendBufferSize,
+      udpTTL,
+      noUdpPayloadSizeShaping,
+      validateAddress,
+      disableActiveMigration,
+      ipv6Only,
+      cc,
+      resetTokenSecret,
+      tokenSecret,
+    } = options;
+
+    // All of the other options will be validated internally by the C++ code
+    if (address
!== undefined && !SocketAddress.isSocketAddress(address)) { + if (typeof address === 'object' && address !== null) { + address = new SocketAddress(address); + } else { + throw new ERR_INVALID_ARG_TYPE('options.address', 'SocketAddress', address); + } + } + + this.#pendingClose = Promise.withResolvers(); // eslint-disable-line node-core/prefer-primordials + this.#handle = new Endpoint_({ + __proto__: null, + address: address?.[kSocketAddressHandle], + retryTokenExpiration, + tokenExpiration, + maxConnectionsPerHost, + maxConnectionsTotal, + maxStatelessResetsPerHost, + addressLRUSize, + maxRetries, + maxPayloadSize, + unacknowledgedPacketThreshold, + handshakeTimeout, + maxStreamWindow, + maxWindow, + rxDiagnosticLoss, + txDiagnosticLoss, + udpReceiveBufferSize, + udpSendBufferSize, + udpTTL, + noUdpPayloadSizeShaping, + validateAddress, + disableActiveMigration, + ipv6Only, + cc, + resetTokenSecret, + tokenSecret, + }); + this.#handle[kOwner] = this; + this.#stats = new EndpointStats(this.#handle.stats); + this.#state = new EndpointState(this.#handle.state); + } + + /** @type {EndpointStats} */ + get stats() { return this.#stats; } + + /** @type {EndpointState} */ + get state() { return this.#state; } + + get #isClosedOrClosing() { + return this.#handle === undefined || this.#isPendingClose; + } + + /** + * When an endpoint is marked as busy, it will not accept new connections. + * Existing connections will continue to work. + * @type {boolean} + */ + get busy() { return this.#busy; } + + /** + * @type {boolean} + */ + set busy(val) { + if (this.#isClosedOrClosing) { + throw new ERR_INVALID_STATE('Endpoint is closed'); + } + // The val is allowed to be any truthy value + val = !!val; + // Non-op if there is no change + if (val !== this.#busy) { + this.#busy = val; + this.#handle.markBusy(this.#busy); + } + } + + /** + * The local address the endpoint is bound to (if any) + * @type {SocketAddress|undefined} + */ + get address() { + if (this.#isClosedOrClosing) return undefined; + if (this.#address === undefined) { + const addr = this.#handle.address(); + if (addr !== undefined) this.#address = new InternalSocketAddress(addr); + } + return this.#address; + } + + /** + * Configures the endpoint to listen for incoming connections. + * @param {SessionOptions} [options] + */ + listen(options = kEmptyObject) { + if (this.#isClosedOrClosing) { + throw new ERR_INVALID_STATE('Endpoint is closed'); + } + if (this.#listening) { + throw new ERR_INVALID_STATE('Endpoint is already listening'); + } + + validateObject(options, 'options'); + + const { + version, + minVersion, + preferredAddressPolicy = 'default', + application = kEmptyObject, + transportParams = kEmptyObject, + tls = kEmptyObject, + qlog = false, + } = options; + + this.#handle.listen({ + version, + minVersion, + preferredAddressPolicy: getPreferredAddressPolicy(preferredAddressPolicy), + application, + transportParams, + tls: processTlsOptions(tls), + qlog, + }); + this.#listening = true; + } + + /** + * Initiates a session with a remote endpoint. 
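+   * A hedged sketch of a client connection (the address and ALPN value
+   * shown are illustrative only):
+   *
+   *   const session = endpoint.connect(
+   *     new SocketAddress({ address: '127.0.0.1', port: 8888 }),
+   *     { tls: { alpn: 'h3' } });
+   *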
+ * @param {SocketAddress} address + * @param {SessionOptions} [options] + * @returns {Session} + */ + connect(address, options = kEmptyObject) { + if (this.#isClosedOrClosing) { + throw new ERR_INVALID_STATE('Endpoint is closed'); + } + if (this.#busy) { + throw new ERR_INVALID_STATE('Endpoint is busy'); + } + + if (address === undefined || !SocketAddress.isSocketAddress(address)) { + if (typeof address === 'object' && address !== null) { + address = new SocketAddress(address); + } else { + throw new ERR_INVALID_ARG_TYPE('address', 'SocketAddress', address); + } + } + + validateObject(options, 'options'); + const { + version, + minVersion, + preferredAddressPolicy = 'default', + application = kEmptyObject, + transportParams = kEmptyObject, + tls = kEmptyObject, + qlog = false, + sessionTicket, + } = options; + + const handle = this.#handle.connect(address[kSocketAddressHandle], { + version, + minVersion, + preferredAddressPolicy: getPreferredAddressPolicy(preferredAddressPolicy), + application, + transportParams, + tls: processTlsOptions(tls), + qlog, + }, sessionTicket); + + if (handle === undefined) { + throw new ERR_QUIC_CONNECTION_FAILED(); + } + const session = new Session(this.#sessionConfig, this.#streamConfig, handle, this); + ArrayPrototypePush(this.#sessions, session); + return session; + } + + /** + * Gracefully closes the endpoint. Any existing sessions will be permitted to + * end gracefully, after which the endpoint will be closed. New sessions will + * not be accepted or created. The returned promise will be resolved when + * closing is complete, or will be rejected if the endpoint is closed abruptly + * due to an error. + * @returns {Promise} + */ + close() { + if (!this.#isClosedOrClosing) { + this.#isPendingClose = true; + this.#handle?.closeGracefully(); + } + return this.closed; + } + + /** + * Returns a promise that is resolved when the endpoint is closed or rejects + * if the endpoint is closed abruptly due to an error. The closed property + * is set to the same promise that is returned by the close() method. + * @type {Promise} + */ + get closed() { return this.#pendingClose.promise; } + + /** @type {boolean} */ + get destroyed() { return this.#handle === undefined; } + + /** + * Forcefully terminates the endpoint by immediately destroying all sessions + * after calling close. If an error is given, the closed promise will be + * rejected with that error. If no error is given, the closed promise will + * be resolved. + * @param {any} error + */ + destroy(error) { + if (!this.#isClosedOrClosing) { + // Start closing the endpoint. + this.#pendingError = error; + // Trigger a graceful close of the endpoint that'll ensure that the + // endpoint is closed down after all sessions are closed... + this.close(); + } + // Now, force all sessions to be abruptly closed... 
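+    // (Graceful close is initiated first so that, once the native side
+    // reports the close context, [kFinishClose] settles the `closed`
+    // promise using the #pendingError recorded above.)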
+ for (const session of this.#sessions) { + session.destroy(error); + } + } + + ref() { + if (this.#handle !== undefined) this.#handle.ref(true); + } + + unref() { + if (this.#handle !== undefined) this.#handle.ref(false); + } + + [kFinishClose](context, status) { + if (this.#handle === undefined) return; + this.#isPendingClose = false; + this.#address = undefined; + this.#busy = false; + this.#listening = false; + this.#stats[kFinishClose](); + this.#state[kFinishClose](); + this.#sessions = []; + + switch (context) { + case kCloseContextClose: { + if (this.#pendingError !== undefined) { + this.#pendingClose.reject(this.#pendingError); + } else { + this.#pendingClose.resolve(); + } + break; + } + case kCloseContextBindFailure: { + this.#pendingClose.reject( + new ERR_QUIC_ENDPOINT_CLOSED('Bind failure', status)); + break; + } + case kCloseContextListenFailure: { + this.#pendingClose.reject( + new ERR_QUIC_ENDPOINT_CLOSED('Listen failure', status)); + break; + } + case kCloseContextReceiveFailure: { + this.#pendingClose.reject( + new ERR_QUIC_ENDPOINT_CLOSED('Receive failure', status)); + break; + } + case kCloseContextSendFailure: { + this.#pendingClose.reject( + new ERR_QUIC_ENDPOINT_CLOSED('Send failure', status)); + break; + } + case kCloseContextStartFailure: { + this.#pendingClose.reject( + new ERR_QUIC_ENDPOINT_CLOSED('Start failure', status)); + break; + } + } + + this.#pendingError = undefined; + this.#pendingClose.resolve = undefined; + this.#pendingClose.reject = undefined; + this.#handle = undefined; + } + + [kNewSession](handle) { + const session = new Session(this.#sessionConfig, this.#streamConfig, handle, this); + ArrayPrototypePush(this.#sessions, session); + this.#onsession(session, this); + } + + async [SymbolAsyncDispose]() { await this.close(); } + + [kInspect](depth, options) { + if (depth < 0) + return this; + + const opts = { + ...options, + depth: options.depth == null ?
null : options.depth - 1, + }; + + return `Endpoint ${inspect({ + address: this.address, + busy: this.busy, + closed: this.closed, + closing: this.#isPendingClose, + destroyed: this.destroyed, + listening: this.#listening, + sessions: this.#sessions, + stats: this.stats, + state: this.state, + }, opts)}`; + } +}; + +ObjectDefineProperties(QuicStreamStats.prototype, { + createdAt: kEnumerable, + receivedAt: kEnumerable, + ackedAt: kEnumerable, + closingAt: kEnumerable, + destroyedAt: kEnumerable, + bytesReceived: kEnumerable, + bytesSent: kEnumerable, + maxOffset: kEnumerable, + maxOffsetAcknowledged: kEnumerable, + maxOffsetReceived: kEnumerable, + finalSize: kEnumerable, +}); +ObjectDefineProperties(QuicStreamState.prototype, { + id: kEnumerable, + finSent: kEnumerable, + finReceived: kEnumerable, + readEnded: kEnumerable, + writeEnded: kEnumerable, + destroyed: kEnumerable, + paused: kEnumerable, + reset: kEnumerable, + hasReader: kEnumerable, + wantsBlock: kEnumerable, + wantsHeaders: kEnumerable, + wantsReset: kEnumerable, + wantsTrailers: kEnumerable, +}); +ObjectDefineProperties(QuicStream.prototype, { + stats: kEnumerable, + state: kEnumerable, + session: kEnumerable, + id: kEnumerable, +}); +ObjectDefineProperties(SessionStats.prototype, { + createdAt: kEnumerable, + closingAt: kEnumerable, + destroyedAt: kEnumerable, + handshakeCompletedAt: kEnumerable, + handshakeConfirmedAt: kEnumerable, + gracefulClosingAt: kEnumerable, + bytesReceived: kEnumerable, + bytesSent: kEnumerable, + bidiInStreamCount: kEnumerable, + bidiOutStreamCount: kEnumerable, + uniInStreamCount: kEnumerable, + uniOutStreamCount: kEnumerable, + lossRetransmitCount: kEnumerable, + maxBytesInFlights: kEnumerable, + bytesInFlight: kEnumerable, + blockCount: kEnumerable, + cwnd: kEnumerable, + latestRtt: kEnumerable, + minRtt: kEnumerable, + rttVar: kEnumerable, + smoothedRtt: kEnumerable, + ssthresh: kEnumerable, + datagramsReceived: kEnumerable, + datagramsSent: kEnumerable, + datagramsAcknowledged: kEnumerable, + datagramsLost: kEnumerable, +}); +ObjectDefineProperties(SessionState.prototype, { + hasPathValidationListener: kEnumerable, + hasVersionNegotiationListener: kEnumerable, + hasDatagramListener: kEnumerable, + hasSessionTicketListener: kEnumerable, + isClosing: kEnumerable, + isGracefulClose: kEnumerable, + isSilentClose: kEnumerable, + isStatelessReset: kEnumerable, + isDestroyed: kEnumerable, + isHandshakeCompleted: kEnumerable, + isHandshakeConfirmed: kEnumerable, + isStreamOpenAllowed: kEnumerable, + isPrioritySupported: kEnumerable, + isWrapped: kEnumerable, + lastDatagramId: kEnumerable, +}); +ObjectDefineProperties(Session.prototype, { + closed: kEnumerable, + destroyed: kEnumerable, + endpoint: kEnumerable, + path: kEnumerable, + state: kEnumerable, + stats: kEnumerable, +}); +ObjectDefineProperties(EndpointStats.prototype, { + createdAt: kEnumerable, + destroyedAt: kEnumerable, + bytesReceived: kEnumerable, + bytesSent: kEnumerable, + packetsReceived: kEnumerable, + packetsSent: kEnumerable, + serverSessions: kEnumerable, + clientSessions: kEnumerable, + serverBusyCount: kEnumerable, + retryCount: kEnumerable, + versionNegotiationCount: kEnumerable, + statelessResetCount: kEnumerable, + immediateCloseCount: kEnumerable, +}); +ObjectDefineProperties(EndpointState.prototype, { + isBound: kEnumerable, + isReceiving: kEnumerable, + isListening: kEnumerable, + isClosing: kEnumerable, + isBusy: kEnumerable, + pendingCallbacks: kEnumerable, +}); +ObjectDefineProperties(Endpoint.prototype, { + address: 
kEnumerable, + busy: kEnumerable, + closed: kEnumerable, + destroyed: kEnumerable, + state: kEnumerable, + stats: kEnumerable, +}); +ObjectDefineProperties(Endpoint, { + CC_ALGO_RENO: { + __proto__: null, + value: CC_ALGO_RENO, + writable: false, + configurable: false, + enumerable: true, + }, + CC_ALGO_CUBIC: { + __proto__: null, + value: CC_ALGO_CUBIC, + writable: false, + configurable: false, + enumerable: true, + }, + CC_ALGO_BBR: { + __proto__: null, + value: CC_ALGO_BBR, + writable: false, + configurable: false, + enumerable: true, + }, + CC_ALGO_RENO_STR: { + __proto__: null, + value: CC_ALGO_RENO_STR, + writable: false, + configurable: false, + enumerable: true, + }, + CC_ALGO_CUBIC_STR: { + __proto__: null, + value: CC_ALGO_CUBIC_STR, + writable: false, + configurable: false, + enumerable: true, + }, + CC_ALGO_BBR_STR: { + __proto__: null, + value: CC_ALGO_BBR_STR, + writable: false, + configurable: false, + enumerable: true, + }, +}); +ObjectDefineProperties(Session, { + DEFAULT_CIPHERS: { + __proto__: null, + value: DEFAULT_CIPHERS, + writable: false, + configurable: false, + enumerable: true, + }, + DEFAULT_GROUPS: { + __proto__: null, + value: DEFAULT_GROUPS, + writable: false, + configurable: false, + enumerable: true, + }, +}); + +module.exports = { + Endpoint, + Session, + QuicStream, + // Exported for testing + kFinishClose, + SessionState, + SessionStats, + QuicStreamState, + QuicStreamStats, + EndpointState, + EndpointStats, +}; + +/* c8 ignore stop */ diff --git a/lib/internal/readline/interface.js b/lib/internal/readline/interface.js index 7c1a15f3c70012..aaa1dea7a8cbcd 100644 --- a/lib/internal/readline/interface.js +++ b/lib/internal/readline/interface.js @@ -261,7 +261,7 @@ function InterfaceConstructor(input, output, completer, terminal) { function onkeypress(s, key) { self[kTtyWrite](s, key); - if (key && key.sequence) { + if (key?.sequence) { // If the key.sequence is half of a surrogate pair // (>= 0xd800 and <= 0xdfff), refresh the line so // the character is displayed appropriately. @@ -345,7 +345,7 @@ class Interface extends InterfaceConstructor { super(input, output, completer, terminal); } get columns() { - if (this.output && this.output.columns) return this.output.columns; + if (this.output?.columns) return this.output.columns; return Infinity; } diff --git a/lib/internal/repl/utils.js b/lib/internal/repl/utils.js index ce0caf6d2a3eb7..27e1011ec9daf6 100644 --- a/lib/internal/repl/utils.js +++ b/lib/internal/repl/utils.js @@ -436,9 +436,9 @@ function setupPreview(repl, contextSymbol, bufferSymbol, active) { // Line breaks are very rare and probably only occur in case of error // messages with line breaks. - const lineBreakPos = StringPrototypeIndexOf(inspected, '\n'); - if (lineBreakPos !== -1) { - inspected = `${StringPrototypeSlice(inspected, 0, lineBreakPos)}`; + const lineBreakMatch = RegExpPrototypeExec(/[\r\n\v]/, inspected); + if (lineBreakMatch !== null) { + inspected = `${StringPrototypeSlice(inspected, 0, lineBreakMatch.index)}`; } const result = repl.useColors ? 
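Context for the repl preview change above: truncation now stops at the first of `\r`, `\n`, or `\v` rather than only `\n`, so a preview containing a bare carriage return no longer leaks past the first line. A standalone sketch of the new behavior, using plain string/regexp methods instead of primordials:

// Truncate an inspected preview at the first line-breaking character.
function firstLineOf(inspected) {
  const match = /[\r\n\v]/.exec(inspected);
  return match === null ? inspected : inspected.slice(0, match.index);
}
console.log(firstLineOf('foo\rbar'));  // 'foo' (a lone \r previously slipped through)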
diff --git a/lib/internal/source_map/source_map.js b/lib/internal/source_map/source_map.js index 926ea191cfc332..c5f7c3c5918ae9 100644 --- a/lib/internal/source_map/source_map.js +++ b/lib/internal/source_map/source_map.js @@ -269,7 +269,7 @@ class SourceMap { ArrayPrototypePush(sources, url); this.#sources[url] = true; - if (map.sourcesContent && map.sourcesContent[i]) + if (map.sourcesContent?.[i]) this.#sourceContentByURL[url] = map.sourcesContent[i]; } diff --git a/lib/internal/source_map/source_map_cache.js b/lib/internal/source_map/source_map_cache.js index 9defc32da8e1e6..b24ad24438441f 100644 --- a/lib/internal/source_map/source_map_cache.js +++ b/lib/internal/source_map/source_map_cache.js @@ -20,6 +20,7 @@ const { setSourceMapsEnabled: setSourceMapsNative, } = internalBinding('errors'); const { + defaultPrepareStackTrace, setInternalPrepareStackTrace, } = require('internal/errors'); const { getLazy } = require('internal/util'); @@ -64,9 +65,6 @@ function setSourceMapsEnabled(val) { setInternalPrepareStackTrace(prepareStackTraceWithSourceMaps); } else if (sourceMapsEnabled !== undefined) { // Reset prepare stack trace callback only when disabling source maps. - const { - defaultPrepareStackTrace, - } = require('internal/errors'); setInternalPrepareStackTrace(defaultPrepareStackTrace); } diff --git a/lib/internal/source_map/source_map_cache_map.js b/lib/internal/source_map/source_map_cache_map.js index e8adfe83708316..f5aca17d6dcb84 100644 --- a/lib/internal/source_map/source_map_cache_map.js +++ b/lib/internal/source_map/source_map_cache_map.js @@ -26,7 +26,7 @@ const { * Obsolete `weakModuleMap` entries are removed by the `finalizationRegistry` * callback. This pattern decouples the strong url reference to the source map * data and allow the cache to be reclaimed eagerly, without depending on an - * undeterministic callback of a finalization registry. + * indeterministic callback of a finalization registry. */ class SourceMapCacheMap { /** diff --git a/lib/internal/streams/from.js b/lib/internal/streams/from.js index aa7e031d3e48d4..fad5e68c10d8c4 100644 --- a/lib/internal/streams/from.js +++ b/lib/internal/streams/from.js @@ -26,10 +26,10 @@ function from(Readable, iterable, opts) { } let isAsync; - if (iterable && iterable[SymbolAsyncIterator]) { + if (iterable?.[SymbolAsyncIterator]) { isAsync = true; iterator = iterable[SymbolAsyncIterator](); - } else if (iterable && iterable[SymbolIterator]) { + } else if (iterable?.[SymbolIterator]) { isAsync = false; iterator = iterable[SymbolIterator](); } else { diff --git a/lib/internal/streams/pipeline.js b/lib/internal/streams/pipeline.js index d885455ee05ff6..24d0b037567e14 100644 --- a/lib/internal/streams/pipeline.js +++ b/lib/internal/streams/pipeline.js @@ -447,7 +447,7 @@ function pipe(src, dst, finish, finishOnlyHandleError, { end }) { if ( err && err.code === 'ERR_STREAM_PREMATURE_CLOSE' && - (rState && rState.ended && !rState.errored && !rState.errorEmitted) + (rState?.ended && !rState.errored && !rState.errorEmitted) ) { // Some readable streams will emit 'close' before 'end'. However, since // this is on the readable side 'end' should still be emitted if the diff --git a/lib/internal/streams/readable.js b/lib/internal/streams/readable.js index 17f8e53ad56339..5a6fe1db000f5c 100644 --- a/lib/internal/streams/readable.js +++ b/lib/internal/streams/readable.js @@ -266,10 +266,10 @@ function ReadableState(options, stream, isDuplex) { // Object stream flag. 
Used to make read(n) ignore n and to // make all the buffer merging and length checks go away. - if (options && options.objectMode) + if (options?.objectMode) this[kState] |= kObjectMode; - if (isDuplex && options && options.readableObjectMode) + if (isDuplex && options?.readableObjectMode) this[kState] |= kObjectMode; // The point at which it stops calling _read() to fill the buffer @@ -305,7 +305,7 @@ function ReadableState(options, stream, isDuplex) { // type: null | Writable | Set. this.awaitDrainWriters = null; - if (options && options.encoding) { + if (options?.encoding) { this.decoder = new StringDecoder(options.encoding); this.encoding = options.encoding; } @@ -791,7 +791,7 @@ function onEofChunk(stream, state) { const decoder = (state[kState] & kDecoder) !== 0 ? state[kDecoderValue] : null; if (decoder) { const chunk = decoder.end(); - if (chunk && chunk.length) { + if (chunk?.length) { state.buffer.push(chunk); state.length += (state[kState] & kObjectMode) !== 0 ? 1 : chunk.length; } @@ -1461,7 +1461,7 @@ ObjectDefineProperties(Readable.prototype, { __proto__: null, enumerable: false, get: function() { - return this._readableState && this._readableState.buffer; + return this._readableState?.buffer; }, }, diff --git a/lib/internal/streams/utils.js b/lib/internal/streams/utils.js index 4b359279556592..45f55316104fe8 100644 --- a/lib/internal/streams/utils.js +++ b/lib/internal/streams/utils.js @@ -290,8 +290,7 @@ function willEmitClose(stream) { const state = wState || rState; return (!state && isServerResponse(stream)) || !!( - state && - state.autoDestroy && + state?.autoDestroy && state.emitClose && state.closed === false ); diff --git a/lib/internal/streams/writable.js b/lib/internal/streams/writable.js index 83178c2429c791..b74b60882017f6 100644 --- a/lib/internal/streams/writable.js +++ b/lib/internal/streams/writable.js @@ -306,10 +306,10 @@ function WritableState(options, stream, isDuplex) { // instead of a V8 slot per field. 
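The `kState` word initialized below packs what previously took one V8 slot per boolean into bit flags on a single small integer. The pattern in isolation, with hypothetical flag names:

const kObjectMode = 1 << 0;
const kEnded = 1 << 1;

let state = 0;
state |= kObjectMode;                      // set a flag
console.log((state & kObjectMode) !== 0);  // test a flag: true
state &= ~kObjectMode;                     // clear a flag
console.log(state === 0);                  // true: kEnded was never set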
this[kState] = kSync | kConstructed | kEmitClose | kAutoDestroy; - if (options && options.objectMode) + if (options?.objectMode) this[kState] |= kObjectMode; - if (isDuplex && options && options.writableObjectMode) + if (isDuplex && options?.writableObjectMode) this[kState] |= kObjectMode; // The point at which write() starts returning false @@ -1068,7 +1068,7 @@ ObjectDefineProperties(Writable.prototype, { __proto__: null, get() { const state = this._writableState; - return state && state.highWaterMark; + return state?.highWaterMark; }, }, @@ -1084,7 +1084,7 @@ ObjectDefineProperties(Writable.prototype, { __proto__: null, get() { const state = this._writableState; - return state && state.length; + return state?.length; }, }, diff --git a/lib/internal/test_runner/coverage.js b/lib/internal/test_runner/coverage.js index 7ef57020728302..17e02de6577a5c 100644 --- a/lib/internal/test_runner/coverage.js +++ b/lib/internal/test_runner/coverage.js @@ -30,6 +30,12 @@ const { tmpdir } = require('os'); const { join, resolve, relative, matchesGlob } = require('path'); const { fileURLToPath } = require('internal/url'); const { kMappings, SourceMap } = require('internal/source_map/source_map'); +const { + codes: { + ERR_SOURCE_MAP_CORRUPT, + ERR_SOURCE_MAP_MISSING_SOURCE, + }, +} = require('internal/errors'); const kCoverageFileRegex = /^coverage-(\d+)-(\d{13})-(\d+)\.json$/; const kIgnoreRegex = /\/\* node:coverage ignore next (?\d+ )?\*\//; const kLineEndingRegex = /\r?\n$/u; @@ -348,6 +354,7 @@ class TestCoverage { continue; } const { data, lineLengths } = sourceMapCache[url]; + if (!data) throw new ERR_SOURCE_MAP_CORRUPT(url); let offset = 0; const executedLines = ArrayPrototypeMap(lineLengths, (length, i) => { const coverageLine = new CoverageLine(i + 1, offset, null, length + 1); @@ -390,6 +397,9 @@ class TestCoverage { newUrl ??= startEntry?.originalSource; const mappedLines = this.getLines(newUrl); + if (!mappedLines) { + throw new ERR_SOURCE_MAP_MISSING_SOURCE(newUrl, url); + } const mappedStartOffset = this.entryToOffset(startEntry, mappedLines); const mappedEndOffset = this.entryToOffset(endEntry, mappedLines) + 1; for (let l = startEntry.originalLine; l <= endEntry.originalLine; l++) { diff --git a/lib/internal/test_runner/harness.js b/lib/internal/test_runner/harness.js index 50ecb290bfa65a..86f4f40bf823db 100644 --- a/lib/internal/test_runner/harness.js +++ b/lib/internal/test_runner/harness.js @@ -52,7 +52,7 @@ function createTestTree(rootTestOptions, globalOptions) { resetCounters() { harness.counters = { __proto__: null, - all: 0, + tests: 0, failed: 0, passed: 0, cancelled: 0, @@ -62,6 +62,7 @@ function createTestTree(rootTestOptions, globalOptions) { suites: 0, }; }, + success: true, counters: null, shouldColorizeTestFiles: shouldColorizeTestFiles(globalOptions.destinations), teardown: null, @@ -130,6 +131,7 @@ function createProcessEventHandler(eventName, rootTest) { } rootTest.diagnostic(msg); + rootTest.harness.success = false; process.exitCode = kGenericUserError; return; } @@ -152,6 +154,7 @@ function configureCoverage(rootTest, globalOptions) { const msg = `Warning: Code coverage could not be enabled. ${err}`; rootTest.diagnostic(msg); + rootTest.harness.success = false; process.exitCode = kGenericUserError; } } @@ -167,6 +170,7 @@ function collectCoverage(rootTest, coverage) { summary = coverage.summary(); } catch (err) { rootTest.diagnostic(`Warning: Could not report code coverage. 
${err}`); + rootTest.harness.success = false; process.exitCode = kGenericUserError; } @@ -174,6 +178,7 @@ function collectCoverage(rootTest, coverage) { coverage.cleanup(); } catch (err) { rootTest.diagnostic(`Warning: Could not clean up code coverage. ${err}`); + rootTest.harness.success = false; process.exitCode = kGenericUserError; } @@ -248,14 +253,16 @@ function lazyBootstrapRoot() { if (!globalRoot) { // This is where the test runner is bootstrapped when node:test is used // without the --test flag or the run() API. + const entryFile = process.argv?.[1]; const rootTestOptions = { __proto__: null, - entryFile: process.argv?.[1], + entryFile, + loc: entryFile ? [1, 1, entryFile] : undefined, }; const globalOptions = parseCommandLine(); createTestTree(rootTestOptions, globalOptions); - globalRoot.reporter.on('test:fail', (data) => { - if (data.todo === undefined || data.todo === false) { + globalRoot.reporter.on('test:summary', (data) => { + if (!data.success) { process.exitCode = kGenericUserError; } }); diff --git a/lib/internal/test_runner/mock/loader.js b/lib/internal/test_runner/mock/loader.js index 25d9b31d0cb74d..bfdfe93741c2cc 100644 --- a/lib/internal/test_runner/mock/loader.js +++ b/lib/internal/test_runner/mock/loader.js @@ -137,9 +137,9 @@ async function load(url, context, nextLoad) { async function createSourceFromMock(mock, format) { // Create mock implementation from provided exports. const { exportNames, hasDefaultExport, url } = mock; - const useESM = format === 'module'; + const useESM = format === 'module' || format === 'module-typescript'; const source = `${testImportSource(useESM)} -if (!$__test.mock._mockExports.has('${url}')) { +if (!$__test.mock._mockExports.has(${JSONStringify(url)})) { throw new Error(${JSONStringify(`mock exports not found for "${url}"`)}); } diff --git a/lib/internal/test_runner/mock/mock.js b/lib/internal/test_runner/mock/mock.js index 5d3bce816aa69b..99fe1baebb771b 100644 --- a/lib/internal/test_runner/mock/mock.js +++ b/lib/internal/test_runner/mock/mock.js @@ -70,7 +70,7 @@ const kMockUnknownMessage = 3; const kWaitTimeout = 5_000; const kBadExportsMessage = 'Cannot create mock because named exports ' + 'cannot be applied to the provided default export.'; -const kSupportedFormats = ['builtin', 'commonjs', 'module']; +const kSupportedFormats = ['builtin', 'commonjs', 'module', 'module-typescript', 'commonjs-typescript']; let sharedModuleState; class MockFunctionContext { @@ -517,7 +517,10 @@ class MockTracker { // Get the file that called this function. We need four stack frames: // vm context -> getStructuredStack() -> this function -> actual caller. - const caller = pathToFileURL(getStructuredStack()[3]?.getFileName()).href; + const filename = getStructuredStack()[3]?.getFileName(); + // If the caller is already a file URL, use it as is. Otherwise, convert it. + const hasFileProtocol = StringPrototypeStartsWith(filename, 'file://'); + const caller = hasFileProtocol ? 
filename : pathToFileURL(filename).href; const { format, url } = sharedState.moduleLoader.resolveSync( mockSpecifier, caller, null, ); diff --git a/lib/internal/test_runner/mock/mock_timers.js b/lib/internal/test_runner/mock/mock_timers.js index dc77fc429935b6..fa91c9759ecc15 100644 --- a/lib/internal/test_runner/mock/mock_timers.js +++ b/lib/internal/test_runner/mock/mock_timers.js @@ -377,7 +377,7 @@ class MockTimers { return FunctionPrototypeToString(MockDate[kMock].#nativeDateDescriptor.value); }; - // We need to polute the prototype of this + // We need to pollute the prototype of this ObjectDefineProperties(MockDate, { __proto__: null, [kMock]: { diff --git a/lib/internal/test_runner/reporter/dot.js b/lib/internal/test_runner/reporter/dot.js index 1e581f7a337845..45ff047bc4e5a0 100644 --- a/lib/internal/test_runner/reporter/dot.js +++ b/lib/internal/test_runner/reporter/dot.js @@ -12,10 +12,10 @@ module.exports = async function* dot(source) { const failedTests = []; for await (const { type, data } of source) { if (type === 'test:pass') { - yield `${colors.green}.${colors.clear}`; + yield `${colors.green}.${colors.reset}`; } if (type === 'test:fail') { - yield `${colors.red}X${colors.clear}`; + yield `${colors.red}X${colors.reset}`; ArrayPrototypePush(failedTests, data); } if ((type === 'test:fail' || type === 'test:pass') && ++count === columns) { diff --git a/lib/internal/test_runner/reporter/utils.js b/lib/internal/test_runner/reporter/utils.js index 42c4ffa3cdaaec..9b6cc96a185a9a 100644 --- a/lib/internal/test_runner/reporter/utils.js +++ b/lib/internal/test_runner/reporter/utils.js @@ -72,16 +72,14 @@ function formatTestReport(type, data, prefix = '', indent = '', hasChildren = fa title += ` # ${typeof todo === 'string' && todo.length ? todo : 'TODO'}`; } const error = formatError(data.details?.error, indent); - if (hasChildren) { - // If this test has had children - it was already reported, so slightly modify the output - const err = !error || data.details?.error?.failureType === 'subtestsFailed' ? '' : `\n${error}`; - return `${prefix}${indent}${color}${reporterUnicodeSymbolMap['arrow:right']}${colors.white}${title}${err}`; - } + const err = hasChildren ? + (!error || data.details?.error?.failureType === 'subtestsFailed' ? '' : `\n${error}`) : + error; if (skip !== undefined) { color = colors.gray; symbol = reporterUnicodeSymbolMap['hyphen:minus']; } - return `${prefix}${indent}${color}${symbol}${title}${colors.white}${error}`; + return `${prefix}${indent}${color}${symbol}${title}${colors.white}${err}`; } module.exports = { diff --git a/lib/internal/test_runner/reporter/v8-serializer.js b/lib/internal/test_runner/reporter/v8-serializer.js index cfeedfdff378c2..c75bfcdac478cf 100644 --- a/lib/internal/test_runner/reporter/v8-serializer.js +++ b/lib/internal/test_runner/reporter/v8-serializer.js @@ -16,7 +16,7 @@ module.exports = async function* v8Reporter(source) { for await (const item of source) { const originalError = item.data.details?.error; if (originalError) { - // Error is overriden with a serialized version, so that it can be + // Error is overridden with a serialized version, so that it can be // deserialized in the parent process. // Error is restored after serialization. 
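The swap-and-restore around `serializeError` above exists because a live Error object cannot cross the process boundary as-is; conceptually it is a structured-clone round trip. A hedged illustration, not the runner's actual code path:

// structuredClone (global since Node.js 17) uses the same V8 serializer.
const original = new Error('boom');
original.cause = { code: 42 };
const copy = structuredClone(original);
console.log(copy instanceof Error);     // true
console.log(copy.message, copy.cause);  // 'boom' { code: 42 }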
item.data.details.error = serializeError(originalError); diff --git a/lib/internal/test_runner/runner.js b/lib/internal/test_runner/runner.js index e2237dd73f468d..2241df6b378253 100644 --- a/lib/internal/test_runner/runner.js +++ b/lib/internal/test_runner/runner.js @@ -10,6 +10,7 @@ const { ArrayPrototypeJoin, ArrayPrototypeMap, ArrayPrototypePush, + ArrayPrototypePushApply, ArrayPrototypeShift, ArrayPrototypeSlice, ArrayPrototypeSome, @@ -55,6 +56,7 @@ const { validateObject, validateOneOf, validateInteger, + validateStringArray, } = require('internal/validators'); const { getInspectPort, isUsingInspector, isInspectorMessage } = require('internal/util/inspector'); const { isRegExp } = require('internal/util/types'); @@ -129,7 +131,13 @@ function filterExecArgv(arg, i, arr) { !ArrayPrototypeSome(kFilterArgValues, (p) => arg === p || (i > 0 && arr[i - 1] === p) || StringPrototypeStartsWith(arg, `${p}=`)); } -function getRunArgs(path, { forceExit, inspectPort, testNamePatterns, testSkipPatterns, only }) { +function getRunArgs(path, { forceExit, + inspectPort, + testNamePatterns, + testSkipPatterns, + only, + argv: suppliedArgs, + execArgv }) { const argv = ArrayPrototypeFilter(process.execArgv, filterExecArgv); if (forceExit === true) { ArrayPrototypePush(argv, '--test-force-exit'); @@ -147,12 +155,17 @@ function getRunArgs(path, { forceExit, inspectPort, testNamePatterns, testSkipPa ArrayPrototypePush(argv, '--test-only'); } + ArrayPrototypePushApply(argv, execArgv); + if (path === kIsolatedProcessName) { - ArrayPrototypePush(argv, '--test', ...ArrayPrototypeSlice(process.argv, 1)); + ArrayPrototypePush(argv, '--test'); + ArrayPrototypePushApply(argv, ArrayPrototypeSlice(process.argv, 1)); } else { ArrayPrototypePush(argv, path); } + ArrayPrototypePushApply(argv, suppliedArgs); + return argv; } @@ -274,7 +287,7 @@ class FileTest extends Test { ArrayPrototypePush(this.#rawBuffer, readData); } this.#rawBufferSize += dataLength; - this.#proccessRawBuffer(); + this.#processRawBuffer(); if (partialV8Header) { ArrayPrototypePush(this.#rawBuffer, TypedArrayPrototypeSubarray(v8Header, 0, 1)); @@ -283,10 +296,10 @@ class FileTest extends Test { } #drainRawBuffer() { while (this.#rawBuffer.length > 0) { - this.#proccessRawBuffer(); + this.#processRawBuffer(); } } - #proccessRawBuffer() { + #processRawBuffer() { // This method is called when it is known that there is at least one message let bufferHead = this.#rawBuffer[0]; let headerIndex = bufferHead.indexOf(v8Header); @@ -524,7 +537,13 @@ function watchFiles(testFiles, opts) { function run(options = kEmptyObject) { validateObject(options, 'options'); - let { testNamePatterns, testSkipPatterns, shard } = options; + let { + testNamePatterns, + testSkipPatterns, + shard, + coverageExcludeGlobs, + coverageIncludeGlobs, + } = options; const { concurrency, timeout, @@ -537,6 +556,12 @@ function run(options = kEmptyObject) { setup, only, globPatterns, + coverage = false, + lineCoverage = 0, + branchCoverage = 0, + functionCoverage = 0, + execArgv = [], + argv = [], } = options; if (files != null) { @@ -615,6 +640,25 @@ function run(options = kEmptyObject) { }); } validateOneOf(isolation, 'options.isolation', ['process', 'none']); + validateBoolean(coverage, 'options.coverage'); + if (coverageExcludeGlobs != null) { + if (!ArrayIsArray(coverageExcludeGlobs)) { + coverageExcludeGlobs = [coverageExcludeGlobs]; + } + validateStringArray(coverageExcludeGlobs, 'options.coverageExcludeGlobs'); + } + if (coverageIncludeGlobs != null) { + if 
(!ArrayIsArray(coverageIncludeGlobs)) { + coverageIncludeGlobs = [coverageIncludeGlobs]; + } + validateStringArray(coverageIncludeGlobs, 'options.coverageIncludeGlobs'); + } + validateInteger(lineCoverage, 'options.lineCoverage', 0, 100); + validateInteger(branchCoverage, 'options.branchCoverage', 0, 100); + validateInteger(functionCoverage, 'options.functionCoverage', 0, 100); + + validateStringArray(argv, 'options.argv'); + validateStringArray(execArgv, 'options.execArgv'); const rootTestOptions = { __proto__: null, concurrency, timeout, signal }; const globalOptions = { @@ -623,6 +667,12 @@ function run(options = kEmptyObject) { // behavior has relied on it, so removing it must be done in a semver major. ...parseCommandLine(), setup, // This line can be removed when parseCommandLine() is removed here. + coverage, + coverageExcludeGlobs, + coverageIncludeGlobs, + lineCoverage: lineCoverage, + branchCoverage: branchCoverage, + functionCoverage: functionCoverage, }; const root = createTestTree(rootTestOptions, globalOptions); @@ -652,6 +702,8 @@ function run(options = kEmptyObject) { forceExit, cwd, isolation, + argv, + execArgv, }; if (isolation === 'process') { diff --git a/lib/internal/test_runner/test.js b/lib/internal/test_runner/test.js index 029887bf18799b..3a2a2b1383ff30 100644 --- a/lib/internal/test_runner/test.js +++ b/lib/internal/test_runner/test.js @@ -8,18 +8,21 @@ const { ArrayPrototypeSome, ArrayPrototypeSplice, ArrayPrototypeUnshift, + ArrayPrototypeUnshiftApply, FunctionPrototype, MathMax, Number, NumberPrototypeToFixed, ObjectDefineProperty, ObjectSeal, + Promise, PromisePrototypeThen, PromiseResolve, ReflectApply, RegExpPrototypeExec, SafeMap, SafePromiseAll, + SafePromiseAllReturnVoid, SafePromisePrototypeFinally, SafePromiseRace, SafeSet, @@ -46,6 +49,7 @@ const { createDeferredCallback, countCompletedTest, isTestFailureError, + reporterScope, } = require('internal/test_runner/utils'); const { createDeferredPromise, @@ -556,7 +560,7 @@ class Test extends AsyncResource { const map = lazyFindSourceMap(this.loc.file); const entry = map?.findEntry(this.loc.line - 1, this.loc.column - 1); - if (entry !== undefined) { + if (entry?.originalSource !== undefined) { this.loc.line = entry.originalLine + 1; this.loc.column = entry.originalColumn + 1; this.loc.file = entry.originalSource; @@ -732,9 +736,9 @@ class Test extends AsyncResource { computeInheritedHooks() { if (this.parent.hooks.beforeEach.length > 0) { - ArrayPrototypeUnshift( + ArrayPrototypeUnshiftApply( this.hooks.beforeEach, - ...ArrayPrototypeSlice(this.parent.hooks.beforeEach), + ArrayPrototypeSlice(this.parent.hooks.beforeEach), ); } @@ -973,10 +977,25 @@ class Test extends AsyncResource { // any remaining ref'ed handles, then do that now. It is theoretically // possible that a ref'ed handle could asynchronously create more tests, // but the user opted into this behavior. 
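The validation above backs the new public `run()` options. A minimal sketch of a programmatic runner using them; the file path and flags are hypothetical:

const { run } = require('node:test');

run({
  files: ['./test/example.test.js'],  // hypothetical path
  coverage: true,
  lineCoverage: 80,                   // thresholds are validated as 0-100 above
  execArgv: ['--throw-deprecation'],  // forwarded to spawned test processes
  argv: ['--custom-flag'],            // appended after the test file path
}).on('test:summary', ({ success }) => {
  // The test:summary event added elsewhere in this patch carries
  // { success, counts, duration_ms, file }.
  if (!success) process.exitCode = 1;
});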
- this.reporter.once('close', () => { - process.exit(); - }); + const promises = []; + + for (let i = 0; i < reporterScope.reporters.length; i++) { + const { destination } = reporterScope.reporters[i]; + + ArrayPrototypePush(promises, new Promise((resolve) => { + destination.on('unpipe', () => { + if (!destination.closed && typeof destination.close === 'function') { + destination.close(resolve); + } else { + resolve(); + } + }); + })); + } + this.harness.teardown(); + await SafePromiseAllReturnVoid(promises); + process.exit(); } } @@ -1043,14 +1062,15 @@ class Test extends AsyncResource { reporter.diagnostic(nesting, loc, diagnostics[i]); } - reporter.diagnostic(nesting, loc, `tests ${harness.counters.all}`); + const duration = this.duration(); + reporter.diagnostic(nesting, loc, `tests ${harness.counters.tests}`); reporter.diagnostic(nesting, loc, `suites ${harness.counters.suites}`); reporter.diagnostic(nesting, loc, `pass ${harness.counters.passed}`); reporter.diagnostic(nesting, loc, `fail ${harness.counters.failed}`); reporter.diagnostic(nesting, loc, `cancelled ${harness.counters.cancelled}`); reporter.diagnostic(nesting, loc, `skipped ${harness.counters.skipped}`); reporter.diagnostic(nesting, loc, `todo ${harness.counters.todo}`); - reporter.diagnostic(nesting, loc, `duration_ms ${this.duration()}`); + reporter.diagnostic(nesting, loc, `duration_ms ${duration}`); if (coverage) { const coverages = [ @@ -1067,6 +1087,7 @@ class Test extends AsyncResource { for (let i = 0; i < coverages.length; i++) { const { threshold, actual, name } = coverages[i]; if (actual < threshold) { + harness.success = false; process.exitCode = kGenericUserError; reporter.diagnostic(nesting, loc, `Error: ${NumberPrototypeToFixed(actual, 2)}% ${name} coverage does not meet threshold of ${threshold}%.`); } @@ -1075,6 +1096,10 @@ class Test extends AsyncResource { reporter.coverage(nesting, loc, coverage); } + reporter.summary( + nesting, loc?.file, harness.success, harness.counters, duration, + ); + if (harness.watching) { this.reported = false; harness.resetCounters(); diff --git a/lib/internal/test_runner/tests_stream.js b/lib/internal/test_runner/tests_stream.js index 08d4397ae64a3c..ecbc407e01f318 100644 --- a/lib/internal/test_runner/tests_stream.js +++ b/lib/internal/test_runner/tests_stream.js @@ -132,6 +132,16 @@ class TestsStream extends Readable { }); } + summary(nesting, file, success, counts, duration_ms) { + this[kEmitMessage]('test:summary', { + __proto__: null, + success, + counts, + duration_ms, + file, + }); + } + end() { this.#tryPush(null); } diff --git a/lib/internal/test_runner/utils.js b/lib/internal/test_runner/utils.js index 041c9b68b600da..8a8461b06ea8ec 100644 --- a/lib/internal/test_runner/utils.js +++ b/lib/internal/test_runner/utils.js @@ -1,8 +1,10 @@ 'use strict'; const { ArrayPrototypeFlatMap, + ArrayPrototypeForEach, ArrayPrototypeJoin, ArrayPrototypeMap, + ArrayPrototypePop, ArrayPrototypePush, ArrayPrototypeReduce, ArrayPrototypeSome, @@ -24,7 +26,7 @@ const { } = primordials; const { AsyncResource } = require('async_hooks'); -const { relative } = require('path'); +const { relative, sep } = require('path'); const { createWriteStream } = require('fs'); const { pathToFileURL } = require('internal/url'); const { createDeferredPromise } = require('internal/util'); @@ -52,8 +54,11 @@ const kMultipleCallbackInvocations = 'multipleCallbackInvocations'; const kRegExpPattern = /^\/(.*)\/([a-z]*)$/; const kPatterns = ['test', 'test/**/*', 'test-*', '*[._-]test']; -const kDefaultPattern 
= `**/{${ArrayPrototypeJoin(kPatterns, ',')}}.?(c|m)js`; - +const kFileExtensions = ['js', 'mjs', 'cjs']; +if (getOptionValue('--experimental-strip-types')) { + ArrayPrototypePush(kFileExtensions, 'ts', 'mts', 'cts'); +} +const kDefaultPattern = `**/{${ArrayPrototypeJoin(kPatterns, ',')}}.{${ArrayPrototypeJoin(kFileExtensions, ',')}}`; function createDeferredCallback() { let calledCount = 0; @@ -142,7 +147,8 @@ function shouldColorizeTestFiles(destinations) { async function getReportersMap(reporters, destinations) { return SafePromiseAllReturnArrayLike(reporters, async (name, i) => { - const destination = kBuiltinDestinations.get(destinations[i]) ?? createWriteStream(destinations[i]); + const destination = kBuiltinDestinations.get(destinations[i]) ?? + createWriteStream(destinations[i], { __proto__: null, flush: true }); // Load the test reporter passed to --test-reporter let reporter = tryBuiltinReporter(name); @@ -299,6 +305,8 @@ function parseCommandLine() { const { reporter, destination } = reportersMap[i]; compose(rootReporter, reporter).pipe(destination); } + + reporterScope.reporters = reportersMap; }); globalTestOptions = { @@ -355,12 +363,14 @@ function countCompletedTest(test, harness = test.root.harness) { harness.counters.todo++; } else if (test.cancelled) { harness.counters.cancelled++; + harness.success = false; } else if (!test.passed) { harness.counters.failed++; + harness.success = false; } else { harness.counters.passed++; } - harness.counters.all++; + harness.counters.tests++; } @@ -409,6 +419,36 @@ const kColumns = ['line %', 'branch %', 'funcs %']; const kColumnsKeys = ['coveredLinePercent', 'coveredBranchPercent', 'coveredFunctionPercent']; const kSeparator = ' | '; +function buildFileTree(summary) { + const tree = { __proto__: null }; + let treeDepth = 1; + let longestFile = 0; + + ArrayPrototypeForEach(summary.files, (file) => { + let longestPart = 0; + const parts = StringPrototypeSplit(relative(summary.workingDirectory, file.path), sep); + let current = tree; + + ArrayPrototypeForEach(parts, (part, index) => { + if (!current[part]) { + current[part] = { __proto__: null }; + } + current = current[part]; + // If this is the last part, add the file to the tree + if (index === parts.length - 1) { + current.file = file; + } + // Keep track of the longest part for padding + longestPart = MathMax(longestPart, part.length); + }); + + treeDepth = MathMax(treeDepth, parts.length); + longestFile = MathMax(longestPart, longestFile); + }); + + return { __proto__: null, tree, treeDepth, longestFile }; +} + function getCoverageReport(pad, summary, symbol, color, table) { const prefix = `${pad}${symbol}`; let report = `${color}${prefix}start of coverage report\n`; @@ -418,11 +458,19 @@ function getCoverageReport(pad, summary, symbol, color, table) { let uncoveredLinesPadLength; let tableWidth; + // Create a tree of file paths + const { tree, treeDepth, longestFile } = buildFileTree(summary); if (table) { - // Get expected column sizes - filePadLength = table && ArrayPrototypeReduce(summary.files, (acc, file) => - MathMax(acc, relative(summary.workingDirectory, file.path).length), 0); + // Calculate expected column sizes based on the tree + filePadLength = table && longestFile; + filePadLength += (treeDepth - 1); + if (color) { + filePadLength += 2; + } filePadLength = MathMax(filePadLength, 'file'.length); + if (filePadLength > (process.stdout.columns / 2)) { + filePadLength = MathFloor(process.stdout.columns / 2); + } const fileWidth = filePadLength + 2; columnPadLengths = 
ArrayPrototypeMap(kColumns, (column) => (table ? MathMax(column.length, 6) : 0)); @@ -435,26 +483,17 @@ function getCoverageReport(pad, summary, symbol, color, table) { tableWidth = fileWidth + columnsWidth + uncoveredLinesWidth; - // Fit with sensible defaults const availableWidth = (process.stdout.columns || Infinity) - prefix.length; const columnsExtras = tableWidth - availableWidth; if (table && columnsExtras > 0) { - // Ensure file name is sufficiently visible - const minFilePad = MathMin(8, filePadLength); - filePadLength -= MathFloor(columnsExtras * 0.2); - filePadLength = MathMax(filePadLength, minFilePad); - - // Get rest of available space, subtracting margins + filePadLength = MathMin(availableWidth * 0.5, filePadLength); uncoveredLinesPadLength = MathMax(availableWidth - columnsWidth - (filePadLength + 2) - 2, 1); - - // Update table width tableWidth = availableWidth; } else { uncoveredLinesPadLength = Infinity; } } - function getCell(string, width, pad, truncate, coverage) { if (!table) return string; @@ -469,35 +508,85 @@ function getCoverageReport(pad, summary, symbol, color, table) { return result; } - // Head - if (table) report += addTableLine(prefix, tableWidth); - report += `${prefix}${getCell('file', filePadLength, StringPrototypePadEnd, truncateEnd)}${kSeparator}` + - `${ArrayPrototypeJoin(ArrayPrototypeMap(kColumns, (column, i) => getCell(column, columnPadLengths[i], StringPrototypePadStart)), kSeparator)}${kSeparator}` + - `${getCell('uncovered lines', uncoveredLinesPadLength, false, truncateEnd)}\n`; - if (table) report += addTableLine(prefix, tableWidth); + function writeReportLine({ file, depth = 0, coveragesColumns, fileCoverage, uncoveredLines }) { + const fileColumn = `${prefix}${StringPrototypeRepeat(' ', depth)}${getCell(file, filePadLength - depth, StringPrototypePadEnd, truncateStart, fileCoverage)}`; + const coverageColumns = ArrayPrototypeJoin(ArrayPrototypeMap(coveragesColumns, (coverage, j) => { + const coverageText = typeof coverage === 'number' ? 
NumberPrototypeToFixed(coverage, 2) : coverage; + return getCell(coverageText, columnPadLengths[j], StringPrototypePadStart, false, coverage); + }), kSeparator); - // Body - for (let i = 0; i < summary.files.length; ++i) { - const file = summary.files[i]; - const relativePath = relative(summary.workingDirectory, file.path); + const uncoveredLinesColumn = getCell(uncoveredLines, uncoveredLinesPadLength, false, truncateEnd); - let fileCoverage = 0; - const coverages = ArrayPrototypeMap(kColumnsKeys, (columnKey) => { - const percent = file[columnKey]; - fileCoverage += percent; - return percent; - }); - fileCoverage /= kColumnsKeys.length; + return `${fileColumn}${kSeparator}${coverageColumns}${kSeparator}${uncoveredLinesColumn}\n`; + } - report += `${prefix}${getCell(relativePath, filePadLength, StringPrototypePadEnd, truncateStart, fileCoverage)}${kSeparator}` + - `${ArrayPrototypeJoin(ArrayPrototypeMap(coverages, (coverage, j) => getCell(NumberPrototypeToFixed(coverage, 2), columnPadLengths[j], StringPrototypePadStart, false, coverage)), kSeparator)}${kSeparator}` + - `${getCell(formatUncoveredLines(getUncoveredLines(file.lines), table), uncoveredLinesPadLength, false, truncateEnd)}\n`; + function printCoverageBodyTree(tree, depth = 0) { + for (const key in tree) { + if (tree[key].file) { + const file = tree[key].file; + const fileName = ArrayPrototypePop(StringPrototypeSplit(file.path, sep)); + + let fileCoverage = 0; + const coverages = ArrayPrototypeMap(kColumnsKeys, (columnKey) => { + const percent = file[columnKey]; + fileCoverage += percent; + return percent; + }); + fileCoverage /= kColumnsKeys.length; + + const uncoveredLines = formatUncoveredLines(getUncoveredLines(file.lines), table); + + report += writeReportLine({ + __proto__: null, + file: fileName, + depth: depth, + coveragesColumns: coverages, + fileCoverage: fileCoverage, + uncoveredLines: uncoveredLines, + }); + } else { + report += writeReportLine({ + __proto__: null, + file: key, + depth: depth, + coveragesColumns: ArrayPrototypeMap(columnPadLengths, () => ''), + fileCoverage: undefined, + uncoveredLines: '', + }); + printCoverageBodyTree(tree[key], depth + 1); + } + } } - // Foot + // -------------------------- Coverage Report -------------------------- + if (table) report += addTableLine(prefix, tableWidth); + + // Print the header + report += writeReportLine({ + __proto__: null, + file: 'file', + coveragesColumns: kColumns, + fileCoverage: undefined, + uncoveredLines: 'uncovered lines', + }); + + if (table) report += addTableLine(prefix, tableWidth); + + // Print the body + printCoverageBodyTree(tree); + if (table) report += addTableLine(prefix, tableWidth); - report += `${prefix}${getCell('all files', filePadLength, StringPrototypePadEnd, truncateEnd)}${kSeparator}` + - `${ArrayPrototypeJoin(ArrayPrototypeMap(kColumnsKeys, (columnKey, j) => getCell(NumberPrototypeToFixed(summary.totals[columnKey], 2), columnPadLengths[j], StringPrototypePadStart, false, summary.totals[columnKey])), kSeparator)} |\n`; + + // Print the footer + const allFilesCoverages = ArrayPrototypeMap(kColumnsKeys, (columnKey) => summary.totals[columnKey]); + report += writeReportLine({ + __proto__: null, + file: 'all files', + coveragesColumns: allFilesCoverages, + fileCoverage: undefined, + uncoveredLines: '', + }); + if (table) report += addTableLine(prefix, tableWidth); report += `${prefix}end of coverage report\n`; diff --git a/lib/internal/tty.js b/lib/internal/tty.js index 4d3c768f65b6cd..b0bfac122781cc 100644 --- a/lib/internal/tty.js +++ 
b/lib/internal/tty.js @@ -60,9 +60,9 @@ const TERM_ENVS = { 'mosh': COLORS_16m, 'putty': COLORS_16, 'st': COLORS_16, - // https://github.com/da-x/rxvt-unicode/tree/v9.22-with-24bit-color + // http://lists.schmorp.de/pipermail/rxvt-unicode/2016q2/002261.html 'rxvt-unicode-24bit': COLORS_16m, - // https://gist.github.com/XVilka/8346728#gistcomment-2823421 + // https://bugs.launchpad.net/terminator/+bug/1030562 'terminator': COLORS_16m, }; diff --git a/lib/internal/util.js b/lib/internal/util.js index 9141811afa23e2..0973d4efdf2257 100644 --- a/lib/internal/util.js +++ b/lib/internal/util.js @@ -768,9 +768,9 @@ function SideEffectFreeRegExpPrototypeExec(regex, string) { return FunctionPrototypeCall(RegExpFromAnotherRealm.prototype.exec, regex, string); } -const crossRelmRegexes = new SafeWeakMap(); -function getCrossRelmRegex(regex) { - const cached = crossRelmRegexes.get(regex); +const crossRealmRegexes = new SafeWeakMap(); +function getCrossRealmRegex(regex) { + const cached = crossRealmRegexes.get(regex); if (cached) return cached; let flagString = ''; @@ -783,17 +783,17 @@ function getCrossRelmRegex(regex) { if (RegExpPrototypeGetSticky(regex)) flagString += 'y'; const { RegExp: RegExpFromAnotherRealm } = getInternalGlobal(); - const crossRelmRegex = new RegExpFromAnotherRealm(RegExpPrototypeGetSource(regex), flagString); - crossRelmRegexes.set(regex, crossRelmRegex); - return crossRelmRegex; + const crossRealmRegex = new RegExpFromAnotherRealm(RegExpPrototypeGetSource(regex), flagString); + crossRealmRegexes.set(regex, crossRealmRegex); + return crossRealmRegex; } function SideEffectFreeRegExpPrototypeSymbolReplace(regex, string, replacement) { - return getCrossRelmRegex(regex)[SymbolReplace](string, replacement); + return getCrossRealmRegex(regex)[SymbolReplace](string, replacement); } function SideEffectFreeRegExpPrototypeSymbolSplit(regex, string, limit = undefined) { - return getCrossRelmRegex(regex)[SymbolSplit](string, limit); + return getCrossRealmRegex(regex)[SymbolSplit](string, limit); } /** diff --git a/lib/internal/util/colors.js b/lib/internal/util/colors.js index 31e3e9b22585c5..054f9c4b4090d8 100644 --- a/lib/internal/util/colors.js +++ b/lib/internal/util/colors.js @@ -13,6 +13,7 @@ module.exports = { red: '', gray: '', clear: '', + reset: '', hasColors: false, shouldColorize(stream) { if (process.env.FORCE_COLOR !== undefined) { @@ -32,6 +33,7 @@ module.exports = { module.exports.red = hasColors ? '\u001b[31m' : ''; module.exports.gray = hasColors ? '\u001b[90m' : ''; module.exports.clear = hasColors ? '\u001bc' : ''; + module.exports.reset = hasColors ? 
'\u001b[0m' : ''; module.exports.hasColors = hasColors; } }, diff --git a/lib/internal/util/debuglog.js b/lib/internal/util/debuglog.js index 0ffb79041641ac..271c9d1497d88f 100644 --- a/lib/internal/util/debuglog.js +++ b/lib/internal/util/debuglog.js @@ -173,7 +173,7 @@ function formatTime(ms) { } function safeTraceLabel(label) { - return label.replace(/\\/g, '\\\\'); + return label.replace(/\\/g, '\\\\').replaceAll('"', '\\"'); } /** diff --git a/lib/internal/util/inspect.js b/lib/internal/util/inspect.js index 674cb43ab1f01f..c10db2954db662 100644 --- a/lib/internal/util/inspect.js +++ b/lib/internal/util/inspect.js @@ -241,14 +241,17 @@ const meta = [ ]; // Regex used for ansi escape code splitting -// Adopted from https://github.com/chalk/ansi-regex/blob/HEAD/index.js -// License: MIT, authors: @sindresorhus, Qix-, arjunmehta and LitoMore +// Ref: https://github.com/chalk/ansi-regex/blob/f338e1814144efb950276aac84135ff86b72dc8e/index.js +// License: MIT by Sindre Sorhus // Matches all ansi escape code sequences in a string -const ansiPattern = '[\\u001B\\u009B][[\\]()#;?]*' + - '(?:(?:(?:(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]+)*' + - '|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/#&.:=?%@~_]*)*)?\\u0007)' + - '|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?[\\dA-PR-TZcf-ntqry=><~]))'; -const ansi = new RegExp(ansiPattern, 'g'); +const ansi = new RegExp( + '[\\u001B\\u009B][[\\]()#;?]*' + + '(?:(?:(?:(?:;[-a-zA-Z\\d\\/\\#&.:=?%@~_]+)*' + + '|[a-zA-Z\\d]+(?:;[-a-zA-Z\\d\\/\\#&.:=?%@~_]*)*)?' + + '(?:\\u0007|\\u001B\\u005C|\\u009C))' + + '|(?:(?:\\d{1,4}(?:;\\d{0,4})*)?' + + '[\\dA-PR-TZcf-nq-uy=><~]))', 'g', +); let getStringWidth; @@ -1281,7 +1284,7 @@ function improveStack(stack, constructor, name, tag) { if (constructor === null) { const start = RegExpPrototypeExec(/^([A-Z][a-z_ A-Z0-9[\]()-]+)(?::|\n {4}at)/, stack) || RegExpPrototypeExec(/^([a-z_A-Z0-9-]*Error)$/, stack); - fallback = (start && start[1]) || ''; + fallback = (start?.[1]) || ''; len = fallback.length; fallback = fallback || 'Error'; } diff --git a/lib/internal/v8/startup_snapshot.js b/lib/internal/v8/startup_snapshot.js index 7c789577aec969..01204b96239406 100644 --- a/lib/internal/v8/startup_snapshot.js +++ b/lib/internal/v8/startup_snapshot.js @@ -58,11 +58,16 @@ function addDeserializeCallback(callback, data) { } const serializeCallbacks = []; +const afterUserSerializeCallbacks = []; // Callbacks to run after user callbacks function runSerializeCallbacks() { while (serializeCallbacks.length > 0) { const { 0: callback, 1: data } = serializeCallbacks.shift(); callback(data); } + while (afterUserSerializeCallbacks.length > 0) { + const { 0: callback, 1: data } = afterUserSerializeCallbacks.shift(); + callback(data); + } } function addSerializeCallback(callback, data) { @@ -71,6 +76,10 @@ function addSerializeCallback(callback, data) { serializeCallbacks.push([callback, data]); } +function addAfterUserSerializeCallback(callback, data) { + afterUserSerializeCallbacks.push([callback, data]); +} + function initializeCallbacks() { // Only run the serialize callbacks in snapshot building mode, otherwise // they throw. 
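The new `afterUserSerializeCallbacks` queue above drains only after every user-registered callback has run. For reference, the public half of this machinery is `v8.startupSnapshot`; a sketch of typical usage, run under `node --build-snapshot`:

const { startupSnapshot } = require('node:v8');

if (startupSnapshot.isBuildingSnapshot()) {
  // User callbacks registered here run first, in registration order...
  startupSnapshot.addSerializeCallback(() => {
    console.log('user serialize callback');
  });
  startupSnapshot.addDeserializeCallback(() => {
    console.log('runs when the snapshot is rehydrated');
  });
  // ...then the internal queue filled via addAfterUserSerializeCallback()
  // (not exported publicly) drains.
}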
@@ -117,4 +126,5 @@ module.exports = { setDeserializeMainFunction, isBuildingSnapshot, }, + addAfterUserSerializeCallback, }; diff --git a/lib/internal/webidl.js b/lib/internal/webidl.js index 9e4b20435bf831..4b689cf5eb24b2 100644 --- a/lib/internal/webidl.js +++ b/lib/internal/webidl.js @@ -241,7 +241,7 @@ function type(V) { case 'object': // Fall through case 'function': // Fall through default: - // Per ES spec, typeof returns an implemention-defined value that is not + // Per ES spec, typeof returns an implementation-defined value that is not // any of the existing ones for uncallable non-standard exotic objects. // Yet Type() which the Web IDL spec depends on returns Object for such // cases. So treat the default case as an object. diff --git a/lib/internal/webstreams/encoding.js b/lib/internal/webstreams/encoding.js index b5533b4287b65a..f316222ccbf0e8 100644 --- a/lib/internal/webstreams/encoding.js +++ b/lib/internal/webstreams/encoding.js @@ -20,6 +20,7 @@ const { customInspect } = require('internal/webstreams/util'); const { codes: { + ERR_INVALID_ARG_TYPE, ERR_INVALID_THIS, }, } = require('internal/errors'); @@ -133,6 +134,9 @@ class TextDecoderStream { this.#handle = new TextDecoder(encoding, options); this.#transform = new TransformStream({ transform: (chunk, controller) => { + if (chunk === undefined) { + throw new ERR_INVALID_ARG_TYPE('chunk', 'string', chunk); + } const value = this.#handle.decode(chunk, { stream: true }); if (value) controller.enqueue(value); diff --git a/lib/internal/webstreams/readablestream.js b/lib/internal/webstreams/readablestream.js index 94c76a83898222..7279ff3de4b28b 100644 --- a/lib/internal/webstreams/readablestream.js +++ b/lib/internal/webstreams/readablestream.js @@ -639,6 +639,7 @@ ObjectDefineProperties(ReadableStream, { }); function InternalTransferredReadableStream() { + ObjectSetPrototypeOf(this, ReadableStream.prototype); markTransferMode(this, false, true); this[kType] = 'ReadableStream'; this[kState] = createReadableStreamState(); @@ -1226,6 +1227,7 @@ ObjectDefineProperties(ReadableByteStreamController.prototype, { }); function InternalReadableStream(start, pull, cancel, highWaterMark, size) { + ObjectSetPrototypeOf(this, ReadableStream.prototype); markTransferMode(this, false, true); this[kType] = 'ReadableStream'; this[kState] = createReadableStreamState(); @@ -1253,6 +1255,7 @@ function createReadableStream(start, pull, cancel, highWaterMark = 1, size = () } function InternalReadableByteStream(start, pull, cancel) { + ObjectSetPrototypeOf(this, ReadableStream.prototype); markTransferMode(this, false, true); this[kType] = 'ReadableStream'; this[kState] = createReadableStreamState(); diff --git a/lib/internal/webstreams/transformstream.js b/lib/internal/webstreams/transformstream.js index 68b1e7aa83d9fc..9e26387b3b6110 100644 --- a/lib/internal/webstreams/transformstream.js +++ b/lib/internal/webstreams/transformstream.js @@ -256,6 +256,7 @@ ObjectDefineProperties(TransformStream.prototype, { }); function InternalTransferredTransformStream() { + ObjectSetPrototypeOf(this, TransformStream.prototype); markTransferMode(this, false, true); this[kType] = 'TransformStream'; this[kState] = { diff --git a/lib/internal/webstreams/util.js b/lib/internal/webstreams/util.js index cace607a077618..2c70ef7acdfe66 100644 --- a/lib/internal/webstreams/util.js +++ b/lib/internal/webstreams/util.js @@ -211,11 +211,12 @@ function createAsyncFromSyncIterator(syncIteratorRecord) { return { iterator: asyncIterator, nextMethod, done: false }; } +// Refs: 
https://tc39.es/ecma262/#sec-getiterator function getIterator(obj, kind = 'sync', method) { if (method === undefined) { if (kind === 'async') { method = obj[SymbolAsyncIterator]; - if (method === undefined) { + if (method == null) { const syncMethod = obj[SymbolIterator]; if (syncMethod === undefined) { diff --git a/lib/internal/webstreams/writablestream.js b/lib/internal/webstreams/writablestream.js index 43f7a5121b1e97..ba1461fc70e95f 100644 --- a/lib/internal/webstreams/writablestream.js +++ b/lib/internal/webstreams/writablestream.js @@ -300,6 +300,7 @@ ObjectDefineProperties(WritableStream.prototype, { }); function InternalTransferredWritableStream() { + ObjectSetPrototypeOf(this, WritableStream.prototype); markTransferMode(this, false, true); this[kType] = 'WritableStream'; this[kState] = createWritableStreamState(); @@ -516,6 +517,7 @@ ObjectDefineProperties(WritableStreamDefaultController.prototype, { }); function InternalWritableStream(start, write, close, abort, highWaterMark, size) { + ObjectSetPrototypeOf(this, WritableStream.prototype); markTransferMode(this, false, true); this[kType] = 'WritableStream'; this[kState] = createWritableStreamState(); diff --git a/lib/internal/worker/io.js b/lib/internal/worker/io.js index 3263ddb88194fc..42b8845cec6711 100644 --- a/lib/internal/worker/io.js +++ b/lib/internal/worker/io.js @@ -29,6 +29,14 @@ const { oninit: onInitSymbol, no_message_symbol: noMessageSymbol, } = internalBinding('symbols'); +const { + privateSymbols: { + transfer_mode_private_symbol, + }, + constants: { + kCloneable, + }, +} = internalBinding('util'); const { MessagePort, MessageChannel, @@ -402,7 +410,7 @@ class BroadcastChannel extends EventTarget { if (arguments.length === 0) throw new ERR_MISSING_ARGS('message'); if (this[kHandle] === undefined) - throw new DOMException('BroadcastChannel is closed.'); + throw new DOMException('BroadcastChannel is closed.', 'InvalidStateError'); if (this[kHandle].postMessage(message) === undefined) throw new DOMException('Message could not be posted.'); } @@ -447,6 +455,13 @@ ObjectDefineProperties(BroadcastChannel.prototype, { defineEventHandler(BroadcastChannel.prototype, 'message'); defineEventHandler(BroadcastChannel.prototype, 'messageerror'); +function markAsUncloneable(obj) { + if ((typeof obj !== 'object' && typeof obj !== 'function') || obj === null) { + return; + } + obj[transfer_mode_private_symbol] &= ~kCloneable; +} + module.exports = { drainMessagePort, messageTypes, @@ -454,6 +469,7 @@ module.exports = { kIncrementsPortRef, kWaitingStreams, kStdioWantsMoreDataCallback, + markAsUncloneable, moveMessagePortToContext, MessagePort, MessageChannel, diff --git a/lib/module.js b/lib/module.js index fe0eb937eaeac0..48ba9215b081eb 100644 --- a/lib/module.js +++ b/lib/module.js @@ -7,6 +7,7 @@ const { SourceMap } = require('internal/source_map/source_map'); const { constants, enableCompileCache, + flushCompileCache, getCompileCacheDir, } = require('internal/modules/helpers'); @@ -15,5 +16,7 @@ Module.register = register; Module.SourceMap = SourceMap; Module.constants = constants; Module.enableCompileCache = enableCompileCache; +Module.flushCompileCache = flushCompileCache; + Module.getCompileCacheDir = getCompileCacheDir; module.exports = Module; diff --git a/lib/net.js b/lib/net.js index 4591e3b977cc51..604ba648754a2a 100644 --- a/lib/net.js +++ b/lib/net.js @@ -747,8 +747,7 @@ Socket.prototype.resetAndDestroy = function() { }; Socket.prototype.pause = function() { - if (this[kBuffer] && !this.connecting && this._handle && - 
this._handle.reading) { + if (this[kBuffer] && !this.connecting && this._handle?.reading) { this._handle.reading = false; if (!this.destroyed) { const err = this._handle.readStop(); @@ -1218,7 +1217,7 @@ Socket.prototype.connect = function(...args) { } // If the parent is already connecting, do not attempt to connect again - if (this._parent && this._parent.connecting) { + if (this._parent?.connecting) { return this; } @@ -2184,7 +2183,7 @@ ObjectDefineProperty(Server.prototype, 'listening', { }); Server.prototype.address = function() { - if (this._handle && this._handle.getsockname) { + if (this._handle?.getsockname) { const out = {}; const err = this._handle.getsockname(out); if (err) { diff --git a/lib/os.js b/lib/os.js index bbae40b2ab1046..c88ef443b9b4f2 100644 --- a/lib/os.js +++ b/lib/os.js @@ -180,12 +180,13 @@ platform[SymbolToPrimitive] = () => process.platform; */ function tmpdir() { if (isWindows) { - let path = process.env.TEMP || + const path = process.env.TEMP || process.env.TMP || (process.env.SystemRoot || process.env.windir) + '\\temp'; + if (path.length > 1 && StringPrototypeEndsWith(path, '\\') && !StringPrototypeEndsWith(path, ':\\')) - path = StringPrototypeSlice(path, 0, -1); + return StringPrototypeSlice(path, 0, -1); return path; } diff --git a/lib/path.js b/lib/path.js index e9b56e89a40ee7..eefa9bc5db0729 100644 --- a/lib/path.js +++ b/lib/path.js @@ -1163,8 +1163,8 @@ const posix = { let resolvedPath = ''; let resolvedAbsolute = false; - for (let i = args.length - 1; i >= -1 && !resolvedAbsolute; i--) { - const path = i >= 0 ? args[i] : posixCwd(); + for (let i = args.length - 1; i >= 0 && !resolvedAbsolute; i--) { + const path = args[i]; validateString(path, `paths[${i}]`); // Skip empty entries @@ -1177,6 +1177,13 @@ const posix = { StringPrototypeCharCodeAt(path, 0) === CHAR_FORWARD_SLASH; } + if (!resolvedAbsolute) { + const cwd = posixCwd(); + resolvedPath = `${cwd}/${resolvedPath}`; + resolvedAbsolute = + StringPrototypeCharCodeAt(cwd, 0) === CHAR_FORWARD_SLASH; + } + // At this point the path should be resolved to a full absolute path, but // handle relative paths to be safe (might happen when process.cwd() fails) diff --git a/lib/readline.js b/lib/readline.js index abc5b25d2e00e6..e3ae952648fcec 100644 --- a/lib/readline.js +++ b/lib/readline.js @@ -163,7 +163,7 @@ Interface.prototype.question[promisify.custom] = function question(query, option options = kEmptyObject; } - if (options.signal && options.signal.aborted) { + if (options.signal?.aborted) { return PromiseReject( new AbortError(undefined, { cause: options.signal.reason })); } diff --git a/lib/repl.js b/lib/repl.js index a7aa4c717837dd..005da4ee915625 100644 --- a/lib/repl.js +++ b/lib/repl.js @@ -908,8 +908,8 @@ function REPLServer(prompt, StringPrototypeCharAt(trimmedCmd, 1) !== '.' 
&& NumberIsNaN(NumberParseFloat(trimmedCmd))) { const matches = RegExpPrototypeExec(/^\.([^\s]+)\s*(.*)$/, trimmedCmd); - const keyword = matches && matches[1]; - const rest = matches && matches[2]; + const keyword = matches?.[1]; + const rest = matches?.[2]; if (ReflectApply(_parseREPLKeyword, self, [keyword, rest]) === true) { return; } @@ -1253,7 +1253,7 @@ function filteredOwnPropertyNames(obj) { let isObjectPrototype = false; if (ObjectGetPrototypeOf(obj) === null) { const ctorDescriptor = ObjectGetOwnPropertyDescriptor(obj, 'constructor'); - if (ctorDescriptor && ctorDescriptor.value) { + if (ctorDescriptor?.value) { const ctorProto = ObjectGetPrototypeOf(ctorDescriptor.value); isObjectPrototype = ctorProto && ObjectGetPrototypeOf(ctorProto) === obj; } @@ -1528,7 +1528,7 @@ function complete(line, callback) { let p; if ((typeof obj === 'object' && obj !== null) || typeof obj === 'function') { - memberGroups.push(filteredOwnPropertyNames(obj)); + ArrayPrototypePush(memberGroups, filteredOwnPropertyNames(obj)); p = ObjectGetPrototypeOf(obj); } else { p = obj.constructor ? obj.constructor.prototype : null; @@ -1536,7 +1536,7 @@ function complete(line, callback) { // Circular refs possible? Let's guard against that. let sentinel = 5; while (p !== null && sentinel-- !== 0) { - memberGroups.push(filteredOwnPropertyNames(p)); + ArrayPrototypePush(memberGroups, filteredOwnPropertyNames(p)); p = ObjectGetPrototypeOf(p); } } catch { diff --git a/lib/timers.js b/lib/timers.js index b594f79d120e40..a0e58810752035 100644 --- a/lib/timers.js +++ b/lib/timers.js @@ -182,7 +182,7 @@ ObjectDefineProperty(setTimeout, customPromisify, { * @returns {void} */ function clearTimeout(timer) { - if (timer && timer._onTimeout) { + if (timer?._onTimeout) { timer._onTimeout = null; unenroll(timer); return; diff --git a/lib/url.js b/lib/url.js index 8fa8553c2b3f30..a2dc57e8bd24e4 100644 --- a/lib/url.js +++ b/lib/url.js @@ -30,7 +30,7 @@ const { decodeURIComponent, } = primordials; -const { toASCII } = require('internal/idna'); +const { toASCII } = internalBinding('encoding_binding'); const { encodeStr, hexTable } = require('internal/querystring'); const querystring = require('querystring'); diff --git a/lib/v8.js b/lib/v8.js index b687d8709c99a0..7a8979887bab49 100644 --- a/lib/v8.js +++ b/lib/v8.js @@ -49,7 +49,6 @@ if (internalBinding('config').hasInspector) { } const assert = require('internal/assert'); -const { copy } = internalBinding('buffer'); const { inspect } = require('internal/util/inspect'); const { FastBuffer } = require('internal/buffer'); const { getValidatedPath } = require('internal/fs/utils'); @@ -368,7 +367,7 @@ class DefaultDeserializer extends Deserializer { } // Copy to an aligned buffer first. 
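The `// Copy to an aligned buffer first.` step below replaces the internal `copy` binding with a plain typed-array copy. A self-contained sketch of the same sub-range copy (variable names are illustrative, not from the patch):

```js
// Copy `byteLength` bytes starting at `byteOffset` within `source` into a
// fresh allocation, whose start is suitably aligned for building typed
// array views over it.
const source = Buffer.from([1, 2, 3, 4, 5, 6, 7, 8]);
const byteOffset = 2;
const byteLength = 4;

const alignedCopy = Buffer.allocUnsafe(byteLength);
// The Uint8Array view selects the sub-range without copying; set() then
// performs the single copy into the aligned destination.
alignedCopy.set(
  new Uint8Array(source.buffer, source.byteOffset + byteOffset, byteLength),
);
console.log(alignedCopy); // <Buffer 03 04 05 06>
```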
const buffer_copy = Buffer.allocUnsafe(byteLength); - copy(this.buffer, buffer_copy, 0, byteOffset, byteOffset + byteLength); + buffer_copy.set(new Uint8Array(this.buffer.buffer, this.buffer.byteOffset + byteOffset, byteLength)); return new ctor(buffer_copy.buffer, buffer_copy.byteOffset, byteLength / BYTES_PER_ELEMENT); diff --git a/lib/worker_threads.js b/lib/worker_threads.js index 71ce4de37abce4..e874671ea12221 100644 --- a/lib/worker_threads.js +++ b/lib/worker_threads.js @@ -13,6 +13,7 @@ const { const { MessagePort, MessageChannel, + markAsUncloneable, moveMessagePortToContext, receiveMessageOnPort, BroadcastChannel, @@ -31,6 +32,7 @@ module.exports = { isMainThread, MessagePort, MessageChannel, + markAsUncloneable, markAsUntransferable, isMarkedAsUntransferable, moveMessagePortToContext, diff --git a/lib/zlib.js b/lib/zlib.js index 1210f3baa6962b..f9f859a35cdb2d 100644 --- a/lib/zlib.js +++ b/lib/zlib.js @@ -23,21 +23,16 @@ const { ArrayBuffer, - ArrayPrototypeForEach, - ArrayPrototypeMap, - ArrayPrototypePush, - FunctionPrototypeBind, - MathMaxApply, + MathMax, NumberIsNaN, ObjectDefineProperties, ObjectDefineProperty, + ObjectEntries, ObjectFreeze, ObjectKeys, ObjectSetPrototypeOf, ReflectApply, - StringPrototypeStartsWith, Symbol, - TypedArrayPrototypeFill, Uint32Array, } = primordials; @@ -47,7 +42,6 @@ const { ERR_BUFFER_TOO_LARGE, ERR_INVALID_ARG_TYPE, ERR_OUT_OF_RANGE, - ERR_ZLIB_INITIALIZATION_FAILED, }, genericNodeError, } = require('internal/errors'); @@ -130,10 +124,11 @@ function zlibBuffer(engine, buffer, callback) { } function zlibBufferOnData(chunk) { - if (!this.buffers) + if (!this.buffers) { this.buffers = [chunk]; - else - ArrayPrototypePush(this.buffers, chunk); + } else { + this.buffers.push(chunk); + } this.nread += chunk.length; if (this.nread > this._maxOutputLength) { this.close(); @@ -261,7 +256,7 @@ function ZlibBase(opts, mode, handle, { flush, finishFlush, fullFlush }) { this._defaultFlushFlag = flush; this._finishFlushFlag = finishFlush; this._defaultFullFlushFlag = fullFlush; - this._info = opts && opts.info; + this._info = opts?.info; this._maxOutputLength = maxOutputLength; } ObjectSetPrototypeOf(ZlibBase.prototype, Transform.prototype); @@ -442,7 +437,7 @@ function processChunkSync(self, chunk, flushFlag) { if (have > 0) { const out = buffer.slice(offset, offset + have); offset += have; - ArrayPrototypePush(buffers, out); + buffers.push(out); nread += out.byteLength; if (nread > self._maxOutputLength) { @@ -700,9 +695,10 @@ Zlib.prototype.params = function params(level, strategy, callback) { checkRangesOrGetDefault(strategy, 'strategy', Z_DEFAULT_STRATEGY, Z_FIXED); if (this._level !== level || this._strategy !== strategy) { - this.flush(Z_SYNC_FLUSH, - FunctionPrototypeBind(paramsAfterFlushCallback, this, - level, strategy, callback)); + this.flush( + Z_SYNC_FLUSH, + paramsAfterFlushCallback.bind(this, level, strategy, callback), + ); } else { process.nextTick(callback); } @@ -782,13 +778,10 @@ function createConvenienceMethod(ctor, sync) { }; } -const kMaxBrotliParam = MathMaxApply(ArrayPrototypeMap( - ObjectKeys(constants), - (key) => (StringPrototypeStartsWith(key, 'BROTLI_PARAM_') ? - constants[key] : - 0), -)); - +const kMaxBrotliParam = MathMax( + ...ObjectEntries(constants) + .map(({ 0: key, 1: value }) => (key.startsWith('BROTLI_PARAM_') ? 
value : 0)), +); const brotliInitParamsArray = new Uint32Array(kMaxBrotliParam + 1); const brotliDefaultOpts = { @@ -799,9 +792,9 @@ const brotliDefaultOpts = { function Brotli(opts, mode) { assert(mode === BROTLI_DECODE || mode === BROTLI_ENCODE); - TypedArrayPrototypeFill(brotliInitParamsArray, -1); + brotliInitParamsArray.fill(-1); if (opts?.params) { - ArrayPrototypeForEach(ObjectKeys(opts.params), (origKey) => { + ObjectKeys(opts.params).forEach((origKey) => { const key = +origKey; if (NumberIsNaN(key) || key < 0 || key > kMaxBrotliParam || (brotliInitParamsArray[key] | 0) !== -1) { @@ -821,14 +814,7 @@ function Brotli(opts, mode) { new binding.BrotliDecoder(mode) : new binding.BrotliEncoder(mode); this._writeState = new Uint32Array(2); - // TODO(addaleax): Sometimes we generate better error codes in C++ land, - // e.g. ERR_BROTLI_PARAM_SET_FAILED -- it's hard to access them with - // the current bindings setup, though. - if (!handle.init(brotliInitParamsArray, - this._writeState, - processCallback)) { - throw new ERR_ZLIB_INITIALIZATION_FAILED(); - } + handle.init(brotliInitParamsArray, this._writeState, processCallback); ReflectApply(ZlibBase, this, [opts, mode, handle, brotliDefaultOpts]); } @@ -939,10 +925,12 @@ ObjectDefineProperties(module.exports, { // These should be considered deprecated // expose all the zlib constants -for (const bkey of ObjectKeys(constants)) { - if (StringPrototypeStartsWith(bkey, 'BROTLI')) continue; - ObjectDefineProperty(module.exports, bkey, { +for (const { 0: key, 1: value } of ObjectEntries(constants)) { + if (key.startsWith('BROTLI')) continue; + ObjectDefineProperty(module.exports, key, { __proto__: null, - enumerable: false, value: constants[bkey], writable: false, + enumerable: false, + value, + writable: false, }); } diff --git a/onboarding.md b/onboarding.md index e842c3b7e4eb57..0731161a4c4e25 100644 --- a/onboarding.md +++ b/onboarding.md @@ -216,6 +216,13 @@ needs to be pointed out separately during the onboarding. `git show --format=%B 6669b3857f0f43ee0296eb7ac45086cd907b9e94` * Collaborators are in alphabetical order by GitHub username. * Optionally, include your personal pronouns. +* The PR should include an addition to the + [mailmap](.mailmap) file if the email + being added to the collaborator list does not match the email used for + commits. Otherwise tooling will not see the collaborator as being active and + may suggest removing them. See + [gitmailmap](https://git-scm.com/docs/gitmailmap) for information on the + format of the mailmap file. * Add the `Fixes: ` to the commit message so that when the commit lands, the nomination issue url will be automatically closed. 
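For illustration (the name and addresses below are hypothetical), a mailmap entry pairs the canonical identity with the address that appears on commits:

```text
# .mailmap: canonical identity first, then the address used in commits.
Jane Doe <jane@example.org> <jdoe@work.example.com>
```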
diff --git a/pyproject.toml b/pyproject.toml index 84470a8a18b46d..8e97e3b4446293 100644 --- a/pyproject.toml +++ b/pyproject.toml @@ -5,6 +5,7 @@ exclude = [ "tools/gyp", "tools/inspector_protocol", "tools/eslint/node_modules", + "tools/github_reporter" ] line-length = 172 target-version = "py38" @@ -40,12 +41,6 @@ ignore = [ [tool.ruff.lint.mccabe] max-complexity = 100 -[tool.ruff.lint.per-file-ignores] -"tools/checkimports.py" = ["W605"] -"tools/gyp/pylib/gyp/xcodeproj_file.py" = ["PLE0101"] -"tools/icu/shrink-icu-src.py" = ["W605"] -"tools/mkssldef.py" = ["W605"] - [tool.ruff.lint.pylint] max-args = 12 max-branches = 110 diff --git a/src/.clang-tidy b/src/.clang-tidy index 1713161c8ca9e0..9cb5edbe5bee8a 100644 --- a/src/.clang-tidy +++ b/src/.clang-tidy @@ -5,15 +5,19 @@ Checks: '-*, modernize-deprecated-headers, modernize-make-unique, modernize-make-shared, + modernize-raw-string-literal, modernize-redundant-void-arg, modernize-replace-random-shuffle, modernize-shrink-to-fit, + modernize-use-default-member-init, modernize-use-bool-literals, modernize-use-emplace, modernize-use-equals-default, modernize-use-nullptr, modernize-use-override, + modernize-use-starts-ends-with, performance-faster-string-find, + performance-implicit-conversion-in-loop, # performance-unnecessary-value-param, see https://github.com/nodejs/node/pull/26042 readability-delete-null-pointer, ' WarningsAsErrors: '' diff --git a/src/aliased_buffer.h b/src/aliased_buffer.h index b847641f8faa15..8c93ef4b317ab0 100644 --- a/src/aliased_buffer.h +++ b/src/aliased_buffer.h @@ -111,7 +111,7 @@ class AliasedBufferBase : public MemoryRetainer { }; /** - * Get the underlying v8 TypedArray overlayed on top of the native buffer + * Get the underlying v8 TypedArray overlaid on top of the native buffer */ v8::Local GetJSArray() const; diff --git a/src/api/callback.cc b/src/api/callback.cc index c1ec67ae657f32..26628b31543e03 100644 --- a/src/api/callback.cc +++ b/src/api/callback.cc @@ -74,7 +74,7 @@ InternalCallbackScope::InternalCallbackScope(Environment* env, // We first check `env->context() != current_context` because the contexts // likely *are* the same, in which case we can skip the slightly more // expensive Environment::GetCurrent() call. - if (UNLIKELY(env->context() != current_context)) { + if (env->context() != current_context) [[unlikely]] { CHECK_EQ(Environment::GetCurrent(isolate), env); } diff --git a/src/api/environment.cc b/src/api/environment.cc index 77c20a4b6b9db4..ad323fc800a33c 100644 --- a/src/api/environment.cc +++ b/src/api/environment.cc @@ -31,6 +31,7 @@ using v8::FunctionCallbackInfo; using v8::HandleScope; using v8::Isolate; using v8::Just; +using v8::JustVoid; using v8::Local; using v8::Maybe; using v8::MaybeLocal; @@ -107,15 +108,17 @@ void* NodeArrayBufferAllocator::Allocate(size_t size) { ret = allocator_->Allocate(size); else ret = allocator_->AllocateUninitialized(size); - if (LIKELY(ret != nullptr)) + if (ret != nullptr) [[likely]] { total_mem_usage_.fetch_add(size, std::memory_order_relaxed); + } return ret; } void* NodeArrayBufferAllocator::AllocateUninitialized(size_t size) { void* ret = allocator_->AllocateUninitialized(size); - if (LIKELY(ret != nullptr)) + if (ret != nullptr) [[likely]] { total_mem_usage_.fetch_add(size, std::memory_order_relaxed); + } return ret; } @@ -635,7 +638,7 @@ void ProtoThrower(const FunctionCallbackInfo& info) { // This runs at runtime, regardless of whether the context // is created from a snapshot. 
-Maybe<bool> InitializeContextRuntime(Local<Context> context) {
+Maybe<void> InitializeContextRuntime(Local<Context> context) {
   Isolate* isolate = context->GetIsolate();
   HandleScope handle_scope(isolate);
@@ -653,7 +656,7 @@ Maybe<bool> InitializeContextRuntime(Local<Context> context) {
       Boolean::New(isolate, is_code_generation_from_strings_allowed));

   if (per_process::cli_options->disable_proto == "") {
-    return Just(true);
+    return JustVoid();
   }

   // Remove __proto__
@@ -669,14 +672,14 @@ Maybe<bool> InitializeContextRuntime(Local<Context> context) {
     if (!context->Global()
              ->Get(context, object_string)
              .ToLocal(&object_v)) {
-      return Nothing<bool>();
+      return Nothing<void>();
     }

     Local<Value> prototype_v;
     if (!object_v.As<Object>()
              ->Get(context, prototype_string)
              .ToLocal(&prototype_v)) {
-      return Nothing<bool>();
+      return Nothing<void>();
     }

     prototype = prototype_v.As<Object>();
@@ -689,13 +692,13 @@ Maybe<bool> InitializeContextRuntime(Local<Context> context) {
     if (prototype
             ->Delete(context, proto_string)
             .IsNothing()) {
-      return Nothing<bool>();
+      return Nothing<void>();
     }
   } else if (per_process::cli_options->disable_proto == "throw") {
     Local<Function> thrower;
     if (!Function::New(context, ProtoThrower)
              .ToLocal(&thrower)) {
-      return Nothing<bool>();
+      return Nothing<void>();
     }

     PropertyDescriptor descriptor(thrower, thrower);
@@ -704,17 +707,17 @@ Maybe<bool> InitializeContextRuntime(Local<Context> context) {
     if (prototype
             ->DefineProperty(context, proto_string, descriptor)
             .IsNothing()) {
-      return Nothing<bool>();
+      return Nothing<void>();
     }
   } else if (per_process::cli_options->disable_proto != "") {
     // Validated in ProcessGlobalArgs
     UNREACHABLE("invalid --disable-proto mode");
   }

-  return Just(true);
+  return JustVoid();
 }

-Maybe<bool> InitializeBaseContextForSnapshot(Local<Context> context) {
+Maybe<void> InitializeBaseContextForSnapshot(Local<Context> context) {
   Isolate* isolate = context->GetIsolate();
   HandleScope handle_scope(isolate);
@@ -728,18 +731,18 @@ Maybe<bool> InitializeBaseContextForSnapshot(Local<Context> context) {
     Local<Value> intl_v;
     if (!context->Global()->Get(context, intl_string).ToLocal(&intl_v)) {
-      return Nothing<bool>();
+      return Nothing<void>();
     }

     if (intl_v->IsObject() &&
         intl_v.As<Object>()->Delete(context, break_iter_string).IsNothing()) {
-      return Nothing<bool>();
+      return Nothing<void>();
     }
   }
-  return Just(true);
+  return JustVoid();
 }

-Maybe<bool> InitializeMainContextForSnapshot(Local<Context> context) {
+Maybe<void> InitializeMainContextForSnapshot(Local<Context> context) {
   Isolate* isolate = context->GetIsolate();
   HandleScope handle_scope(isolate);
@@ -750,12 +753,12 @@ Maybe<bool> InitializeMainContextForSnapshot(Local<Context> context) {
       ContextEmbedderIndex::kAllowCodeGenerationFromStrings, True(isolate));

   if (InitializeBaseContextForSnapshot(context).IsNothing()) {
-    return Nothing<bool>();
+    return Nothing<void>();
   }
   return InitializePrimordials(context);
 }

-Maybe<bool> InitializePrimordials(Local<Context> context) {
+Maybe<void> InitializePrimordials(Local<Context> context) {
   // Run per-context JS files.
   Isolate* isolate = context->GetIsolate();
   Context::Scope context_scope(context);
@@ -769,7 +772,7 @@ Maybe<bool> InitializePrimordials(Local<Context> context) {
   if (primordials->SetPrototype(context, Null(isolate)).IsNothing() ||
       !GetPerContextExports(context).ToLocal(&exports) ||
       exports->Set(context, primordials_string, primordials).IsNothing()) {
-    return Nothing<bool>();
+    return Nothing<void>();
   }

   static const char* context_files[] = {"internal/per_context/primordials",
@@ -793,11 +796,11 @@ Maybe<bool> InitializePrimordials(Local<Context> context) {
             context, *module, arraysize(arguments), arguments, nullptr)
             .IsEmpty()) {
       // Execution failed during context creation.
-      return Nothing<bool>();
+      return Nothing<void>();
     }
   }
-  return Just(true);
+  return JustVoid();
 }

 // This initializes the main context (i.e. vm contexts are not included).
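The hunks above all apply the same mechanical conversion: `Maybe<bool>` returns that only ever carried `true` become `Maybe<void>`, with `JustVoid()` on success and `Nothing<void>()` on failure. A minimal sketch of the pattern (`DoSetup` is a hypothetical stand-in, not a function from the patch):

```cpp
#include "v8.h"

// Success used to be `return Just(true)`; the bool carried no information,
// so Maybe<void> expresses "succeeded or didn't" directly.
v8::Maybe<void> DoSetup(v8::Local<v8::Context> context) {
  if (context.IsEmpty()) {        // placeholder for a real failing V8 call
    return v8::Nothing<void>();   // failure path keeps its shape
  }
  return v8::JustVoid();          // was: return v8::Just(true);
}

// Callers only ever tested for failure, so nothing changes for them:
//   if (DoSetup(context).IsNothing()) return;
```

The public `InitializeContext()` in the next hunk keeps returning `Maybe<bool>` for API compatibility, re-wrapping the `Maybe<void>` result into `Just(true)`.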
@@ -806,7 +809,10 @@ Maybe InitializeContext(Local context) { return Nothing(); } - return InitializeContextRuntime(context); + if (InitializeContextRuntime(context).IsNothing()) { + return Nothing(); + } + return Just(true); } uv_loop_t* GetCurrentEventLoop(Isolate* isolate) { diff --git a/src/api/hooks.cc b/src/api/hooks.cc index 65d39e6b9ff921..cf95d009d4a088 100644 --- a/src/api/hooks.cc +++ b/src/api/hooks.cc @@ -46,8 +46,8 @@ Maybe EmitProcessBeforeExit(Environment* env) { Local exit_code = Integer::New( isolate, static_cast(env->exit_code(ExitCode::kNoFailure))); - return ProcessEmit(env, "beforeExit", exit_code).IsEmpty() ? - Nothing() : Just(true); + return ProcessEmit(env, "beforeExit", exit_code).IsEmpty() ? Nothing() + : Just(true); } static ExitCode EmitExitInternal(Environment* env) { diff --git a/src/base_object.cc b/src/base_object.cc index 58ceecca2f91d7..b496aa0af3ed0b 100644 --- a/src/base_object.cc +++ b/src/base_object.cc @@ -10,6 +10,7 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::HandleScope; using v8::Just; +using v8::JustVoid; using v8::Local; using v8::Maybe; using v8::Object; @@ -31,7 +32,7 @@ BaseObject::~BaseObject() { realm()->modify_base_object_count(-1); realm()->RemoveCleanupHook(DeleteMe, static_cast(this)); - if (UNLIKELY(has_pointer_data())) { + if (has_pointer_data()) [[unlikely]] { PointerData* metadata = pointer_data(); CHECK_EQ(metadata->strong_ptr_count, 0); metadata->self = nullptr; @@ -110,9 +111,9 @@ Maybe>> BaseObject::NestedTransferables() return Just(std::vector>{}); } -Maybe BaseObject::FinalizeTransferRead(Local context, +Maybe BaseObject::FinalizeTransferRead(Local context, ValueDeserializer* deserializer) { - return Just(true); + return JustVoid(); } BaseObject::PointerData* BaseObject::pointer_data() { diff --git a/src/base_object.h b/src/base_object.h index 546d968e5ca424..cbf27190f135e7 100644 --- a/src/base_object.h +++ b/src/base_object.h @@ -173,7 +173,7 @@ class BaseObject : public MemoryRetainer { virtual std::unique_ptr CloneForMessaging() const; virtual v8::Maybe>> NestedTransferables() const; - virtual v8::Maybe FinalizeTransferRead( + virtual v8::Maybe FinalizeTransferRead( v8::Local context, v8::ValueDeserializer* deserializer); // Indicates whether this object is expected to use a strong reference during diff --git a/src/cares_wrap.cc b/src/cares_wrap.cc index 436b2e3e002d81..ac79eeaaf7b150 100644 --- a/src/cares_wrap.cc +++ b/src/cares_wrap.cc @@ -66,6 +66,7 @@ using v8::Int32; using v8::Integer; using v8::Isolate; using v8::Just; +using v8::JustVoid; using v8::Local; using v8::Maybe; using v8::Nothing; @@ -891,8 +892,9 @@ int SoaTraits::Send(QueryWrap* wrap, const char* name) { int AnyTraits::Parse( QueryAnyWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1058,8 +1060,9 @@ int AnyTraits::Parse( int ATraits::Parse( QueryAWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1092,8 +1095,9 @@ int ATraits::Parse( int AaaaTraits::Parse( QueryAaaaWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ 
-1126,8 +1130,9 @@ int AaaaTraits::Parse( int CaaTraits::Parse( QueryCaaWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1148,8 +1153,9 @@ int CaaTraits::Parse( int CnameTraits::Parse( QueryCnameWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1171,8 +1177,9 @@ int CnameTraits::Parse( int MxTraits::Parse( QueryMxWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1194,8 +1201,9 @@ int MxTraits::Parse( int NsTraits::Parse( QueryNsWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1217,8 +1225,9 @@ int NsTraits::Parse( int TxtTraits::Parse( QueryTxtWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1239,8 +1248,9 @@ int TxtTraits::Parse( int SrvTraits::Parse( QuerySrvWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1261,9 +1271,9 @@ int SrvTraits::Parse( int PtrTraits::Parse( QueryPtrWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; - + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1285,9 +1295,9 @@ int PtrTraits::Parse( int NaptrTraits::Parse( QueryNaptrWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; - + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1307,9 +1317,9 @@ int NaptrTraits::Parse( int SoaTraits::Parse( QuerySoaWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(response->is_host)) + if (response->is_host) [[unlikely]] { return ARES_EBADRESP; - + } unsigned char* buf = response->buf.data; int len = response->buf.size; @@ -1387,9 +1397,9 @@ int ReverseTraits::Send(GetHostByAddrWrap* wrap, const char* name) { int ReverseTraits::Parse( GetHostByAddrWrap* wrap, const std::unique_ptr& response) { - if (UNLIKELY(!response->is_host)) + if (!response->is_host) [[unlikely]] { return ARES_EBADRESP; - + } struct hostent* host = response->host.get(); Environment* env = wrap->env(); @@ -1450,7 +1460,7 @@ void AfterGetAddrInfo(uv_getaddrinfo_t* req, int status, struct addrinfo* res) { if (status == 0) { Local results = Array::New(env->isolate()); - auto add = [&] (bool want_ipv4, bool want_ipv6) -> Maybe { + auto add = [&](bool want_ipv4, bool want_ipv6) -> Maybe { for (auto p = res; p != nullptr; p = p->ai_next) { CHECK_EQ(p->ai_socktype, SOCK_STREAM); @@ -1471,21 +1481,21 @@ void AfterGetAddrInfo(uv_getaddrinfo_t* req, int status, struct addrinfo* res) { Local s = OneByteString(env->isolate(), ip); if 
(results->Set(env->context(), n, s).IsNothing()) - return Nothing(); + return Nothing(); n++; } - return Just(true); + return JustVoid(); }; switch (order) { case DNS_ORDER_IPV4_FIRST: - if (add(true, false).IsNothing()) return; - if (add(false, true).IsNothing()) return; + if (add(true, false).IsNothing() || add(false, true).IsNothing()) + return; break; case DNS_ORDER_IPV6_FIRST: - if (add(false, true).IsNothing()) return; - if (add(true, false).IsNothing()) return; + if (add(false, true).IsNothing() || add(true, false).IsNothing()) + return; break; default: @@ -1956,16 +1966,6 @@ void Initialize(Local target, FIXED_ONE_BYTE_STRING(env->isolate(), "DNS_ORDER_IPV6_FIRST"), Integer::New(env->isolate(), DNS_ORDER_IPV6_FIRST)) .Check(); - target - ->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "AF_INET"), - Integer::New(env->isolate(), AF_INET)) - .Check(); - target - ->Set(env->context(), - FIXED_ONE_BYTE_STRING(env->isolate(), "AF_INET"), - Integer::New(env->isolate(), AF_INET)) - .Check(); Local aiw = BaseObject::MakeLazilyInitializedJSTemplate(env); diff --git a/src/compile_cache.cc b/src/compile_cache.cc index f5588fa99d5394..50697bcfe1671d 100644 --- a/src/compile_cache.cc +++ b/src/compile_cache.cc @@ -6,8 +6,13 @@ #include "node_internals.h" #include "node_version.h" #include "path.h" +#include "util.h" #include "zlib.h" +#ifdef NODE_IMPLEMENTS_POSIX_CREDENTIALS +#include // getuid +#endif + namespace node { std::string Uint32ToHex(uint32_t crc) { std::string str; @@ -54,7 +59,7 @@ uint32_t GetCacheKey(std::string_view filename, CachedCodeType type) { template inline void CompileCacheHandler::Debug(const char* format, Args&&... args) const { - if (UNLIKELY(is_debug_)) { + if (is_debug_) [[unlikely]] { FPrintF(stderr, format, std::forward(args)...); } } @@ -215,18 +220,17 @@ CompileCacheEntry* CompileCacheHandler::GetOrInsert( return loaded->second.get(); } + // If the code hash mismatches, the code has changed, discard the stale entry + // and create a new one. auto emplaced = compiler_cache_store_.emplace(key, std::make_unique()); auto* result = emplaced.first->second.get(); - std::u8string cache_filename_u8 = - (compile_cache_dir_ / Uint32ToHex(key)).u8string(); result->code_hash = code_hash; result->code_size = code_utf8.length(); result->cache_key = key; result->cache_filename = - std::string(cache_filename_u8.begin(), cache_filename_u8.end()) + - ".cache"; + compile_cache_dir_ + kPathSeparator + Uint32ToHex(key); result->source_filename = filename_utf8.ToString(); result->cache = nullptr; result->type = type; @@ -283,23 +287,33 @@ void CompileCacheHandler::MaybeSave(CompileCacheEntry* entry, MaybeSaveImpl(entry, func, rejected); } -// Layout of a cache file: -// [uint32_t] magic number -// [uint32_t] code size -// [uint32_t] code hash -// [uint32_t] cache size -// [uint32_t] cache hash -// .... compile cache content .... +/** + * Persist the compile cache accumulated in memory to disk. + * + * To avoid race conditions, the cache file includes hashes of the original + * source code and the cache content. It's first written to a temporary file + * before being renamed to the target name. + * + * Layout of a cache file: + * [uint32_t] magic number + * [uint32_t] code size + * [uint32_t] code hash + * [uint32_t] cache size + * [uint32_t] cache hash + * .... compile cache content .... 
+ */ void CompileCacheHandler::Persist() { DCHECK(!compile_cache_dir_.empty()); - // NOTE(joyeecheung): in most circumstances the code caching reading - // writing logic is lenient enough that it's fine even if someone - // overwrites the cache (that leads to either size or hash mismatch - // in subsequent loads and the overwritten cache will be ignored). - // Also in most use cases users should not change the files on disk - // too rapidly. Therefore locking is not currently implemented to - // avoid the cost. + // TODO(joyeecheung): do this using a separate event loop to utilize the + // libuv thread pool and do the file system operations concurrently. + // TODO(joyeecheung): Currently flushing is triggered by either process + // shutdown or user requests. In the future we should simply start the + // writes right after module loading on a separate thread, and this method + // only blocks until all the pending writes (if any) on the other thread are + // finished. In that case, the off-thread writes should finish long + // before any attempt of flushing is made so the method would then only + // incur a negligible overhead from thread synchronization. for (auto& pair : compiler_cache_store_) { auto* entry = pair.second.get(); if (entry->cache == nullptr) { @@ -312,6 +326,11 @@ void CompileCacheHandler::Persist() { entry->source_filename); continue; } + if (entry->persisted == true) { + Debug("[compile cache] skip %s because cache was already persisted\n", + entry->source_filename); + continue; + } DCHECK_EQ(entry->cache->buffer_policy, v8::ScriptCompiler::CachedData::BufferOwned); @@ -328,27 +347,94 @@ void CompileCacheHandler::Persist() { headers[kCodeHashOffset] = entry->code_hash; headers[kCacheHashOffset] = cache_hash; - Debug("[compile cache] writing cache for %s in %s [%d %d %d %d %d]...", + // Generate the temporary filename. + // The temporary file should be placed in a location like: + // + // $NODE_COMPILE_CACHE_DIR/v23.0.0-pre-arm64-5fad6d45-501/e7f8ef7f.cache.tcqrsK + // + // 1. $NODE_COMPILE_CACHE_DIR either comes from the $NODE_COMPILE_CACHE + // environment + // variable or `module.enableCompileCache()`. + // 2. v23.0.0-pre-arm64-5fad6d45-501 is the sub cache directory and + // e7f8ef7f is the hash for the cache (see + // CompileCacheHandler::Enable()), + // 3. tcqrsK is generated by uv_fs_mkstemp() as a temporary identifier. + uv_fs_t mkstemp_req; + auto cleanup_mkstemp = + OnScopeLeave([&mkstemp_req]() { uv_fs_req_cleanup(&mkstemp_req); }); + std::string cache_filename_tmp = entry->cache_filename + ".XXXXXX"; + Debug("[compile cache] Creating temporary file for cache of %s...", + entry->source_filename); + int err = uv_fs_mkstemp( + nullptr, &mkstemp_req, cache_filename_tmp.c_str(), nullptr); + if (err < 0) { + Debug("failed. %s\n", uv_strerror(err)); + continue; + } + Debug(" -> %s\n", mkstemp_req.path); + Debug("[compile cache] writing cache for %s to temporary file %s [%d %d %d " + "%d %d]...", entry->source_filename, - entry->cache_filename, + mkstemp_req.path, headers[kMagicNumberOffset], headers[kCodeSizeOffset], headers[kCacheSizeOffset], headers[kCodeHashOffset], headers[kCacheHashOffset]); + // Write to the temporary file. 
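The mkstemp/write/close/rename sequence in this hunk is the standard atomic-publish pattern: stage the bytes in a uniquely named temporary file, then rename it over the final name so readers only ever observe a complete cache file. A stripped-down sketch of the same sequence with libuv's synchronous API (file names are illustrative; real code must also `uv_fs_req_cleanup()` each request, as the patch does via `OnScopeLeave`):

```cpp
#include <uv.h>

#include <cstdio>

// Stage `data` in "<final_name>.XXXXXX", then atomically rename it over
// `final_name`; rename() is atomic on POSIX within one filesystem.
bool PersistAtomically(const char* final_name, const char* data, size_t len) {
  char tmpl[1024];
  snprintf(tmpl, sizeof(tmpl), "%s.XXXXXX", final_name);

  uv_fs_t mkstemp_req;
  int fd = uv_fs_mkstemp(nullptr, &mkstemp_req, tmpl, nullptr);
  if (fd < 0) return false;  // e.g. directory missing or not writable

  uv_buf_t buf = uv_buf_init(const_cast<char*>(data),
                             static_cast<unsigned int>(len));
  uv_fs_t write_req, close_req, rename_req;
  return uv_fs_write(nullptr, &write_req, fd, &buf, 1, 0, nullptr) >= 0 &&
         uv_fs_close(nullptr, &close_req, fd, nullptr) == 0 &&
         uv_fs_rename(nullptr, &rename_req, mkstemp_req.path, final_name,
                      nullptr) == 0;  // the atomic publish step
}
```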
uv_buf_t headers_buf = uv_buf_init(reinterpret_cast(headers.data()), headers.size() * sizeof(uint32_t)); uv_buf_t data_buf = uv_buf_init(cache_ptr, entry->cache->length); uv_buf_t bufs[] = {headers_buf, data_buf}; - int err = WriteFileSync(entry->cache_filename.c_str(), bufs, 2); + uv_fs_t write_req; + auto cleanup_write = + OnScopeLeave([&write_req]() { uv_fs_req_cleanup(&write_req); }); + err = uv_fs_write( + nullptr, &write_req, mkstemp_req.result, bufs, 2, 0, nullptr); if (err < 0) { Debug("failed: %s\n", uv_strerror(err)); - } else { - Debug("success\n"); + continue; } + + uv_fs_t close_req; + auto cleanup_close = + OnScopeLeave([&close_req]() { uv_fs_req_cleanup(&close_req); }); + err = uv_fs_close(nullptr, &close_req, mkstemp_req.result, nullptr); + + if (err < 0) { + Debug("failed: %s\n", uv_strerror(err)); + continue; + } + + Debug("success\n"); + + // Rename the temporary file to the actual cache file. + uv_fs_t rename_req; + auto cleanup_rename = + OnScopeLeave([&rename_req]() { uv_fs_req_cleanup(&rename_req); }); + std::string cache_filename_final = entry->cache_filename; + Debug("[compile cache] Renaming %s to %s...", + mkstemp_req.path, + cache_filename_final); + err = uv_fs_rename(nullptr, + &rename_req, + mkstemp_req.path, + cache_filename_final.c_str(), + nullptr); + if (err < 0) { + Debug("failed: %s\n", uv_strerror(err)); + continue; + } + Debug("success\n"); + entry->persisted = true; } + + // Clear the map at the end in one go instead of during the iteration to + // avoid rehashing costs. + Debug("[compile cache] Clear deserialized cache.\n"); + compiler_cache_store_.clear(); } CompileCacheHandler::CompileCacheHandler(Environment* env) @@ -358,49 +444,46 @@ CompileCacheHandler::CompileCacheHandler(Environment* env) // Directory structure: // - Compile cache directory (from NODE_COMPILE_CACHE) -// - $NODE_VERION-$ARCH-$CACHE_DATA_VERSION_TAG-$UID +// - $NODE_VERSION-$ARCH-$CACHE_DATA_VERSION_TAG-$UID // - $FILENAME_AND_MODULE_TYPE_HASH.cache: a hash of filename + module type CompileCacheEnableResult CompileCacheHandler::Enable(Environment* env, const std::string& dir) { std::string cache_tag = GetCacheVersionTag(); std::string absolute_cache_dir_base = PathResolve(env, {dir}); - std::filesystem::path cache_dir_with_tag = - std::filesystem::path(absolute_cache_dir_base) / cache_tag; - std::u8string cache_dir_with_tag_u8 = cache_dir_with_tag.u8string(); - std::string cache_dir_with_tag_str(cache_dir_with_tag_u8.begin(), - cache_dir_with_tag_u8.end()); + std::string cache_dir_with_tag = + absolute_cache_dir_base + kPathSeparator + cache_tag; CompileCacheEnableResult result; Debug("[compile cache] resolved path %s + %s -> %s\n", dir, cache_tag, - cache_dir_with_tag_str); + cache_dir_with_tag); - if (UNLIKELY(!env->permission()->is_granted( + if (!env->permission()->is_granted( env, permission::PermissionScope::kFileSystemWrite, - cache_dir_with_tag_str))) { + cache_dir_with_tag)) [[unlikely]] { result.message = "Skipping compile cache because write permission for " + - cache_dir_with_tag_str + " is not granted"; + cache_dir_with_tag + " is not granted"; result.status = CompileCacheEnableStatus::FAILED; return result; } - if (UNLIKELY(!env->permission()->is_granted( + if (!env->permission()->is_granted( env, permission::PermissionScope::kFileSystemRead, - cache_dir_with_tag_str))) { + cache_dir_with_tag)) [[unlikely]] { result.message = "Skipping compile cache because read permission for " + - cache_dir_with_tag_str + " is not granted"; + cache_dir_with_tag + " is not 
granted"; result.status = CompileCacheEnableStatus::FAILED; return result; } fs::FSReqWrapSync req_wrap; int err = fs::MKDirpSync( - nullptr, &(req_wrap.req), cache_dir_with_tag_str, 0777, nullptr); + nullptr, &(req_wrap.req), cache_dir_with_tag, 0777, nullptr); if (is_debug_) { Debug("[compile cache] creating cache directory %s...%s\n", - cache_dir_with_tag_str, + cache_dir_with_tag, err < 0 ? uv_strerror(err) : "success"); } if (err != 0 && err != UV_EEXIST) { @@ -410,7 +493,6 @@ CompileCacheEnableResult CompileCacheHandler::Enable(Environment* env, return result; } - compile_cache_dir_str_ = absolute_cache_dir_base; result.cache_directory = absolute_cache_dir_base; compile_cache_dir_ = cache_dir_with_tag; result.status = CompileCacheEnableStatus::ENABLED; diff --git a/src/compile_cache.h b/src/compile_cache.h index 975a3f2080f7a7..a7bb58c4a0be95 100644 --- a/src/compile_cache.h +++ b/src/compile_cache.h @@ -4,7 +4,6 @@ #if defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS #include -#include #include #include #include @@ -30,6 +29,8 @@ struct CompileCacheEntry { std::string source_filename; CachedCodeType type; bool refreshed = false; + bool persisted = false; + // Copy the cache into a new store for V8 to consume. Caller takes // ownership. v8::ScriptCompiler::CachedData* CopyCache() const; @@ -69,7 +70,7 @@ class CompileCacheHandler { void MaybeSave(CompileCacheEntry* entry, v8::Local mod, bool rejected); - std::string_view cache_dir() { return compile_cache_dir_str_; } + std::string_view cache_dir() { return compile_cache_dir_; } private: void ReadCacheFile(CompileCacheEntry* entry); @@ -92,8 +93,7 @@ class CompileCacheHandler { v8::Isolate* isolate_ = nullptr; bool is_debug_ = false; - std::string compile_cache_dir_str_; - std::filesystem::path compile_cache_dir_; + std::string compile_cache_dir_; std::unordered_map> compiler_cache_store_; }; diff --git a/src/crypto/crypto_aes.cc b/src/crypto/crypto_aes.cc index 542d23f2a1f146..d430648aebc540 100644 --- a/src/crypto/crypto_aes.cc +++ b/src/crypto/crypto_aes.cc @@ -339,7 +339,7 @@ bool ValidateIV( Local value, AESCipherConfig* params) { ArrayBufferOrViewContents iv(value); - if (UNLIKELY(!iv.CheckSizeInt32())) { + if (!iv.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "iv is too big"); return false; } @@ -377,7 +377,7 @@ bool ValidateAuthTag( return false; } ArrayBufferOrViewContents tag_contents(value); - if (UNLIKELY(!tag_contents.CheckSizeInt32())) { + if (!tag_contents.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "tagLength is too big"); return false; } @@ -412,7 +412,7 @@ bool ValidateAdditionalData( // Additional Data if (IsAnyBufferSource(value)) { ArrayBufferOrViewContents additional(value); - if (UNLIKELY(!additional.CheckSizeInt32())) { + if (!additional.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "additionalData is too big"); return false; } diff --git a/src/crypto/crypto_cipher.cc b/src/crypto/crypto_cipher.cc index fe35a8e0f6bbb7..a81ab8e95f2fc2 100644 --- a/src/crypto/crypto_cipher.cc +++ b/src/crypto/crypto_cipher.cc @@ -522,15 +522,16 @@ void CipherBase::InitIv(const FunctionCallbackInfo& args) { // raw bytes and proceed... const ByteSource key_buf = ByteSource::FromSecretKeyBytes(env, args[1]); - if (UNLIKELY(key_buf.size() > INT_MAX)) + if (key_buf.size() > INT_MAX) [[unlikely]] { return THROW_ERR_OUT_OF_RANGE(env, "key is too big"); + } ArrayBufferOrViewContents iv_buf( !args[2]->IsNull() ? 
args[2] : Local()); - if (UNLIKELY(!iv_buf.CheckSizeInt32())) + if (!iv_buf.CheckSizeInt32()) [[unlikely]] { return THROW_ERR_OUT_OF_RANGE(env, "iv is too big"); - + } // Don't assign to cipher->auth_tag_len_ directly; the value might not // represent a valid length at this point. unsigned int auth_tag_len; @@ -672,9 +673,9 @@ void CipherBase::SetAuthTag(const FunctionCallbackInfo& args) { } ArrayBufferOrViewContents auth_tag(args[0]); - if (UNLIKELY(!auth_tag.CheckSizeInt32())) + if (!auth_tag.CheckSizeInt32()) [[unlikely]] { return THROW_ERR_OUT_OF_RANGE(env, "buffer is too big"); - + } unsigned int tag_len = auth_tag.size(); const int mode = EVP_CIPHER_CTX_mode(cipher->ctx_.get()); @@ -782,8 +783,9 @@ void CipherBase::SetAAD(const FunctionCallbackInfo& args) { int plaintext_len = args[1].As()->Value(); ArrayBufferOrViewContents buf(args[0]); - if (UNLIKELY(!buf.CheckSizeInt32())) + if (!buf.CheckSizeInt32()) [[unlikely]] { return THROW_ERR_OUT_OF_RANGE(env, "buffer is too big"); + } args.GetReturnValue().Set(cipher->SetAAD(buf, plaintext_len)); } @@ -859,9 +861,9 @@ void CipherBase::Update(const FunctionCallbackInfo& args) { std::unique_ptr out; Environment* env = Environment::GetCurrent(args); - if (UNLIKELY(size > INT_MAX)) + if (size > INT_MAX) [[unlikely]] { return THROW_ERR_OUT_OF_RANGE(env, "data is too long"); - + } UpdateResult r = cipher->Update(data, size, &out); if (r != kSuccess) { @@ -996,7 +998,7 @@ bool PublicKeyCipher::Cipher( const ArrayBufferOrViewContents& oaep_label, const ArrayBufferOrViewContents& data, std::unique_ptr* out) { - EVPKeyCtxPointer ctx(EVP_PKEY_CTX_new(pkey.get(), nullptr)); + EVPKeyCtxPointer ctx = pkey.newCtx(); if (!ctx) return false; if (EVP_PKEY_cipher_init(ctx.get()) <= 0) @@ -1064,15 +1066,15 @@ void PublicKeyCipher::Cipher(const FunctionCallbackInfo& args) { return; ArrayBufferOrViewContents buf(args[offset]); - if (UNLIKELY(!buf.CheckSizeInt32())) + if (!buf.CheckSizeInt32()) [[unlikely]] { return THROW_ERR_OUT_OF_RANGE(env, "buffer is too long"); - + } uint32_t padding; if (!args[offset + 1]->Uint32Value(env->context()).To(&padding)) return; if (EVP_PKEY_cipher == EVP_PKEY_decrypt && operation == PublicKeyCipher::kPrivate && padding == RSA_PKCS1_PADDING) { - EVPKeyCtxPointer ctx(EVP_PKEY_CTX_new(pkey.get(), nullptr)); + EVPKeyCtxPointer ctx = pkey.newCtx(); CHECK(ctx); if (EVP_PKEY_decrypt_init(ctx.get()) <= 0) { @@ -1106,9 +1108,9 @@ void PublicKeyCipher::Cipher(const FunctionCallbackInfo& args) { ArrayBufferOrViewContents oaep_label( !args[offset + 3]->IsUndefined() ? 
args[offset + 3] : Local()); - if (UNLIKELY(!oaep_label.CheckSizeInt32())) + if (!oaep_label.CheckSizeInt32()) [[unlikely]] { return THROW_ERR_OUT_OF_RANGE(env, "oaepLabel is too big"); - + } std::unique_ptr out; if (!Cipher( env, pkey, padding, digest, oaep_label, buf, &out)) { diff --git a/src/crypto/crypto_common.cc b/src/crypto/crypto_common.cc index 6a967702b22df0..43a126f863779d 100644 --- a/src/crypto/crypto_common.cc +++ b/src/crypto/crypto_common.cc @@ -449,15 +449,14 @@ MaybeLocal GetEphemeralKey(Environment* env, const SSLPointer& ssl) { Local context = env->context(); crypto::EVPKeyPointer key(raw_key); - int kid = EVP_PKEY_id(key.get()); - int bits = EVP_PKEY_bits(key.get()); + int kid = key.id(); switch (kid) { case EVP_PKEY_DH: if (!Set(context, info, env->type_string(), env->dh_string()) || !Set(context, - info, - env->size_string(), - Integer::New(env->isolate(), bits))) { + info, + env->size_string(), + Integer::New(env->isolate(), key.bits()))) { return MaybeLocal(); } break; @@ -473,18 +472,16 @@ MaybeLocal GetEphemeralKey(Environment* env, const SSLPointer& ssl) { } else { curve_name = OBJ_nid2sn(kid); } - if (!Set(context, - info, - env->type_string(), - env->ecdh_string()) || + if (!Set( + context, info, env->type_string(), env->ecdh_string()) || !Set(context, - info, - env->name_string(), - OneByteString(env->isolate(), curve_name)) || + info, + env->name_string(), + OneByteString(env->isolate(), curve_name)) || !Set(context, - info, - env->size_string(), - Integer::New(env->isolate(), bits))) { + info, + env->size_string(), + Integer::New(env->isolate(), key.bits()))) { return MaybeLocal(); } } diff --git a/src/crypto/crypto_context.cc b/src/crypto/crypto_context.cc index c924a54639e8c2..aa5fc61f19e435 100644 --- a/src/crypto/crypto_context.cc +++ b/src/crypto/crypto_context.cc @@ -685,7 +685,7 @@ void SecureContext::SetEngineKey(const FunctionCallbackInfo& args) { CHECK_EQ(args.Length(), 2); - if (UNLIKELY(env->permission()->enabled())) { + if (env->permission()->enabled()) [[unlikely]] { return THROW_ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED( env, "Programmatic selection of OpenSSL engines is unsupported while the " @@ -1198,7 +1198,7 @@ void SecureContext::SetClientCertEngine( // support multiple calls to SetClientCertEngine. CHECK(!sc->client_cert_engine_provided_); - if (UNLIKELY(env->permission()->enabled())) { + if (env->permission()->enabled()) [[unlikely]] { return THROW_ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED( env, "Programmatic selection of OpenSSL engines is unsupported while the " diff --git a/src/crypto/crypto_dh.cc b/src/crypto/crypto_dh.cc index e5664dfa2bc7e1..d760a0d3ea1d12 100644 --- a/src/crypto/crypto_dh.cc +++ b/src/crypto/crypto_dh.cc @@ -120,7 +120,7 @@ void New(const FunctionCallbackInfo& args) { // or an ArrayBuffer or ArrayBufferView with the generator. 
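These crypto hunks continue the sweep replacing the project's `UNLIKELY()`/`LIKELY()` macros with the standard C++20 `[[unlikely]]`/`[[likely]]` attributes. The attribute annotates the branch rather than the condition, and the patch adds braces so it clearly applies to the whole branch body. A sketch of the before/after shape:

```cpp
#include <cstddef>
#include <cstdint>

// Before (macro hints the *condition*):
//   if (UNLIKELY(size > INT32_MAX))
//     return false;
//
// After (attribute hints the *taken branch*):
bool CheckSizeInt32Sketch(size_t size) {
  if (size > INT32_MAX) [[unlikely]] {
    return false;  // cold path: steer the optimizer away from it
  }
  return true;
}
```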
ArrayBufferOrViewContents arg0(args[0]); - if (UNLIKELY(!arg0.CheckSizeInt32())) + if (!arg0.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "prime is too big"); BignumPointer bn_p(reinterpret_cast(arg0.data()), arg0.size()); @@ -142,7 +142,7 @@ void New(const FunctionCallbackInfo& args) { } } else { ArrayBufferOrViewContents arg1(args[1]); - if (UNLIKELY(!arg1.CheckSizeInt32())) + if (!arg1.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "generator is too big"); bn_g = BignumPointer(reinterpret_cast(arg1.data()), arg1.size()); if (!bn_g) { @@ -253,7 +253,7 @@ void ComputeSecret(const FunctionCallbackInfo& args) { CHECK_EQ(args.Length(), 1); ArrayBufferOrViewContents key_buf(args[0]); - if (UNLIKELY(!key_buf.CheckSizeInt32())) + if (!key_buf.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "secret is too big"); BignumPointer key(key_buf.data(), key_buf.size()); @@ -286,7 +286,7 @@ void SetPublicKey(const FunctionCallbackInfo& args) { DHPointer& dh = *diffieHellman; CHECK_EQ(args.Length(), 1); ArrayBufferOrViewContents buf(args[0]); - if (UNLIKELY(!buf.CheckSizeInt32())) + if (!buf.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "buf is too big"); BignumPointer num(buf.data(), buf.size()); CHECK(num); @@ -300,7 +300,7 @@ void SetPrivateKey(const FunctionCallbackInfo& args) { DHPointer& dh = *diffieHellman; CHECK_EQ(args.Length(), 1); ArrayBufferOrViewContents buf(args[0]); - if (UNLIKELY(!buf.CheckSizeInt32())) + if (!buf.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "buf is too big"); BignumPointer num(buf.data(), buf.size()); CHECK(num); @@ -368,7 +368,7 @@ Maybe DhKeyGenTraits::AdditionalConfig( params->params.prime = size; } else { ArrayBufferOrViewContents input(args[*offset]); - if (UNLIKELY(!input.CheckSizeInt32())) { + if (!input.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "prime is too big"); return Nothing(); } @@ -395,7 +395,7 @@ EVPKeyCtxPointer DhKeyGenTraits::Setup(DhKeyPairGenConfig* params) { auto dh = DHPointer::New(std::move(prime), std::move(bn_g)); if (!dh) return {}; - key_params = EVPKeyPointer(EVP_PKEY_new()); + key_params = EVPKeyPointer::New(); CHECK(key_params); CHECK_EQ(EVP_PKEY_assign_DH(key_params.get(), dh.release()), 1); } else if (int* prime_size = std::get_if(¶ms->params.prime)) { @@ -418,7 +418,7 @@ EVPKeyCtxPointer DhKeyGenTraits::Setup(DhKeyPairGenConfig* params) { UNREACHABLE(); } - EVPKeyCtxPointer ctx(EVP_PKEY_CTX_new(key_params.get(), nullptr)); + EVPKeyCtxPointer ctx = key_params.newCtx(); if (!ctx || EVP_PKEY_keygen_init(ctx.get()) <= 0) return {}; return ctx; @@ -533,7 +533,7 @@ bool DHBitsTraits::DeriveBits( Maybe GetDhKeyDetail(Environment* env, const KeyObjectData& key, Local target) { - CHECK_EQ(EVP_PKEY_id(key.GetAsymmetricKey().get()), EVP_PKEY_DH); + CHECK_EQ(key.GetAsymmetricKey().id(), EVP_PKEY_DH); return JustVoid(); } diff --git a/src/crypto/crypto_dsa.cc b/src/crypto/crypto_dsa.cc index 5d081863cf2dcd..b557de774117e4 100644 --- a/src/crypto/crypto_dsa.cc +++ b/src/crypto/crypto_dsa.cc @@ -60,7 +60,7 @@ EVPKeyCtxPointer DsaKeyGenTraits::Setup(DsaKeyPairGenConfig* params) { return EVPKeyCtxPointer(); EVPKeyPointer key_params(raw_params); - EVPKeyCtxPointer key_ctx(EVP_PKEY_CTX_new(key_params.get(), nullptr)); + EVPKeyCtxPointer key_ctx = key_params.newCtx(); if (!key_ctx || EVP_PKEY_keygen_init(key_ctx.get()) <= 0) return EVPKeyCtxPointer(); @@ -134,7 +134,7 @@ Maybe GetDsaKeyDetail(Environment* env, Mutex::ScopedLock 
lock(key.mutex()); const auto& m_pkey = key.GetAsymmetricKey(); - int type = EVP_PKEY_id(m_pkey.get()); + int type = m_pkey.id(); CHECK(type == EVP_PKEY_DSA); const DSA* dsa = EVP_PKEY_get0_DSA(m_pkey.get()); diff --git a/src/crypto/crypto_ec.cc b/src/crypto/crypto_ec.cc index 0c021eab60d509..3942a52c142226 100644 --- a/src/crypto/crypto_ec.cc +++ b/src/crypto/crypto_ec.cc @@ -174,7 +174,7 @@ ECPointPointer ECDH::BufferToPoint(Environment* env, } ArrayBufferOrViewContents input(buf); - if (UNLIKELY(!input.CheckSizeInt32())) { + if (!input.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "buffer is too big"); return ECPointPointer(); } @@ -293,7 +293,7 @@ void ECDH::SetPrivateKey(const FunctionCallbackInfo& args) { ASSIGN_OR_RETURN_UNWRAP(&ecdh, args.This()); ArrayBufferOrViewContents priv_buffer(args[0]); - if (UNLIKELY(!priv_buffer.CheckSizeInt32())) + if (!priv_buffer.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "key is too big"); BignumPointer priv(priv_buffer.data(), priv_buffer.size()); @@ -396,7 +396,7 @@ void ECDH::ConvertKey(const FunctionCallbackInfo& args) { CHECK(IsAnyBufferSource(args[0])); ArrayBufferOrViewContents args0(args[0]); - if (UNLIKELY(!args0.CheckSizeInt32())) + if (!args0.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "key is too big"); if (args0.empty()) return args.GetReturnValue().SetEmptyString(); @@ -486,10 +486,7 @@ bool ECDHBitsTraits::DeriveBits(Environment* env, case EVP_PKEY_X25519: // Fall through case EVP_PKEY_X448: { - EVPKeyCtxPointer ctx = nullptr; - { - ctx.reset(EVP_PKEY_CTX_new(m_privkey.get(), nullptr)); - } + EVPKeyCtxPointer ctx = m_privkey.newCtx(); Mutex::ScopedLock pub_lock(params.public_.mutex()); if (EVP_PKEY_derive_init(ctx.get()) <= 0 || EVP_PKEY_derive_set_peer( @@ -568,7 +565,7 @@ EVPKeyCtxPointer EcKeyGenTraits::Setup(EcKeyPairGenConfig* params) { return EVPKeyCtxPointer(); } EVPKeyPointer key_params(raw_params); - key_ctx.reset(EVP_PKEY_CTX_new(key_params.get(), nullptr)); + key_ctx = key_params.newCtx(); } } @@ -626,29 +623,23 @@ WebCryptoKeyExportStatus EC_Raw_Export(const KeyObjectData& key_data, const EC_KEY* ec_key = EVP_PKEY_get0_EC_KEY(m_pkey.get()); - size_t len = 0; - if (ec_key == nullptr) { - typedef int (*export_fn)(const EVP_PKEY*, unsigned char*, size_t* len); - export_fn fn = nullptr; switch (key_data.GetKeyType()) { - case kKeyTypePrivate: - fn = EVP_PKEY_get_raw_private_key; + case kKeyTypePrivate: { + auto data = m_pkey.rawPrivateKey(); + if (!data) return WebCryptoKeyExportStatus::INVALID_KEY_TYPE; + *out = ByteSource::Allocated(data.release()); break; - case kKeyTypePublic: - fn = EVP_PKEY_get_raw_public_key; + } + case kKeyTypePublic: { + auto data = m_pkey.rawPublicKey(); + if (!data) return WebCryptoKeyExportStatus::INVALID_KEY_TYPE; + *out = ByteSource::Allocated(data.release()); break; + } case kKeyTypeSecret: UNREACHABLE(); } - CHECK_NOT_NULL(fn); - // Get the size of the raw key data - if (fn(m_pkey.get(), nullptr, &len) == 0) - return WebCryptoKeyExportStatus::INVALID_KEY_TYPE; - ByteSource::Builder data(len); - if (fn(m_pkey.get(), data.data(), &len) == 0) - return WebCryptoKeyExportStatus::INVALID_KEY_TYPE; - *out = std::move(data).release(len); } else { if (key_data.GetKeyType() != kKeyTypePublic) return WebCryptoKeyExportStatus::INVALID_KEY_TYPE; @@ -657,7 +648,7 @@ WebCryptoKeyExportStatus EC_Raw_Export(const KeyObjectData& key_data, point_conversion_form_t form = POINT_CONVERSION_UNCOMPRESSED; // Get the allocated data size... 
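`EC_POINT_point2oct()` below is one of OpenSSL's classic two-call APIs: called with a null output buffer it returns the required size, and a second call fills a buffer of that size. A sketch of the pattern (assumes valid `group` and `point`; error checks elided):

```cpp
#include <openssl/ec.h>

#include <vector>

// Two-call pattern: first query the size, then encode into a buffer of
// exactly that size.
std::vector<unsigned char> EncodePoint(const EC_GROUP* group,
                                       const EC_POINT* point) {
  size_t len = EC_POINT_point2oct(group, point,
                                  POINT_CONVERSION_UNCOMPRESSED,
                                  nullptr, 0, nullptr);  // size query
  std::vector<unsigned char> out(len);
  EC_POINT_point2oct(group, point, POINT_CONVERSION_UNCOMPRESSED,
                     out.data(), out.size(), nullptr);   // actual encoding
  return out;
}
```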
- len = EC_POINT_point2oct(group, point, form, nullptr, 0, nullptr); + size_t len = EC_POINT_point2oct(group, point, form, nullptr, 0, nullptr); if (len == 0) return WebCryptoKeyExportStatus::FAILED; ByteSource::Builder data(len); @@ -700,7 +691,7 @@ WebCryptoKeyExportStatus ECKeyExportTraits::DoExport( return WebCryptoKeyExportStatus::INVALID_KEY_TYPE; const auto& m_pkey = key_data.GetAsymmetricKey(); - if (EVP_PKEY_id(m_pkey.get()) != EVP_PKEY_EC) { + if (m_pkey.id() != EVP_PKEY_EC) { return PKEY_SPKI_Export(key_data, out); } else { // Ensure exported key is in uncompressed point format. @@ -730,12 +721,10 @@ WebCryptoKeyExportStatus ECKeyExportTraits::DoExport( data.size(), nullptr)); CHECK_EQ(1, EC_KEY_set_public_key(ec.get(), uncompressed.get())); - EVPKeyPointer pkey(EVP_PKEY_new()); + auto pkey = EVPKeyPointer::New(); CHECK_EQ(1, EVP_PKEY_set1_EC_KEY(pkey.get(), ec.get())); - auto bio = BIOPointer::NewMem(); - CHECK(bio); - if (!i2d_PUBKEY_bio(bio.get(), pkey.get())) - return WebCryptoKeyExportStatus::FAILED; + auto bio = pkey.derPublicKey(); + if (!bio) return WebCryptoKeyExportStatus::FAILED; *out = ByteSource::FromBIO(bio); return WebCryptoKeyExportStatus::OK; } @@ -750,7 +739,7 @@ Maybe ExportJWKEcKey(Environment* env, Local target) { Mutex::ScopedLock lock(key.mutex()); const auto& m_pkey = key.GetAsymmetricKey(); - CHECK_EQ(EVP_PKEY_id(m_pkey.get()), EVP_PKEY_EC); + CHECK_EQ(m_pkey.id(), EVP_PKEY_EC); const EC_KEY* ec = EVP_PKEY_get0_EC_KEY(m_pkey.get()); CHECK_NOT_NULL(ec); @@ -835,67 +824,49 @@ Maybe ExportJWKEdKey(Environment* env, Mutex::ScopedLock lock(key.mutex()); const auto& pkey = key.GetAsymmetricKey(); - const char* curve = nullptr; - switch (EVP_PKEY_id(pkey.get())) { - case EVP_PKEY_ED25519: - curve = "Ed25519"; - break; - case EVP_PKEY_ED448: - curve = "Ed448"; - break; - case EVP_PKEY_X25519: - curve = "X25519"; - break; - case EVP_PKEY_X448: - curve = "X448"; - break; - default: - UNREACHABLE(); - } - if (target->Set( - env->context(), - env->jwk_crv_string(), - OneByteString(env->isolate(), curve)).IsNothing()) { - return Nothing(); - } - - size_t len = 0; - Local encoded; - Local error; - - if (!EVP_PKEY_get_raw_public_key(pkey.get(), nullptr, &len)) - return Nothing(); - - ByteSource::Builder out(len); - - if (key.GetKeyType() == kKeyTypePrivate) { - if (!EVP_PKEY_get_raw_private_key( - pkey.get(), out.data(), &len) || - !StringBytes::Encode( - env->isolate(), out.data(), len, BASE64URL, &error) + const char* curve = ([&] { + switch (pkey.id()) { + case EVP_PKEY_ED25519: + return "Ed25519"; + case EVP_PKEY_ED448: + return "Ed448"; + case EVP_PKEY_X25519: + return "X25519"; + case EVP_PKEY_X448: + return "X448"; + default: + UNREACHABLE(); + } + })(); + + static constexpr auto trySetKey = [](Environment* env, + ncrypto::DataPointer data, + Local target, + Local key) { + Local encoded; + Local error; + if (!data) return false; + const ncrypto::Buffer out = data; + if (!StringBytes::Encode( + env->isolate(), out.data, out.len, BASE64URL, &error) .ToLocal(&encoded) || - !target->Set(env->context(), env->jwk_d_string(), encoded).IsJust()) { - if (!error.IsEmpty()) - env->isolate()->ThrowException(error); - return Nothing(); + target->Set(env->context(), key, encoded).IsNothing()) { + if (!error.IsEmpty()) env->isolate()->ThrowException(error); + return false; } - } - - if (!EVP_PKEY_get_raw_public_key( - pkey.get(), out.data(), &len) || - !StringBytes::Encode( - env->isolate(), out.data(), len, BASE64URL, &error) - .ToLocal(&encoded) || - 
!target->Set(env->context(), env->jwk_x_string(), encoded).IsJust()) { - if (!error.IsEmpty()) - env->isolate()->ThrowException(error); - return Nothing(); - } + return true; + }; - if (target->Set( - env->context(), - env->jwk_kty_string(), - env->jwk_okp_string()).IsNothing()) { + if (target + ->Set(env->context(), + env->jwk_crv_string(), + OneByteString(env->isolate(), curve)) + .IsNothing() || + (key.GetKeyType() == kKeyTypePrivate && + !trySetKey(env, pkey.rawPrivateKey(), target, env->jwk_d_string())) || + !trySetKey(env, pkey.rawPublicKey(), target, env->jwk_x_string()) || + target->Set(env->context(), env->jwk_kty_string(), env->jwk_okp_string()) + .IsNothing()) { return Nothing(); } @@ -959,7 +930,7 @@ KeyObjectData ImportJWKEcKey(Environment* env, } } - EVPKeyPointer pkey(EVP_PKEY_new()); + auto pkey = EVPKeyPointer::New(); CHECK_EQ(EVP_PKEY_set1_EC_KEY(pkey.get(), ec.get()), 1); return KeyObjectData::CreateAsymmetric(type, std::move(pkey)); @@ -970,7 +941,7 @@ Maybe GetEcKeyDetail(Environment* env, Local target) { Mutex::ScopedLock lock(key.mutex()); const auto& m_pkey = key.GetAsymmetricKey(); - CHECK_EQ(EVP_PKEY_id(m_pkey.get()), EVP_PKEY_EC); + CHECK_EQ(m_pkey.id(), EVP_PKEY_EC); const EC_KEY* ec = EVP_PKEY_get0_EC_KEY(m_pkey.get()); CHECK_NOT_NULL(ec); diff --git a/src/crypto/crypto_hash.cc b/src/crypto/crypto_hash.cc index e27daa469c1915..23e5fea262f2a6 100644 --- a/src/crypto/crypto_hash.cc +++ b/src/crypto/crypto_hash.cc @@ -368,7 +368,7 @@ void Hash::HashUpdate(const FunctionCallbackInfo& args) { const char* data, size_t size) { Environment* env = Environment::GetCurrent(args); - if (UNLIKELY(size > INT_MAX)) + if (size > INT_MAX) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "data is too long"); bool r = hash->HashUpdate(data, size); args.GetReturnValue().Set(r); @@ -467,13 +467,13 @@ Maybe HashTraits::AdditionalConfig( CHECK(args[offset]->IsString()); // Hash algorithm Utf8Value digest(env->isolate(), args[offset]); params->digest = EVP_get_digestbyname(*digest); - if (UNLIKELY(params->digest == nullptr)) { + if (params->digest == nullptr) [[unlikely]] { THROW_ERR_CRYPTO_INVALID_DIGEST(env, "Invalid digest: %s", *digest); return Nothing(); } ArrayBufferOrViewContents data(args[offset + 1]); - if (UNLIKELY(!data.CheckSizeInt32())) { + if (!data.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "data is too big"); return Nothing(); } @@ -483,7 +483,7 @@ Maybe HashTraits::AdditionalConfig( unsigned int expected = EVP_MD_size(params->digest); params->length = expected; - if (UNLIKELY(args[offset + 2]->IsUint32())) { + if (args[offset + 2]->IsUint32()) [[unlikely]] { // length is expressed in terms of bits params->length = static_cast(args[offset + 2] @@ -505,14 +505,13 @@ bool HashTraits::DeriveBits( ByteSource* out) { EVPMDCtxPointer ctx(EVP_MD_CTX_new()); - if (UNLIKELY(!ctx || - EVP_DigestInit_ex(ctx.get(), params.digest, nullptr) <= 0 || - EVP_DigestUpdate( - ctx.get(), params.in.data(), params.in.size()) <= 0)) { + if (!ctx || EVP_DigestInit_ex(ctx.get(), params.digest, nullptr) <= 0 || + EVP_DigestUpdate(ctx.get(), params.in.data(), params.in.size()) <= + 0) [[unlikely]] { return false; } - if (LIKELY(params.length > 0)) { + if (params.length > 0) [[likely]] { unsigned int length = params.length; ByteSource::Builder buf(length); @@ -523,7 +522,7 @@ bool HashTraits::DeriveBits( ? 
EVP_DigestFinal_ex(ctx.get(), buf.data(), &length) : EVP_DigestFinalXOF(ctx.get(), buf.data(), length); - if (UNLIKELY(ret != 1)) + if (ret != 1) [[unlikely]] return false; *out = std::move(buf).release(); diff --git a/src/crypto/crypto_hkdf.cc b/src/crypto/crypto_hkdf.cc index 8a1782ef382b0d..2a465c849def44 100644 --- a/src/crypto/crypto_hkdf.cc +++ b/src/crypto/crypto_hkdf.cc @@ -67,11 +67,11 @@ Maybe HKDFTraits::AdditionalConfig( ArrayBufferOrViewContents salt(args[offset + 2]); ArrayBufferOrViewContents info(args[offset + 3]); - if (UNLIKELY(!salt.CheckSizeInt32())) { + if (!salt.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "salt is too big"); return Nothing(); } - if (UNLIKELY(!info.CheckSizeInt32())) { + if (!info.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "info is too big"); return Nothing(); } diff --git a/src/crypto/crypto_hmac.cc b/src/crypto/crypto_hmac.cc index 30cb1205870908..ecc4f3fd8ae7df 100644 --- a/src/crypto/crypto_hmac.cc +++ b/src/crypto/crypto_hmac.cc @@ -103,7 +103,7 @@ void Hmac::HmacUpdate(const FunctionCallbackInfo& args) { Decode(args, [](Hmac* hmac, const FunctionCallbackInfo& args, const char* data, size_t size) { Environment* env = Environment::GetCurrent(args); - if (UNLIKELY(size > INT_MAX)) + if (size > INT_MAX) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "data is too long"); bool r = hmac->HmacUpdate(data, size); args.GetReturnValue().Set(r); @@ -198,7 +198,7 @@ Maybe HmacTraits::AdditionalConfig( params->key = key->Data().addRef(); ArrayBufferOrViewContents data(args[offset + 3]); - if (UNLIKELY(!data.CheckSizeInt32())) { + if (!data.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "data is too big"); return Nothing(); } @@ -208,7 +208,7 @@ Maybe HmacTraits::AdditionalConfig( if (!args[offset + 4]->IsUndefined()) { ArrayBufferOrViewContents signature(args[offset + 4]); - if (UNLIKELY(!signature.CheckSizeInt32())) { + if (!signature.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "signature is too big"); return Nothing(); } diff --git a/src/crypto/crypto_keygen.h b/src/crypto/crypto_keygen.h index 120769df3651e6..2707cb8b41dc53 100644 --- a/src/crypto/crypto_keygen.h +++ b/src/crypto/crypto_keygen.h @@ -178,7 +178,8 @@ struct KeyPairGenTraits final { auto data = KeyObjectData::CreateAsymmetric(KeyType::kKeyTypePrivate, EVPKeyPointer(pkey)); - if (UNLIKELY(!data)) return KeyGenJobStatus::FAILED; + if (!data) [[unlikely]] + return KeyGenJobStatus::FAILED; params->key = std::move(data); return KeyGenJobStatus::OK; } diff --git a/src/crypto/crypto_keys.cc b/src/crypto/crypto_keys.cc index 8488fc57faaf72..6af8d089ca6bcd 100644 --- a/src/crypto/crypto_keys.cc +++ b/src/crypto/crypto_keys.cc @@ -25,7 +25,6 @@ using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Int32; using v8::Isolate; -using v8::Just; using v8::JustVoid; using v8::Local; using v8::Maybe; @@ -60,8 +59,8 @@ void GetKeyFormatAndTypeFromJs( args[*offset].As()->Value()); if (args[*offset + 1]->IsInt32()) { - config->type_ = Just(static_cast( - args[*offset + 1].As()->Value())); + config->type_ = + static_cast(args[*offset + 1].As()->Value()); } else { CHECK( (context == kKeyContextInput && @@ -69,207 +68,13 @@ void GetKeyFormatAndTypeFromJs( (context == kKeyContextGenerate && config->format_ == kKeyFormatJWK)); CHECK(args[*offset + 1]->IsNullOrUndefined()); - config->type_ = Nothing(); + config->type_ = std::nullopt; } } *offset += 2; } -template -ParseKeyResult TryParsePublicKey(EVPKeyPointer* pkey, - const BIOPointer& 
bp, - const char* name, - F&& parse) { - unsigned char* der_data; - long der_len; // NOLINT(runtime/int) - - // This skips surrounding data and decodes PEM to DER. - { - MarkPopErrorOnReturn mark_pop_error_on_return; - if (PEM_bytes_read_bio(&der_data, &der_len, nullptr, name, - bp.get(), nullptr, nullptr) != 1) - return ParseKeyResult::kParseKeyNotRecognized; - } - - // OpenSSL might modify the pointer, so we need to make a copy before parsing. - const unsigned char* p = der_data; - pkey->reset(parse(&p, der_len)); - OPENSSL_clear_free(der_data, der_len); - - return *pkey ? ParseKeyResult::kParseKeyOk : - ParseKeyResult::kParseKeyFailed; -} - -ParseKeyResult ParsePublicKeyPEM(EVPKeyPointer* pkey, - const char* key_pem, - int key_pem_len) { - auto bp = BIOPointer::New(key_pem, key_pem_len); - if (!bp) - return ParseKeyResult::kParseKeyFailed; - - ParseKeyResult ret; - - // Try parsing as a SubjectPublicKeyInfo first. - ret = TryParsePublicKey(pkey, bp, "PUBLIC KEY", - [](const unsigned char** p, long l) { // NOLINT(runtime/int) - return d2i_PUBKEY(nullptr, p, l); - }); - if (ret != ParseKeyResult::kParseKeyNotRecognized) - return ret; - - // Maybe it is PKCS#1. - CHECK(bp.resetBio()); - ret = TryParsePublicKey(pkey, bp, "RSA PUBLIC KEY", - [](const unsigned char** p, long l) { // NOLINT(runtime/int) - return d2i_PublicKey(EVP_PKEY_RSA, nullptr, p, l); - }); - if (ret != ParseKeyResult::kParseKeyNotRecognized) - return ret; - - // X.509 fallback. - CHECK(bp.resetBio()); - return TryParsePublicKey(pkey, bp, "CERTIFICATE", - [](const unsigned char** p, long l) { // NOLINT(runtime/int) - X509Pointer x509(d2i_X509(nullptr, p, l)); - return x509 ? X509_get_pubkey(x509.get()) : nullptr; - }); -} - -ParseKeyResult ParsePublicKey(EVPKeyPointer* pkey, - const PublicKeyEncodingConfig& config, - const char* key, - size_t key_len) { - if (config.format_ == kKeyFormatPEM) { - return ParsePublicKeyPEM(pkey, key, key_len); - } else { - CHECK_EQ(config.format_, kKeyFormatDER); - - const unsigned char* p = reinterpret_cast(key); - if (config.type_.ToChecked() == kKeyEncodingPKCS1) { - pkey->reset(d2i_PublicKey(EVP_PKEY_RSA, nullptr, &p, key_len)); - } else { - CHECK_EQ(config.type_.ToChecked(), kKeyEncodingSPKI); - pkey->reset(d2i_PUBKEY(nullptr, &p, key_len)); - } - - return *pkey ? ParseKeyResult::kParseKeyOk : - ParseKeyResult::kParseKeyFailed; - } -} - -bool IsASN1Sequence(const unsigned char* data, size_t size, - size_t* data_offset, size_t* data_size) { - if (size < 2 || data[0] != 0x30) - return false; - - if (data[1] & 0x80) { - // Long form. - size_t n_bytes = data[1] & ~0x80; - if (n_bytes + 2 > size || n_bytes > sizeof(size_t)) - return false; - size_t length = 0; - for (size_t i = 0; i < n_bytes; i++) - length = (length << 8) | data[i + 2]; - *data_offset = 2 + n_bytes; - *data_size = std::min(size - 2 - n_bytes, length); - } else { - // Short form. - *data_offset = 2; - *data_size = std::min(size - 2, data[1]); - } - - return true; -} - -bool IsRSAPrivateKey(const unsigned char* data, size_t size) { - // Both RSAPrivateKey and RSAPublicKey structures start with a SEQUENCE. - size_t offset, len; - if (!IsASN1Sequence(data, size, &offset, &len)) - return false; - - // An RSAPrivateKey sequence always starts with a single-byte integer whose - // value is either 0 or 1, whereas an RSAPublicKey starts with the modulus - // (which is the product of two primes and therefore at least 4), so we can - // decide the type of the structure based on the first three bytes of the - // sequence. 
- return len >= 3 && - data[offset] == 2 && - data[offset + 1] == 1 && - !(data[offset + 2] & 0xfe); -} - -bool IsEncryptedPrivateKeyInfo(const unsigned char* data, size_t size) { - // Both PrivateKeyInfo and EncryptedPrivateKeyInfo start with a SEQUENCE. - size_t offset, len; - if (!IsASN1Sequence(data, size, &offset, &len)) - return false; - - // A PrivateKeyInfo sequence always starts with an integer whereas an - // EncryptedPrivateKeyInfo starts with an AlgorithmIdentifier. - return len >= 1 && - data[offset] != 2; -} - -ParseKeyResult ParsePrivateKey(EVPKeyPointer* pkey, - const PrivateKeyEncodingConfig& config, - const char* key, - size_t key_len) { - const ByteSource* passphrase = config.passphrase_.get(); - - if (config.format_ == kKeyFormatPEM) { - auto bio = BIOPointer::New(key, key_len); - if (!bio) - return ParseKeyResult::kParseKeyFailed; - - pkey->reset(PEM_read_bio_PrivateKey(bio.get(), - nullptr, - PasswordCallback, - &passphrase)); - } else { - CHECK_EQ(config.format_, kKeyFormatDER); - - if (config.type_.ToChecked() == kKeyEncodingPKCS1) { - const unsigned char* p = reinterpret_cast(key); - pkey->reset(d2i_PrivateKey(EVP_PKEY_RSA, nullptr, &p, key_len)); - } else if (config.type_.ToChecked() == kKeyEncodingPKCS8) { - auto bio = BIOPointer::New(key, key_len); - if (!bio) - return ParseKeyResult::kParseKeyFailed; - - if (IsEncryptedPrivateKeyInfo( - reinterpret_cast(key), key_len)) { - pkey->reset(d2i_PKCS8PrivateKey_bio(bio.get(), - nullptr, - PasswordCallback, - &passphrase)); - } else { - PKCS8Pointer p8inf(d2i_PKCS8_PRIV_KEY_INFO_bio(bio.get(), nullptr)); - if (p8inf) - pkey->reset(EVP_PKCS82PKEY(p8inf.get())); - } - } else { - CHECK_EQ(config.type_.ToChecked(), kKeyEncodingSEC1); - const unsigned char* p = reinterpret_cast(key); - pkey->reset(d2i_PrivateKey(EVP_PKEY_EC, nullptr, &p, key_len)); - } - } - - // OpenSSL can fail to parse the key but still return a non-null pointer. - unsigned long err = ERR_peek_error(); // NOLINT(runtime/int) - if (err != 0) - pkey->reset(); - - if (*pkey) - return ParseKeyResult::kParseKeyOk; - if (ERR_GET_LIB(err) == ERR_LIB_PEM && - ERR_GET_REASON(err) == PEM_R_BAD_PASSWORD_READ) { - if (config.passphrase_.IsEmpty()) - return ParseKeyResult::kParseKeyNeedPassphrase; - } - return ParseKeyResult::kParseKeyFailed; -} - MaybeLocal BIOToStringOrBuffer(Environment* env, const BIOPointer& bio, PKFormatType format) { @@ -318,10 +123,10 @@ MaybeLocal WritePrivateKey(Environment* env, MarkPopErrorOnReturn mark_pop_error_on_return; bool err; - PKEncodingType encoding_type = config.type_.ToChecked(); + PKEncodingType encoding_type = config.type_.value(); if (encoding_type == kKeyEncodingPKCS1) { // PKCS#1 is only permitted for RSA keys. - CHECK_EQ(EVP_PKEY_id(pkey), EVP_PKEY_RSA); + CHECK_EQ(EVPKeyPointer::id(pkey), EVP_PKEY_RSA); OSSL3_CONST RSA* rsa = EVP_PKEY_get0_RSA(pkey); if (config.format_ == kKeyFormatPEM) { @@ -362,7 +167,7 @@ MaybeLocal WritePrivateKey(Environment* env, CHECK_EQ(encoding_type, kKeyEncodingSEC1); // SEC1 is only permitted for EC keys. 
- CHECK_EQ(EVP_PKEY_id(pkey), EVP_PKEY_EC); + CHECK_EQ(EVPKeyPointer::id(pkey), EVP_PKEY_EC); OSSL3_CONST EC_KEY* ec_key = EVP_PKEY_get0_EC_KEY(pkey); if (config.format_ == kKeyFormatPEM) { @@ -392,9 +197,9 @@ MaybeLocal WritePrivateKey(Environment* env, bool WritePublicKeyInner(OSSL3_CONST EVP_PKEY* pkey, const BIOPointer& bio, const PublicKeyEncodingConfig& config) { - if (config.type_.ToChecked() == kKeyEncodingPKCS1) { + if (config.type_.value() == kKeyEncodingPKCS1) { // PKCS#1 is only valid for RSA keys. - CHECK_EQ(EVP_PKEY_id(pkey), EVP_PKEY_RSA); + CHECK_EQ(EVPKeyPointer::id(pkey), EVP_PKEY_RSA); OSSL3_CONST RSA* rsa = EVP_PKEY_get0_RSA(pkey); if (config.format_ == kKeyFormatPEM) { // Encode PKCS#1 as PEM. @@ -405,7 +210,7 @@ bool WritePublicKeyInner(OSSL3_CONST EVP_PKEY* pkey, return i2d_RSAPublicKey_bio(bio.get(), rsa) == 1; } } else { - CHECK_EQ(config.type_.ToChecked(), kKeyEncodingSPKI); + CHECK_EQ(config.type_.value(), kKeyEncodingSPKI); if (config.format_ == kKeyFormatPEM) { // Encode SPKI as PEM. return PEM_write_bio_PUBKEY(bio.get(), pkey) == 1; @@ -480,7 +285,7 @@ Maybe ExportJWKAsymmetricKey(Environment* env, const KeyObjectData& key, Local target, bool handleRsaPss) { - switch (EVP_PKEY_id(key.GetAsymmetricKey().get())) { + switch (key.GetAsymmetricKey().id()) { case EVP_PKEY_RSA_PSS: { if (handleRsaPss) return ExportJWKRsaKey(env, key, target); break; @@ -535,7 +340,7 @@ Maybe GetSecretKeyDetail(Environment* env, Maybe GetAsymmetricKeyDetail(Environment* env, const KeyObjectData& key, Local target) { - switch (EVP_PKEY_id(key.GetAsymmetricKey().get())) { + switch (key.GetAsymmetricKey().id()) { case EVP_PKEY_RSA: // Fall through case EVP_PKEY_RSA_PSS: return GetRsaKeyDetail(env, key, target); @@ -546,6 +351,40 @@ Maybe GetAsymmetricKeyDetail(Environment* env, THROW_ERR_CRYPTO_INVALID_KEYTYPE(env); return Nothing(); } + +KeyObjectData TryParsePrivateKey( + Environment* env, + const PrivateKeyEncodingConfig& config, + const ncrypto::Buffer& buffer) { + std::optional> maybePassphrase = std::nullopt; + if (config.passphrase_.get() != nullptr) { + maybePassphrase = ncrypto::Buffer{ + .data = const_cast(config.passphrase_->data()), + .len = config.passphrase_->size(), + }; + } + + auto res = EVPKeyPointer::TryParsePrivateKey( + static_cast(config.format_), + static_cast( + config.type_.value_or(kKeyEncodingPKCS8)), + std::move(maybePassphrase), + buffer); + + if (!res) { + if (res.error.value() == EVPKeyPointer::PKParseError::NEED_PASSPHRASE) { + THROW_ERR_MISSING_PASSPHRASE(env, + "Passphrase required for encrypted key"); + return {}; + } + ThrowCryptoError( + env, res.openssl_error.value_or(0), "Failed to read private key"); + return {}; + } + + return KeyObjectData::CreateAsymmetric(KeyType::kKeyTypePrivate, + std::move(res.value)); +} } // namespace // This maps true to JustVoid and false to Nothing(). 
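// The recurring mechanical change in the hunks above replaces the GCC-style
// UNLIKELY()/LIKELY() wrapper macros with the standard C++20 branch-hint
// attributes. A minimal self-contained sketch of the before/after pattern
// (the function and its names are illustrative, not from the Node.js tree):
#include <climits>
#include <cstddef>

bool CheckedUpdate(const char* data, size_t size) {
  // Before: if (UNLIKELY(size > INT_MAX)) return false;
  // After: the attribute annotates the branch rather than the condition, so
  // it moves out of the parentheses to just before the substatement.
  if (size > INT_MAX) [[unlikely]]
    return false;
  return data != nullptr;
}
// The hint must keep its polarity: LIKELY(cond) maps to `if (cond) [[likely]]`,
// and a negated guard `if (!UNLIKELY(cond)) return;` maps to
// `if (!cond) [[likely]] return;` (see the debug_utils-inl.h hunks below).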
@@ -644,7 +483,7 @@ KeyObjectData::GetPrivateKeyEncodingFromJs( if (IsAnyBufferSource(args[*offset])) { CHECK_IMPLIES(context != kKeyContextInput, result.cipher_ != nullptr); ArrayBufferOrViewContents passphrase(args[*offset]); - if (UNLIKELY(!passphrase.CheckSizeInt32())) { + if (!passphrase.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "passphrase is too big"); return NonCopyableMaybe(); } @@ -677,24 +516,23 @@ KeyObjectData KeyObjectData::GetPrivateKeyFromJs( ByteSource key = ByteSource::FromStringOrBuffer(env, args[(*offset)++]); NonCopyableMaybe config = GetPrivateKeyEncodingFromJs(args, offset, kKeyContextInput); - if (config.IsEmpty()) return {}; - EVPKeyPointer pkey; - ParseKeyResult ret = - ParsePrivateKey(&pkey, config.Release(), key.data(), key.size()); - return GetParsedKey(KeyType::kKeyTypePrivate, - env, - std::move(pkey), - ret, - "Failed to read private key"); - } else { - CHECK(args[*offset]->IsObject() && allow_key_object); - KeyObjectHandle* key; - ASSIGN_OR_RETURN_UNWRAP(&key, args[*offset].As(), KeyObjectData()); - CHECK_EQ(key->Data().GetKeyType(), kKeyTypePrivate); - (*offset) += 4; - return key->Data().addRef(); + if (config.IsEmpty()) return {}; + return TryParsePrivateKey( + env, + config.Release(), + ncrypto::Buffer{ + .data = reinterpret_cast(key.data()), + .len = key.size(), + }); } + + CHECK(args[*offset]->IsObject() && allow_key_object); + KeyObjectHandle* key; + ASSIGN_OR_RETURN_UNWRAP(&key, args[*offset].As(), KeyObjectData()); + CHECK_EQ(key->Data().GetKeyType(), kKeyTypePrivate); + (*offset) += 4; + return key->Data().addRef(); } KeyObjectData KeyObjectData::GetPublicOrPrivateKeyFromJs( @@ -702,7 +540,7 @@ KeyObjectData KeyObjectData::GetPublicOrPrivateKeyFromJs( if (IsAnyBufferSource(args[*offset])) { Environment* env = Environment::GetCurrent(args); ArrayBufferOrViewContents data(args[(*offset)++]); - if (UNLIKELY(!data.CheckSizeInt32())) { + if (!data.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "keyData is too big"); return {}; } @@ -710,57 +548,77 @@ KeyObjectData KeyObjectData::GetPublicOrPrivateKeyFromJs( KeyObjectData::GetPrivateKeyEncodingFromJs( args, offset, kKeyContextInput); if (config_.IsEmpty()) return {}; - - ParseKeyResult ret; PrivateKeyEncodingConfig config = config_.Release(); - EVPKeyPointer pkey; - KeyType type = KeyType::kKeyTypePublic; + + ncrypto::Buffer buffer = { + .data = reinterpret_cast(data.data()), + .len = data.size(), + }; + + std::optional> maybePassphrase = std::nullopt; + if (config.passphrase_.get() != nullptr) { + maybePassphrase = ncrypto::Buffer{ + .data = const_cast(config.passphrase_->data()), + .len = config.passphrase_->size(), + }; + } + if (config.format_ == kKeyFormatPEM) { // For PEM, we can easily determine whether it is a public or private key // by looking for the respective PEM tags. - ret = ParsePublicKeyPEM(&pkey, data.data(), data.size()); - if (ret == ParseKeyResult::kParseKeyNotRecognized) { - type = KeyType::kKeyTypePrivate; - ret = ParsePrivateKey(&pkey, config, data.data(), data.size()); + auto res = EVPKeyPointer::TryParsePublicKeyPEM(buffer); + if (!res) { + if (res.error.value() == EVPKeyPointer::PKParseError::NOT_RECOGNIZED) { + return TryParsePrivateKey(env, config, buffer); + } + ThrowCryptoError(env, + res.openssl_error.value_or(0), + "Failed to read asymmetric key"); + return {}; } - } else { - // For DER, the type determines how to parse it. SPKI, PKCS#8 and SEC1 are - // easy, but PKCS#1 can be a public key or a private key. 
- bool is_public; - switch (config.type_.ToChecked()) { + return CreateAsymmetric(kKeyTypePublic, std::move(res.value)); + } + + // For DER, the type determines how to parse it. SPKI, PKCS#8 and SEC1 are + // easy, but PKCS#1 can be a public key or a private key. + bool is_public = ([&] { + switch (config.type_.value()) { case kKeyEncodingPKCS1: - is_public = !IsRSAPrivateKey( - reinterpret_cast(data.data()), data.size()); - break; + return !EVPKeyPointer::IsRSAPrivateKey(buffer); case kKeyEncodingSPKI: - is_public = true; - break; + return true; case kKeyEncodingPKCS8: + return false; case kKeyEncodingSEC1: - is_public = false; - break; + return false; default: UNREACHABLE("Invalid key encoding type"); } - - if (is_public) { - ret = ParsePublicKey(&pkey, config, data.data(), data.size()); - } else { - type = KeyType::kKeyTypePrivate; - ret = ParsePrivateKey(&pkey, config, data.data(), data.size()); + })(); + + if (is_public) { + auto res = EVPKeyPointer::TryParsePublicKey( + static_cast(config.format_), + static_cast(config.type_.value()), + buffer); + if (!res) { + ThrowCryptoError(env, + res.openssl_error.value_or(0), + "Failed to read asymmetric key"); + return {}; } + return CreateAsymmetric(KeyType::kKeyTypePublic, std::move(res.value)); } - return GetParsedKey( - type, env, std::move(pkey), ret, "Failed to read asymmetric key"); - } else { - CHECK(args[*offset]->IsObject()); - KeyObjectHandle* key = Unwrap(args[*offset].As()); - CHECK_NOT_NULL(key); - CHECK_NE(key->Data().GetKeyType(), kKeyTypeSecret); - (*offset) += 4; - return key->Data().addRef(); + return TryParsePrivateKey(env, config, buffer); } + + CHECK(args[*offset]->IsObject()); + KeyObjectHandle* key = Unwrap(args[*offset].As()); + CHECK_NOT_NULL(key); + CHECK_NE(key->Data().GetKeyType(), kKeyTypeSecret); + (*offset) += 4; + return key->Data().addRef(); } KeyObjectData KeyObjectData::GetParsedKey(KeyType type, @@ -808,17 +666,10 @@ void KeyObjectData::MemoryInfo(MemoryTracker* tracker) const { // Fall through case kKeyTypePublic: { if (data_->asymmetric_key) { - size_t size = kSizeOf_EVP_PKEY; - size_t len = 0; - if (EVP_PKEY_get_raw_private_key( - data_->asymmetric_key.get(), nullptr, &len) == 1) { - size += len; - } - if (EVP_PKEY_get_raw_public_key( - data_->asymmetric_key.get(), nullptr, &len) == 1) { - size += len; - } - tracker->TrackFieldWithSize("key", size); + tracker->TrackFieldWithSize( + "key", + kSizeOf_EVP_PKEY + data_->asymmetric_key.rawPublicKeySize() + + data_->asymmetric_key.rawPrivateKeySize()); } break; } @@ -1046,7 +897,7 @@ void KeyObjectHandle::InitECRaw(const FunctionCallbackInfo& args) { return args.GetReturnValue().Set(false); } - EVPKeyPointer pkey(EVP_PKEY_new()); + auto pkey = EVPKeyPointer::New(); if (!EVP_PKEY_assign_EC_KEY(pkey.get(), eckey.get())) args.GetReturnValue().Set(false); @@ -1070,10 +921,10 @@ void KeyObjectHandle::InitEDRaw(const FunctionCallbackInfo& args) { MarkPopErrorOnReturn mark_pop_error_on_return; - typedef EVP_PKEY* (*new_key_fn)(int, ENGINE*, const unsigned char*, size_t); - new_key_fn fn = type == kKeyTypePrivate - ? EVP_PKEY_new_raw_private_key - : EVP_PKEY_new_raw_public_key; + typedef EVPKeyPointer (*new_key_fn)( + int, const ncrypto::Buffer&); + new_key_fn fn = type == kKeyTypePrivate ? 
EVPKeyPointer::NewRawPrivate + : EVPKeyPointer::NewRawPublic; int id = GetOKPCurveFromName(*name); @@ -1082,9 +933,14 @@ void KeyObjectHandle::InitEDRaw(const FunctionCallbackInfo& args) { case EVP_PKEY_X448: case EVP_PKEY_ED25519: case EVP_PKEY_ED448: { - EVPKeyPointer pkey(fn(id, nullptr, key_data.data(), key_data.size())); - if (!pkey) + auto pkey = fn(id, + ncrypto::Buffer{ + .data = key_data.data(), + .len = key_data.size(), + }); + if (!pkey) { return args.GetReturnValue().Set(false); + } key->data_ = KeyObjectData::CreateAsymmetric(type, std::move(pkey)); CHECK(key->data_); break; } @@ -1170,28 +1026,27 @@ void KeyObjectHandle::GetKeyDetail(const FunctionCallbackInfo& args) { } Local KeyObjectHandle::GetAsymmetricKeyType() const { - const auto& key = data_.GetAsymmetricKey(); - switch (EVP_PKEY_id(key.get())) { - case EVP_PKEY_RSA: - return env()->crypto_rsa_string(); - case EVP_PKEY_RSA_PSS: - return env()->crypto_rsa_pss_string(); - case EVP_PKEY_DSA: - return env()->crypto_dsa_string(); - case EVP_PKEY_DH: - return env()->crypto_dh_string(); - case EVP_PKEY_EC: - return env()->crypto_ec_string(); - case EVP_PKEY_ED25519: - return env()->crypto_ed25519_string(); - case EVP_PKEY_ED448: - return env()->crypto_ed448_string(); - case EVP_PKEY_X25519: - return env()->crypto_x25519_string(); - case EVP_PKEY_X448: - return env()->crypto_x448_string(); - default: - return Undefined(env()->isolate()); + switch (data_.GetAsymmetricKey().id()) { + case EVP_PKEY_RSA: + return env()->crypto_rsa_string(); + case EVP_PKEY_RSA_PSS: + return env()->crypto_rsa_pss_string(); + case EVP_PKEY_DSA: + return env()->crypto_dsa_string(); + case EVP_PKEY_DH: + return env()->crypto_dh_string(); + case EVP_PKEY_EC: + return env()->crypto_ec_string(); + case EVP_PKEY_ED25519: + return env()->crypto_ed25519_string(); + case EVP_PKEY_ED448: + return env()->crypto_ed448_string(); + case EVP_PKEY_X25519: + return env()->crypto_x25519_string(); + case EVP_PKEY_X448: + return env()->crypto_x448_string(); + default: + return Undefined(env()->isolate()); } } @@ -1207,9 +1062,9 @@ bool KeyObjectHandle::CheckEcKeyData() const { MarkPopErrorOnReturn mark_pop_error_on_return; const auto& key = data_.GetAsymmetricKey(); - EVPKeyCtxPointer ctx(EVP_PKEY_CTX_new(key.get(), nullptr)); + EVPKeyCtxPointer ctx = key.newCtx(); CHECK(ctx); - CHECK_EQ(EVP_PKEY_id(key.get()), EVP_PKEY_EC); + CHECK_EQ(key.id(), EVP_PKEY_EC); if (data_.GetKeyType() == kKeyTypePrivate) { return EVP_PKEY_check(ctx.get()) == 1; @@ -1408,12 +1263,8 @@ WebCryptoKeyExportStatus PKEY_SPKI_Export(const KeyObjectData& key_data, ByteSource* out) { CHECK_EQ(key_data.GetKeyType(), kKeyTypePublic); Mutex::ScopedLock lock(key_data.mutex()); - const auto& m_pkey = key_data.GetAsymmetricKey(); - auto bio = BIOPointer::NewMem(); - CHECK(bio); - if (!i2d_PUBKEY_bio(bio.get(), m_pkey.get())) - return WebCryptoKeyExportStatus::FAILED; - + auto bio = key_data.GetAsymmetricKey().derPublicKey(); + if (!bio) return WebCryptoKeyExportStatus::FAILED; *out = ByteSource::FromBIO(bio); return WebCryptoKeyExportStatus::OK; } diff --git a/src/crypto/crypto_keys.h b/src/crypto/crypto_keys.h index efc6d583dccdea..fd7ff00b21c8e2 100644 --- a/src/crypto/crypto_keys.h +++ b/src/crypto/crypto_keys.h @@ -18,21 +18,23 @@ namespace node { namespace crypto { +// TODO(@jasnell): These static casts are temporary while this code +// is being shifted over into ncrypto enum PKEncodingType { // RSAPublicKey / RSAPrivateKey according to PKCS#1.
- kKeyEncodingPKCS1, + kKeyEncodingPKCS1 = static_cast(EVPKeyPointer::PKEncodingType::PKCS1), // PrivateKeyInfo or EncryptedPrivateKeyInfo according to PKCS#8. - kKeyEncodingPKCS8, + kKeyEncodingPKCS8 = static_cast(EVPKeyPointer::PKEncodingType::PKCS8), // SubjectPublicKeyInfo according to X.509. - kKeyEncodingSPKI, + kKeyEncodingSPKI = static_cast(EVPKeyPointer::PKEncodingType::SPKI), // ECPrivateKey according to SEC1. - kKeyEncodingSEC1 + kKeyEncodingSEC1 = static_cast(EVPKeyPointer::PKEncodingType::SEC1), }; enum PKFormatType { - kKeyFormatDER, - kKeyFormatPEM, - kKeyFormatJWK + kKeyFormatDER = static_cast(EVPKeyPointer::PKFormatType::DER), + kKeyFormatPEM = static_cast(EVPKeyPointer::PKFormatType::PEM), + kKeyFormatJWK = static_cast(EVPKeyPointer::PKFormatType::JWK), }; enum KeyType { @@ -48,16 +50,18 @@ enum KeyEncodingContext { }; enum class ParseKeyResult { + kParseKeyNotRecognized = + static_cast(EVPKeyPointer::PKParseError::NOT_RECOGNIZED), + kParseKeyNeedPassphrase = + static_cast(EVPKeyPointer::PKParseError::NEED_PASSPHRASE), + kParseKeyFailed = static_cast(EVPKeyPointer::PKParseError::FAILED), kParseKeyOk, - kParseKeyNotRecognized, - kParseKeyNeedPassphrase, - kParseKeyFailed }; struct AsymmetricKeyEncodingConfig { bool output_key_object_ = false; PKFormatType format_ = kKeyFormatDER; - v8::Maybe type_ = v8::Nothing(); + std::optional type_ = std::nullopt; }; using PublicKeyEncodingConfig = AsymmetricKeyEncodingConfig; diff --git a/src/crypto/crypto_pbkdf2.cc b/src/crypto/crypto_pbkdf2.cc index 68b7304d85d081..dcaa430aacd3d7 100644 --- a/src/crypto/crypto_pbkdf2.cc +++ b/src/crypto/crypto_pbkdf2.cc @@ -65,12 +65,12 @@ Maybe PBKDF2Traits::AdditionalConfig( ArrayBufferOrViewContents pass(args[offset]); ArrayBufferOrViewContents salt(args[offset + 1]); - if (UNLIKELY(!pass.CheckSizeInt32())) { + if (!pass.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "pass is too large"); return Nothing(); } - if (UNLIKELY(!salt.CheckSizeInt32())) { + if (!salt.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "salt is too large"); return Nothing(); } diff --git a/src/crypto/crypto_rsa.cc b/src/crypto/crypto_rsa.cc index 02e8e24b4054af..6d360554b31d53 100644 --- a/src/crypto/crypto_rsa.cc +++ b/src/crypto/crypto_rsa.cc @@ -203,7 +203,7 @@ WebCryptoCipherStatus RSA_Cipher(Environment* env, Mutex::ScopedLock lock(key_data.mutex()); const auto& m_pkey = key_data.GetAsymmetricKey(); - EVPKeyCtxPointer ctx(EVP_PKEY_CTX_new(m_pkey.get(), nullptr)); + EVPKeyCtxPointer ctx = m_pkey.newCtx(); if (!ctx || init(ctx.get()) <= 0) return WebCryptoCipherStatus::FAILED; @@ -320,7 +320,7 @@ Maybe RSACipherTraits::AdditionalConfig( if (IsAnyBufferSource(args[offset + 2])) { ArrayBufferOrViewContents label(args[offset + 2]); - if (UNLIKELY(!label.CheckSizeInt32())) { + if (!label.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "label is too big"); return Nothing(); } @@ -360,7 +360,7 @@ Maybe ExportJWKRsaKey(Environment* env, Local target) { Mutex::ScopedLock lock(key.mutex()); const auto& m_pkey = key.GetAsymmetricKey(); - int type = EVP_PKEY_id(m_pkey.get()); + int type = m_pkey.id(); CHECK(type == EVP_PKEY_RSA || type == EVP_PKEY_RSA_PSS); // TODO(tniessen): Remove the "else" branch once we drop support for OpenSSL @@ -493,7 +493,7 @@ KeyObjectData ImportJWKRsaKey(Environment* env, } } - EVPKeyPointer pkey(EVP_PKEY_new()); + auto pkey = EVPKeyPointer::New(); CHECK_EQ(EVP_PKEY_set1_RSA(pkey.get(), rsa.get()), 1); return KeyObjectData::CreateAsymmetric(type, std::move(pkey)); 
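// The crypto_keys.h hunk above also swaps v8::Maybe<PKEncodingType> for
// std::optional<PKEncodingType> in AsymmetricKeyEncodingConfig. A compilable
// sketch of how the call sites translate (simplified two-value enum here;
// the real enum is the four-value one defined above):
#include <optional>

enum PKEncodingType { kKeyEncodingPKCS1, kKeyEncodingPKCS8 };

struct AsymmetricKeyEncodingConfig {
  // Was: v8::Maybe<PKEncodingType> type_ = v8::Nothing<PKEncodingType>();
  std::optional<PKEncodingType> type_ = std::nullopt;
};

int main() {
  AsymmetricKeyEncodingConfig config;
  config.type_ = kKeyEncodingPKCS1;         // was: Just(kKeyEncodingPKCS1)
  PKEncodingType t = config.type_.value();  // was: config.type_.ToChecked()
  // value_or() expresses the "default to PKCS#8" read that the new
  // TryParsePrivateKey helper performs:
  PKEncodingType u = config.type_.value_or(kKeyEncodingPKCS8);
  return (t == kKeyEncodingPKCS1 && u == kKeyEncodingPKCS1) ? 0 : 1;
}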
@@ -507,7 +507,7 @@ Maybe GetRsaKeyDetail(Environment* env, Mutex::ScopedLock lock(key.mutex()); const auto& m_pkey = key.GetAsymmetricKey(); - int type = EVP_PKEY_id(m_pkey.get()); + int type = m_pkey.id(); CHECK(type == EVP_PKEY_RSA || type == EVP_PKEY_RSA_PSS); // TODO(tniessen): Remove the "else" branch once we drop support for OpenSSL diff --git a/src/crypto/crypto_scrypt.cc b/src/crypto/crypto_scrypt.cc index b63386d876a3b6..1886543e6e01cb 100644 --- a/src/crypto/crypto_scrypt.cc +++ b/src/crypto/crypto_scrypt.cc @@ -62,12 +62,12 @@ Maybe ScryptTraits::AdditionalConfig( ArrayBufferOrViewContents pass(args[offset]); ArrayBufferOrViewContents salt(args[offset + 1]); - if (UNLIKELY(!pass.CheckSizeInt32())) { + if (!pass.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "pass is too large"); return Nothing(); } - if (UNLIKELY(!salt.CheckSizeInt32())) { + if (!salt.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "salt is too large"); return Nothing(); } diff --git a/src/crypto/crypto_sig.cc b/src/crypto/crypto_sig.cc index 903ab29cd7e701..0e808013a31f9d 100644 --- a/src/crypto/crypto_sig.cc +++ b/src/crypto/crypto_sig.cc @@ -34,11 +34,11 @@ namespace crypto { namespace { bool ValidateDSAParameters(EVP_PKEY* key) { /* Validate DSA2 parameters from FIPS 186-4 */ + auto id = EVPKeyPointer::base_id(key); #if OPENSSL_VERSION_MAJOR >= 3 - if (EVP_default_properties_is_fips_enabled(nullptr) && - EVP_PKEY_DSA == EVP_PKEY_base_id(key)) { + if (EVP_default_properties_is_fips_enabled(nullptr) && EVP_PKEY_DSA == id) { #else - if (FIPS_mode() && EVP_PKEY_DSA == EVP_PKEY_base_id(key)) { + if (FIPS_mode() && EVP_PKEY_DSA == id) { #endif const DSA* dsa = EVP_PKEY_get0_DSA(key); const BIGNUM* p; @@ -60,9 +60,8 @@ bool ApplyRSAOptions(const EVPKeyPointer& pkey, EVP_PKEY_CTX* pkctx, int padding, const Maybe& salt_len) { - if (EVP_PKEY_id(pkey.get()) == EVP_PKEY_RSA || - EVP_PKEY_id(pkey.get()) == EVP_PKEY_RSA2 || - EVP_PKEY_id(pkey.get()) == EVP_PKEY_RSA_PSS) { + int id = pkey.id(); + if (id == EVP_PKEY_RSA || id == EVP_PKEY_RSA2 || id == EVP_PKEY_RSA_PSS) { if (EVP_PKEY_CTX_set_rsa_padding(pkctx, padding) <= 0) return false; if (padding == RSA_PKCS1_PSS_PADDING && salt_len.IsJust()) { @@ -85,15 +84,13 @@ std::unique_ptr Node_SignFinal(Environment* env, if (!EVP_DigestFinal_ex(mdctx.get(), m, &m_len)) return nullptr; - int signed_sig_len = EVP_PKEY_size(pkey.get()); - CHECK_GE(signed_sig_len, 0); - size_t sig_len = static_cast(signed_sig_len); + size_t sig_len = pkey.size(); std::unique_ptr sig; { NoArrayBufferZeroFillScope no_zero_fill_scope(env->isolate_data()); sig = ArrayBuffer::NewBackingStore(env->isolate(), sig_len); } - EVPKeyCtxPointer pkctx(EVP_PKEY_CTX_new(pkey.get(), nullptr)); + EVPKeyCtxPointer pkctx = pkey.newCtx(); if (pkctx && EVP_PKEY_sign_init(pkctx.get()) > 0 && ApplyRSAOptions(pkey, pkctx.get(), padding, pss_salt_len) && EVP_PKEY_CTX_set_signature_md(pkctx.get(), EVP_MD_CTX_md(mdctx.get())) > @@ -120,12 +117,12 @@ std::unique_ptr Node_SignFinal(Environment* env, } int GetDefaultSignPadding(const EVPKeyPointer& m_pkey) { - return EVP_PKEY_id(m_pkey.get()) == EVP_PKEY_RSA_PSS ? RSA_PKCS1_PSS_PADDING : - RSA_PKCS1_PADDING; + return m_pkey.id() == EVP_PKEY_RSA_PSS ? 
RSA_PKCS1_PSS_PADDING + : RSA_PKCS1_PADDING; } unsigned int GetBytesOfRS(const EVPKeyPointer& pkey) { - int bits, base_id = EVP_PKEY_base_id(pkey.get()); + int bits, base_id = pkey.base_id(); if (base_id == EVP_PKEY_DSA) { const DSA* dsa_key = EVP_PKEY_get0_DSA(pkey.get()); @@ -274,23 +271,12 @@ void CheckThrow(Environment* env, SignBase::Error error) { } bool IsOneShot(const EVPKeyPointer& key) { - switch (EVP_PKEY_id(key.get())) { - case EVP_PKEY_ED25519: - case EVP_PKEY_ED448: - return true; - default: - return false; - } + return key.id() == EVP_PKEY_ED25519 || key.id() == EVP_PKEY_ED448; } bool UseP1363Encoding(const EVPKeyPointer& key, const DSASigEnc& dsa_encoding) { - switch (EVP_PKEY_id(key.get())) { - case EVP_PKEY_EC: - case EVP_PKEY_DSA: - return dsa_encoding == kSigEncP1363; - default: - return false; - } + return (key.id() == EVP_PKEY_EC || key.id() == EVP_PKEY_DSA) && + dsa_encoding == kSigEncP1363; } } // namespace @@ -384,7 +370,7 @@ void Sign::SignUpdate(const FunctionCallbackInfo& args) { Decode(args, [](Sign* sign, const FunctionCallbackInfo& args, const char* data, size_t size) { Environment* env = Environment::GetCurrent(args); - if (UNLIKELY(size > INT_MAX)) + if (size > INT_MAX) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "data is too long"); Error err = sign->Update(data, size); crypto::CheckThrow(sign->env(), err); @@ -422,7 +408,8 @@ void Sign::SignFinal(const FunctionCallbackInfo& args) { unsigned int offset = 0; auto data = KeyObjectData::GetPrivateKeyFromJs(args, &offset, true); - if (UNLIKELY(!data)) return; + if (!data) [[unlikely]] + return; const auto& key = data.GetAsymmetricKey(); if (!key) return; @@ -507,7 +494,7 @@ void Verify::VerifyUpdate(const FunctionCallbackInfo& args) { const FunctionCallbackInfo& args, const char* data, size_t size) { Environment* env = Environment::GetCurrent(args); - if (UNLIKELY(size > INT_MAX)) + if (size > INT_MAX) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "data is too long"); Error err = verify->Update(data, size); crypto::CheckThrow(verify->env(), err); @@ -530,7 +517,7 @@ SignBase::Error Verify::VerifyFinal(const EVPKeyPointer& pkey, if (!EVP_DigestFinal_ex(mdctx.get(), m, &m_len)) return kSignPublicKey; - EVPKeyCtxPointer pkctx(EVP_PKEY_CTX_new(pkey.get(), nullptr)); + EVPKeyCtxPointer pkctx = pkey.newCtx(); if (pkctx) { const int init_ret = EVP_PKEY_verify_init(pkctx.get()); if (init_ret == -2) { @@ -568,7 +555,7 @@ void Verify::VerifyFinal(const FunctionCallbackInfo& args) { } ArrayBufferOrViewContents hbuf(args[offset]); - if (UNLIKELY(!hbuf.CheckSizeInt32())) + if (!hbuf.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "buffer is too big"); int padding = GetDefaultSignPadding(pkey); @@ -657,7 +644,7 @@ Maybe SignTraits::AdditionalConfig( } ArrayBufferOrViewContents data(args[offset + 5]); - if (UNLIKELY(!data.CheckSizeInt32())) { + if (!data.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "data is too big"); return Nothing(); } @@ -695,7 +682,7 @@ Maybe SignTraits::AdditionalConfig( if (params->mode == SignConfiguration::kVerify) { ArrayBufferOrViewContents signature(args[offset + 10]); - if (UNLIKELY(!signature.CheckSizeInt32())) { + if (!signature.CheckSizeInt32()) [[unlikely]] { THROW_ERR_OUT_OF_RANGE(env, "signature is too big"); return Nothing(); } diff --git a/src/crypto/crypto_spkac.cc b/src/crypto/crypto_spkac.cc index 236de520d8dbf2..d0dcb1d7be5581 100644 --- a/src/crypto/crypto_spkac.cc +++ b/src/crypto/crypto_spkac.cc @@ -23,7 +23,7 @@ void VerifySpkac(const 
FunctionCallbackInfo& args) { ArrayBufferOrViewContents input(args[0]); if (input.empty()) return args.GetReturnValue().SetEmptyString(); - if (UNLIKELY(!input.CheckSizeInt32())) + if (!input.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "spkac is too large"); args.GetReturnValue().Set(ncrypto::VerifySpkac(input.data(), input.size())); @@ -35,7 +35,7 @@ void ExportPublicKey(const FunctionCallbackInfo& args) { ArrayBufferOrViewContents input(args[0]); if (input.empty()) return args.GetReturnValue().SetEmptyString(); - if (UNLIKELY(!input.CheckSizeInt32())) + if (!input.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "spkac is too large"); BIOPointer bio = ncrypto::ExportPublicKey(input.data(), input.size()); @@ -51,7 +51,7 @@ void ExportChallenge(const FunctionCallbackInfo& args) { ArrayBufferOrViewContents input(args[0]); if (input.empty()) return args.GetReturnValue().SetEmptyString(); - if (UNLIKELY(!input.CheckSizeInt32())) + if (!input.CheckSizeInt32()) [[unlikely]] return THROW_ERR_OUT_OF_RANGE(env, "spkac is too large"); auto cert = ByteSource::Allocated( diff --git a/src/crypto/crypto_tls.cc b/src/crypto/crypto_tls.cc index 4013c1d8e2ff68..981edd4f3f5fa1 100644 --- a/src/crypto/crypto_tls.cc +++ b/src/crypto/crypto_tls.cc @@ -141,7 +141,7 @@ void KeylogCallback(const SSL* s, const char* line) { const size_t size = strlen(line); Local line_bf = Buffer::Copy(env, line, 1 + size) .FromMaybe(Local()); - if (UNLIKELY(line_bf.IsEmpty())) + if (line_bf.IsEmpty()) [[unlikely]] return; char* data = Buffer::Data(line_bf); @@ -160,12 +160,12 @@ int NewSessionCallback(SSL* s, SSL_SESSION* sess) { // Check if session is small enough to be stored int size = i2d_SSL_SESSION(sess, nullptr); - if (UNLIKELY(size > SecureContext::kMaxSessionSize)) + if (size > SecureContext::kMaxSessionSize) [[unlikely]] return 0; // Serialize session Local session = Buffer::New(env, size).FromMaybe(Local()); - if (UNLIKELY(session.IsEmpty())) + if (session.IsEmpty()) [[unlikely]] return 0; unsigned char* session_data = @@ -181,7 +181,7 @@ int NewSessionCallback(SSL* s, SSL_SESSION* sess) { env, reinterpret_cast(session_id_data), session_id_length).FromMaybe(Local()); - if (UNLIKELY(session_id.IsEmpty())) + if (session_id.IsEmpty()) [[unlikely]] return 0; Local argv[] = { @@ -256,7 +256,7 @@ int SelectALPNCallback( MaybeLocal maybe_callback_result = w->MakeCallback(env->alpn_callback_string(), 1, &callback_arg); - if (UNLIKELY(maybe_callback_result.IsEmpty())) { + if (maybe_callback_result.IsEmpty()) [[unlikely]] { // Implies the callback didn't return, because some exception was thrown // during processing, e.g. if callback returned an invalid ALPN value. 
return SSL_TLSEXT_ERR_ALERT_FATAL; @@ -324,7 +324,7 @@ int TLSExtStatusCallback(SSL* s, void* arg) { // Outgoing response Local obj = w->ocsp_response().FromMaybe(Local()); - if (UNLIKELY(obj.IsEmpty())) + if (obj.IsEmpty()) [[unlikely]] return SSL_TLSEXT_ERR_NOACK; size_t len = obj->ByteLength(); @@ -626,7 +626,7 @@ void TLSWrap::EncOut() { return; } - if (UNLIKELY(has_active_write_issued_by_prev_listener_)) { + if (has_active_write_issued_by_prev_listener_) [[unlikely]] { Debug(this, "Returning from EncOut(), " "has_active_write_issued_by_prev_listener_ is true"); @@ -704,7 +704,7 @@ void TLSWrap::EncOut() { void TLSWrap::OnStreamAfterWrite(WriteWrap* req_wrap, int status) { Debug(this, "OnStreamAfterWrite(status = %d)", status); - if (UNLIKELY(has_active_write_issued_by_prev_listener_)) { + if (has_active_write_issued_by_prev_listener_) [[unlikely]] { Debug(this, "Notify write finish to the previous_listener_"); CHECK_EQ(write_size_, 0); // we must have restrained writes @@ -827,15 +827,19 @@ void TLSWrap::ClearOut() { unsigned long ssl_err = ERR_peek_error(); // NOLINT(runtime/int) Local context = env()->isolate()->GetCurrentContext(); - if (UNLIKELY(context.IsEmpty())) return; + if (context.IsEmpty()) [[unlikely]] + return; const std::string error_str = GetBIOError(); Local message = OneByteString( env()->isolate(), error_str.c_str(), error_str.size()); - if (UNLIKELY(message.IsEmpty())) return; + if (message.IsEmpty()) [[unlikely]] + return; error = Exception::Error(message); - if (UNLIKELY(error.IsEmpty())) return; + if (error.IsEmpty()) [[unlikely]] + return; Local obj; - if (UNLIKELY(!error->ToObject(context).ToLocal(&obj))) return; + if (!error->ToObject(context).ToLocal(&obj)) [[unlikely]] + return; const char* ls = ERR_lib_error_string(ssl_err); const char* fs = ERR_func_error_string(ssl_err); @@ -1364,7 +1368,7 @@ int TLSWrap::SelectSNIContextCallback(SSL* s, int* ad, void* arg) { Local ctx = p->object()->Get(env->context(), env->sni_context_string()) .FromMaybe(Local()); - if (UNLIKELY(ctx.IsEmpty()) || !ctx->IsObject()) + if (ctx.IsEmpty() || !ctx->IsObject()) [[unlikely]] return SSL_TLSEXT_ERR_NOACK; if (!env->secure_context_constructor_template()->HasInstance(ctx)) { @@ -1439,7 +1443,7 @@ unsigned int TLSWrap::PskServerCallback( Local identity_str = String::NewFromUtf8(env->isolate(), identity).FromMaybe(Local()); - if (UNLIKELY(identity_str.IsEmpty())) + if (identity_str.IsEmpty()) [[unlikely]] return 0; // Make sure there are no utf8 replacement symbols. 
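// The PskServerCallback changes above operate within OpenSSL's server-side
// PSK contract: the callback receives the identity sent by the client, may
// write at most max_psk_len bytes of key material, and returns the number of
// bytes written (returning 0 rejects the connection). A minimal native
// sketch of that contract, with a fixed demo identity and key
// (illustrative values only, not the Node.js implementation):
#include <openssl/ssl.h>
#include <cstring>

static unsigned int DemoPskServerCallback(SSL* ssl, const char* identity,
                                          unsigned char* psk,
                                          unsigned int max_psk_len) {
  (void)ssl;
  static const unsigned char kKey[] = {0x1a, 0x2b, 0x3c, 0x4d};
  // Unknown identity: return 0, which makes OpenSSL abort the handshake.
  if (identity == nullptr || strcmp(identity, "client1") != 0) return 0;
  // Never write past max_psk_len -- the same bounds check the Node.js
  // callback applies to the buffer handed back from the JS onpskexchange
  // handler before copying it out.
  if (sizeof(kKey) > max_psk_len) return 0;
  memcpy(psk, kKey, sizeof(kKey));
  return static_cast<unsigned int>(sizeof(kKey));
}
// Registered via SSL_CTX_set_psk_server_callback(ctx, DemoPskServerCallback).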
@@ -1454,7 +1458,7 @@ unsigned int TLSWrap::PskServerCallback( Local psk_val = p->MakeCallback(env->onpskexchange_symbol(), arraysize(argv), argv) .FromMaybe(Local()); - if (UNLIKELY(psk_val.IsEmpty()) || !psk_val->IsArrayBufferView()) + if (psk_val.IsEmpty() || !psk_val->IsArrayBufferView()) [[unlikely]] return 0; ArrayBufferViewContents psk_buf(psk_val); @@ -1487,7 +1491,7 @@ unsigned int TLSWrap::PskClientCallback( if (hint != nullptr) { Local local_hint = String::NewFromUtf8(env->isolate(), hint).FromMaybe(Local()); - if (UNLIKELY(local_hint.IsEmpty())) + if (local_hint.IsEmpty()) [[unlikely]] return 0; argv[0] = local_hint; @@ -1496,14 +1500,14 @@ unsigned int TLSWrap::PskClientCallback( Local ret = p->MakeCallback(env->onpskexchange_symbol(), arraysize(argv), argv) .FromMaybe(Local()); - if (UNLIKELY(ret.IsEmpty()) || !ret->IsObject()) + if (ret.IsEmpty() || !ret->IsObject()) [[unlikely]] return 0; Local obj = ret.As(); Local psk_val = obj->Get(env->context(), env->psk_string()) .FromMaybe(Local()); - if (UNLIKELY(psk_val.IsEmpty()) || !psk_val->IsArrayBufferView()) + if (psk_val.IsEmpty() || !psk_val->IsArrayBufferView()) [[unlikely]] return 0; ArrayBufferViewContents psk_buf(psk_val); @@ -1512,7 +1516,7 @@ unsigned int TLSWrap::PskClientCallback( Local identity_val = obj->Get(env->context(), env->identity_string()) .FromMaybe(Local()); - if (UNLIKELY(identity_val.IsEmpty()) || !identity_val->IsString()) + if (identity_val.IsEmpty() || !identity_val->IsString()) [[unlikely]] return 0; Utf8Value identity_buf(env->isolate(), identity_val); @@ -1561,7 +1565,7 @@ void TLSWrap::CertCbDone(const FunctionCallbackInfo& args) { Local object = w->object(); Local ctx = object->Get(env->context(), env->sni_context_string()) .FromMaybe(Local()); - if (UNLIKELY(ctx.IsEmpty())) + if (ctx.IsEmpty()) [[unlikely]] return; Local cons = env->secure_context_constructor_template(); @@ -1627,7 +1631,8 @@ void TLSWrap::SetKeyCert(const FunctionCallbackInfo& args) { return env->ThrowTypeError("Must give a SecureContext as first argument"); Local ctx = args[0]; - if (UNLIKELY(ctx.IsEmpty())) return; + if (ctx.IsEmpty()) [[unlikely]] + return; Local cons = env->secure_context_constructor_template(); if (cons->HasInstance(ctx)) { diff --git a/src/crypto/crypto_util.cc b/src/crypto/crypto_util.cc index 793c196f8ce538..12ee0cde089702 100644 --- a/src/crypto/crypto_util.cc +++ b/src/crypto/crypto_util.cc @@ -570,7 +570,7 @@ void ThrowCryptoError(Environment* env, #ifndef OPENSSL_NO_ENGINE void SetEngine(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - if (UNLIKELY(env->permission()->enabled())) { + if (env->permission()->enabled()) [[unlikely]] { return THROW_ERR_CRYPTO_CUSTOM_ENGINE_NOT_SUPPORTED( env, "Programmatic selection of OpenSSL engines is unsupported while the " diff --git a/src/crypto/crypto_x509.cc b/src/crypto/crypto_x509.cc index af2f953f0388db..9b9bb7be9a8dac 100644 --- a/src/crypto/crypto_x509.cc +++ b/src/crypto/crypto_x509.cc @@ -21,6 +21,7 @@ using v8::ArrayBufferView; using v8::BackingStore; using v8::Boolean; using v8::Context; +using v8::Date; using v8::EscapableHandleScope; using v8::Function; using v8::FunctionCallbackInfo; @@ -238,6 +239,18 @@ MaybeLocal GetValidTo(Environment* env, const ncrypto::X509View& view) { return ret; } +MaybeLocal GetValidFromDate(Environment* env, + const ncrypto::X509View& view) { + int64_t validFromTime = view.getValidFromTime(); + return Date::New(env->context(), validFromTime * 1000.); +} + +MaybeLocal 
GetValidToDate(Environment* env, + const ncrypto::X509View& view) { + int64_t validToTime = view.getValidToTime(); + return Date::New(env->context(), validToTime * 1000.); +} + MaybeLocal GetSerialNumber(Environment* env, const ncrypto::X509View& view) { if (auto serial = view.getSerialNumber()) { @@ -349,6 +362,26 @@ void ValidTo(const FunctionCallbackInfo& args) { } } +void ValidFromDate(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + X509Certificate* cert; + ASSIGN_OR_RETURN_UNWRAP(&cert, args.This()); + Local ret; + if (GetValidFromDate(env, cert->view()).ToLocal(&ret)) { + args.GetReturnValue().Set(ret); + } +} + +void ValidToDate(const FunctionCallbackInfo& args) { + Environment* env = Environment::GetCurrent(args); + X509Certificate* cert; + ASSIGN_OR_RETURN_UNWRAP(&cert, args.This()); + Local ret; + if (GetValidToDate(env, cert->view()).ToLocal(&ret)) { + args.GetReturnValue().Set(ret); + } +} + void SerialNumber(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); X509Certificate* cert; @@ -834,6 +867,8 @@ Local X509Certificate::GetConstructorTemplate( SetProtoMethodNoSideEffect(isolate, tmpl, "issuer", Issuer); SetProtoMethodNoSideEffect(isolate, tmpl, "validTo", ValidTo); SetProtoMethodNoSideEffect(isolate, tmpl, "validFrom", ValidFrom); + SetProtoMethodNoSideEffect(isolate, tmpl, "validToDate", ValidToDate); + SetProtoMethodNoSideEffect(isolate, tmpl, "validFromDate", ValidFromDate); SetProtoMethodNoSideEffect( isolate, tmpl, "fingerprint", Fingerprint); SetProtoMethodNoSideEffect( @@ -1001,6 +1036,8 @@ void X509Certificate::RegisterExternalReferences( registry->Register(Issuer); registry->Register(ValidTo); registry->Register(ValidFrom); + registry->Register(ValidToDate); + registry->Register(ValidFromDate); registry->Register(Fingerprint); registry->Register(Fingerprint); registry->Register(Fingerprint); diff --git a/src/debug_utils-inl.h b/src/debug_utils-inl.h index ab00589269d4db..9207f5a4724026 100644 --- a/src/debug_utils-inl.h +++ b/src/debug_utils-inl.h @@ -66,7 +66,8 @@ std::string ToBaseString(const T& value) { inline std::string SPrintFImpl(const char* format) { const char* p = strchr(format, '%'); - if (LIKELY(p == nullptr)) return format; + if (p == nullptr) [[likely]] + return format; CHECK_EQ(p[1], '%'); // Only '%%' allowed when there are no arguments. return std::string(format, p + 1) + SPrintFImpl(p + 2); @@ -137,14 +138,16 @@ inline void FORCE_INLINE Debug(EnabledDebugList* list, DebugCategory cat, const char* format, Args&&... args) { - if (!UNLIKELY(list->enabled(cat))) return; + if (!list->enabled(cat)) [[likely]] + return; FPrintF(stderr, format, std::forward(args)...); } inline void FORCE_INLINE Debug(EnabledDebugList* list, DebugCategory cat, const char* message) { - if (!UNLIKELY(list->enabled(cat))) return; + if (!list->enabled(cat)) [[likely]] + return; FPrintF(stderr, "%s", message); } @@ -193,8 +196,10 @@ inline void FORCE_INLINE Debug(AsyncWrap* async_wrap, const char* format, Args&&...
args) { DCHECK_NOT_NULL(async_wrap); - DebugCategory cat = static_cast(async_wrap->provider_type()); - if (!UNLIKELY(async_wrap->env()->enabled_debug_list()->enabled(cat))) return; + if (auto cat = static_cast(async_wrap->provider_type()); + !async_wrap->env()->enabled_debug_list()->enabled(cat)) [[likely]] { + return; + } UnconditionalAsyncWrapDebug(async_wrap, format, std::forward(args)...); } diff --git a/src/env-inl.h b/src/env-inl.h index 28a15aa741ddd4..3b041dd28ba32f 100644 --- a/src/env-inl.h +++ b/src/env-inl.h @@ -133,7 +133,7 @@ inline AliasedFloat64Array& AsyncHooks::async_ids_stack() { } v8::Local AsyncHooks::js_execution_async_resources() { - if (UNLIKELY(js_execution_async_resources_.IsEmpty())) { + if (js_execution_async_resources_.IsEmpty()) [[unlikely]] { js_execution_async_resources_.Reset( env()->isolate(), v8::Array::New(env()->isolate())); } @@ -210,13 +210,14 @@ inline bool TickInfo::has_rejection_to_warn() const { } inline Environment* Environment::GetCurrent(v8::Isolate* isolate) { - if (UNLIKELY(!isolate->InContext())) return nullptr; + if (!isolate->InContext()) [[unlikely]] + return nullptr; v8::HandleScope handle_scope(isolate); return GetCurrent(isolate->GetCurrentContext()); } inline Environment* Environment::GetCurrent(v8::Local context) { - if (UNLIKELY(!ContextEmbedderTag::IsNodeContext(context))) { + if (!ContextEmbedderTag::IsNodeContext(context)) [[unlikely]] { return nullptr; } return static_cast( @@ -260,12 +261,6 @@ inline uv_idle_t* Environment::immediate_idle_handle() { return &immediate_idle_handle_; } -inline void Environment::RegisterHandleCleanup(uv_handle_t* handle, - HandleCleanupCb cb, - void* arg) { - handle_cleanup_queue_.push_back(HandleCleanup{handle, cb, arg}); -} - template inline void Environment::CloseHandle(T* handle, OnCloseCallback callback) { handle_cleanup_waiting_++; @@ -464,6 +459,10 @@ inline double Environment::get_default_trigger_async_id() { return default_trigger_async_id; } +inline int64_t Environment::stack_trace_limit() const { + return isolate_data_->options()->stack_trace_limit; +} + inline std::shared_ptr Environment::options() { return options_; } diff --git a/src/env.cc b/src/env.cc index 665b064091d4cc..130524cad713e8 100644 --- a/src/env.cc +++ b/src/env.cc @@ -28,7 +28,6 @@ #include #include #include -#include #include #include #include @@ -153,12 +152,13 @@ void AsyncHooks::push_async_context(double async_id, bool AsyncHooks::pop_async_context(double async_id) { // In case of an exception then this may have already been reset, if the // stack was multiple MakeCallback()'s deep. - if (UNLIKELY(fields_[kStackLength] == 0)) return false; + if (fields_[kStackLength] == 0) [[unlikely]] + return false; // Ask for the async_id to be restored as a check that the stack // hasn't been corrupted.
- if (UNLIKELY(fields_[kCheck] > 0 && - async_id_fields_[kExecutionAsyncId] != async_id)) { + if (fields_[kCheck] > 0 && async_id_fields_[kExecutionAsyncId] != async_id) + [[unlikely]] { FailWithCorruptedAsyncStack(async_id); } @@ -167,8 +167,8 @@ bool AsyncHooks::pop_async_context(double async_id) { async_id_fields_[kTriggerAsyncId] = async_ids_stack_[2 * offset + 1]; fields_[kStackLength] = offset; - if (LIKELY(offset < native_execution_async_resources_.size() && - !native_execution_async_resources_[offset].IsEmpty())) { + if (offset < native_execution_async_resources_.size() && + !native_execution_async_resources_[offset].IsEmpty()) [[likely]] { #ifdef DEBUG for (uint32_t i = offset + 1; i < native_execution_async_resources_.size(); i++) { @@ -183,7 +183,7 @@ bool AsyncHooks::pop_async_context(double async_id) { } } - if (UNLIKELY(js_execution_async_resources()->Length() > offset)) { + if (js_execution_async_resources()->Length() > offset) [[unlikely]] { HandleScope handle_scope(env()->isolate()); USE(js_execution_async_resources()->Set( env()->context(), @@ -741,8 +741,7 @@ std::string Environment::GetCwd(const std::string& exec_path) { // This can fail if the cwd is deleted. In that case, fall back to // exec_path. - return exec_path.substr( - 0, exec_path.find_last_of(std::filesystem::path::preferred_separator)); + return exec_path.substr(0, exec_path.find_last_of(kPathSeparator)); } void Environment::add_refs(int64_t diff) { @@ -858,14 +857,12 @@ Environment::Environment(IsolateData* isolate_data, } } - // We are supposed to call builtin_loader_.SetEagerCompile() in - // snapshot mode here because it's beneficial to compile built-ins - // loaded in the snapshot eagerly and include the code of inner functions - // that are likely to be used by user since they are part of the core - // startup. But this requires us to start the coverage collections - // before Environment/Context creation which is not currently possible. - // TODO(joyeecheung): refactor V8ProfilerConnection classes to parse - // JSON without v8 and lift this restriction. + // Compile builtins eagerly when building the snapshot so that inner functions + // of essential builtins that are loaded in the snapshot can have faster first + // invocation. + if (isolate_data->is_building_snapshot()) { + builtin_loader()->SetEagerCompile(); + } // We'll be creating new objects so make sure we've entered the context. HandleScope handle_scope(isolate); @@ -1112,13 +1109,8 @@ void Environment::InitializeLibuv() { } } - // Register clean-up cb to be called to clean up the handles - // when the environment is freed, note that they are not cleaned in - // the one environment per process setup, but will be called in - // FreeEnvironment. - RegisterHandleCleanups(); - StartProfilerIdleNotifier(); + env_handle_initialized_ = true; } void Environment::InitializeCompileCache() { @@ -1154,7 +1146,7 @@ CompileCacheEnableResult Environment::EnableCompileCache( compile_cache_handler_ = std::move(handler); AtExit( [](void* env) { - static_cast(env)->compile_cache_handler()->Persist(); + static_cast(env)->FlushCompileCache(); }, this); } @@ -1171,6 +1163,13 @@ CompileCacheEnableResult Environment::EnableCompileCache( return result; } +void Environment::FlushCompileCache() { + if (!compile_cache_handler_ || compile_cache_handler_->cache_dir().empty()) { + return; + } + compile_cache_handler_->Persist(); +} + void Environment::ExitEnv(StopFlags::Flags flags) { // Should not access non-thread-safe methods here. 
set_stopping(true); @@ -1189,27 +1188,27 @@ void Environment::ExitEnv(StopFlags::Flags flags) { }); } -void Environment::RegisterHandleCleanups() { - HandleCleanupCb close_and_finish = [](Environment* env, uv_handle_t* handle, - void* arg) { - handle->data = env; +void Environment::ClosePerEnvHandles() { + // If LoadEnvironment and InitializeLibuv are not called, like when building + // snapshots, skip closing the per environment handles. + if (!env_handle_initialized_) { + return; + } - env->CloseHandle(handle, [](uv_handle_t* handle) { + auto close_and_finish = [&](uv_handle_t* handle) { + CloseHandle(handle, [](uv_handle_t* handle) { #ifdef DEBUG memset(handle, 0xab, uv_handle_size(handle->type)); #endif }); }; - auto register_handle = [&](uv_handle_t* handle) { - RegisterHandleCleanup(handle, close_and_finish, nullptr); - }; - register_handle(reinterpret_cast(timer_handle())); - register_handle(reinterpret_cast(immediate_check_handle())); - register_handle(reinterpret_cast(immediate_idle_handle())); - register_handle(reinterpret_cast(&idle_prepare_handle_)); - register_handle(reinterpret_cast(&idle_check_handle_)); - register_handle(reinterpret_cast(&task_queues_async_)); + close_and_finish(reinterpret_cast(timer_handle())); + close_and_finish(reinterpret_cast(immediate_check_handle())); + close_and_finish(reinterpret_cast(immediate_idle_handle())); + close_and_finish(reinterpret_cast(&idle_prepare_handle_)); + close_and_finish(reinterpret_cast(&idle_check_handle_)); + close_and_finish(reinterpret_cast(&task_queues_async_)); } void Environment::CleanupHandles() { @@ -1229,10 +1228,6 @@ void Environment::CleanupHandles() { for (HandleWrap* handle : handle_wrap_queue_) handle->Close(); - for (HandleCleanup& hc : handle_cleanup_queue_) - hc.cb_(this, hc.handle_, hc.arg_); - handle_cleanup_queue_.clear(); - while (handle_cleanup_waiting_ != 0 || request_waiting_ != 0 || !handle_wrap_queue_.IsEmpty()) { @@ -1258,9 +1253,11 @@ void Environment::PrintSyncTrace() const { fprintf( stderr, "(node:%d) WARNING: Detected use of sync API\n", uv_os_getpid()); - PrintStackTrace(isolate(), - StackTrace::CurrentStackTrace( - isolate(), stack_trace_limit(), StackTrace::kDetailed)); + PrintStackTrace( + isolate(), + StackTrace::CurrentStackTrace(isolate(), + static_cast(stack_trace_limit()), + StackTrace::kDetailed)); } MaybeLocal Environment::RunSnapshotSerializeCallback() const { @@ -1286,6 +1283,7 @@ MaybeLocal Environment::RunSnapshotDeserializeMain() const { void Environment::RunCleanup() { started_cleanup_ = true; TRACE_EVENT0(TRACING_CATEGORY_NODE1(environment), "RunCleanup"); + ClosePerEnvHandles(); // Only BaseObject's cleanups are registered as per-realm cleanup hooks now. // Defer the BaseObject cleanup after handles are cleaned up. CleanupHandles(); @@ -1404,7 +1402,7 @@ void Environment::RunAndClearNativeImmediates(bool only_refed) { head.reset(); // Destroy now so that this is also observed by try_catch. 
- if (UNLIKELY(try_catch.HasCaught())) { + if (try_catch.HasCaught()) [[unlikely]] { if (!try_catch.HasTerminated() && can_call_into_js()) errors::TriggerUncaughtException(isolate(), try_catch); @@ -1861,9 +1859,11 @@ void Environment::Exit(ExitCode exit_code) { fprintf(stderr, "WARNING: Exited the environment with code %d\n", static_cast(exit_code)); - PrintStackTrace(isolate(), - StackTrace::CurrentStackTrace( - isolate(), stack_trace_limit(), StackTrace::kDetailed)); + PrintStackTrace( + isolate(), + StackTrace::CurrentStackTrace(isolate(), + static_cast(stack_trace_limit()), + StackTrace::kDetailed)); } process_exit_handler_(this, exit_code); } @@ -2133,7 +2133,7 @@ size_t Environment::NearHeapLimitCallback(void* data, dir = Environment::GetCwd(env->exec_path_); } DiagnosticFilename name(env, "Heap", "heapsnapshot"); - std::string filename = (std::filesystem::path(dir) / (*name)).string(); + std::string filename = dir + kPathSeparator + (*name); Debug(env, DebugCategory::DIAGNOSTICS, "Start generating %s...\n", *name); @@ -2183,7 +2183,7 @@ inline size_t Environment::SelfSize() const { } void Environment::MemoryInfo(MemoryTracker* tracker) const { - // Iteratable STLs have their own sizes subtracted from the parent + // Iterable STLs have their own sizes subtracted from the parent // by default. tracker->TrackField("isolate_data", isolate_data_); tracker->TrackField("destroy_async_id_list", destroy_async_id_list_); diff --git a/src/env.h b/src/env.h index fc8dbd61525585..55124cd38e75ab 100644 --- a/src/env.h +++ b/src/env.h @@ -682,24 +682,10 @@ class Environment final : public MemoryRetainer { inline const std::vector& argv(); const std::string& exec_path() const; - typedef void (*HandleCleanupCb)(Environment* env, - uv_handle_t* handle, - void* arg); - struct HandleCleanup { - uv_handle_t* handle_; - HandleCleanupCb cb_; - void* arg_; - }; - - void RegisterHandleCleanups(); void CleanupHandles(); void Exit(ExitCode code); void ExitEnv(StopFlags::Flags flags); - - // Register clean-up cb to be called on environment destruction. - inline void RegisterHandleCleanup(uv_handle_t* handle, - HandleCleanupCb cb, - void* arg); + void ClosePerEnvHandles(); template inline void CloseHandle(T* handle, OnCloseCallback callback); @@ -995,7 +981,7 @@ class Environment final : public MemoryRetainer { inline std::shared_ptr options(); inline std::shared_ptr> inspector_host_port(); - inline int32_t stack_trace_limit() const { return 10; } + inline int64_t stack_trace_limit() const; #if HAVE_INSPECTOR void set_coverage_connection( @@ -1045,6 +1031,7 @@ class Environment final : public MemoryRetainer { // Enable built-in compile cache if it has not yet been enabled. // The cache will be persisted to disk on exit. 
CompileCacheEnableResult EnableCompileCache(const std::string& cache_dir); + void FlushCompileCache(); void RunAndClearNativeImmediates(bool only_refed = false); void RunAndClearInterrupts(); @@ -1103,6 +1090,8 @@ class Environment final : public MemoryRetainer { std::list loaded_addons_; v8::Isolate* const isolate_; IsolateData* const isolate_data_; + + bool env_handle_initialized_ = false; uv_timer_t timer_handle_; uv_check_t immediate_check_handle_; uv_idle_t immediate_idle_handle_; @@ -1215,7 +1204,6 @@ class Environment final : public MemoryRetainer { CleanableQueue cleanable_queue_; HandleWrapQueue handle_wrap_queue_; ReqWrapQueue req_wrap_queue_; - std::list handle_cleanup_queue_; int handle_cleanup_waiting_ = 0; int request_waiting_ = 0; diff --git a/src/histogram.cc b/src/histogram.cc index 4dbdea9be57214..c62a5b8952400f 100644 --- a/src/histogram.cc +++ b/src/histogram.cc @@ -99,8 +99,8 @@ void HistogramImpl::AddMethods(Isolate* isolate, Local tmpl) { void HistogramImpl::RegisterExternalReferences( ExternalReferenceRegistry* registry) { - static bool is_registerd = false; - if (is_registerd) return; + static bool is_registered = false; + if (is_registered) return; registry->Register(GetCount); registry->Register(GetCountBigInt); registry->Register(GetExceeds); @@ -132,7 +132,7 @@ void HistogramImpl::RegisterExternalReferences( registry->Register(FastGetExceeds); registry->Register(FastGetStddev); registry->Register(FastGetPercentile); - is_registerd = true; + is_registered = true; } HistogramBase::HistogramBase( @@ -169,7 +169,8 @@ void HistogramBase::RecordDelta(const FunctionCallbackInfo& args) { (*histogram)->RecordDelta(); } -void HistogramBase::FastRecordDelta(Local receiver) { +void HistogramBase::FastRecordDelta(Local unused, + Local receiver) { HistogramBase* histogram; ASSIGN_OR_RETURN_UNWRAP(&histogram, receiver); (*histogram)->RecordDelta(); @@ -189,7 +190,8 @@ void HistogramBase::Record(const FunctionCallbackInfo& args) { (*histogram)->Record(value); } -void HistogramBase::FastRecord(Local receiver, +void HistogramBase::FastRecord(Local unused, + Local receiver, const int64_t value, FastApiCallbackOptions& options) { if (value < 1) { @@ -436,7 +438,9 @@ void IntervalHistogram::Start(const FunctionCallbackInfo& args) { histogram->OnStart(args[0]->IsTrue() ? StartFlags::RESET : StartFlags::NONE); } -void IntervalHistogram::FastStart(Local receiver, bool reset) { +void IntervalHistogram::FastStart(Local unused, + Local receiver, + bool reset) { IntervalHistogram* histogram; ASSIGN_OR_RETURN_UNWRAP(&histogram, receiver); histogram->OnStart(reset ? 
StartFlags::RESET : StartFlags::NONE); @@ -448,7 +452,7 @@ void IntervalHistogram::Stop(const FunctionCallbackInfo& args) { histogram->OnStop(); } -void IntervalHistogram::FastStop(Local receiver) { +void IntervalHistogram::FastStop(Local unused, Local receiver) { IntervalHistogram* histogram; ASSIGN_OR_RETURN_UNWRAP(&histogram, receiver); histogram->OnStop(); @@ -564,42 +568,45 @@ void HistogramImpl::DoReset(const FunctionCallbackInfo& args) { (*histogram)->Reset(); } -void HistogramImpl::FastReset(Local receiver) { +void HistogramImpl::FastReset(Local unused, Local receiver) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); (*histogram)->Reset(); } -double HistogramImpl::FastGetCount(Local receiver) { +double HistogramImpl::FastGetCount(Local unused, Local receiver) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); return static_cast((*histogram)->Count()); } -double HistogramImpl::FastGetMin(Local receiver) { +double HistogramImpl::FastGetMin(Local unused, Local receiver) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); return static_cast((*histogram)->Min()); } -double HistogramImpl::FastGetMax(Local receiver) { +double HistogramImpl::FastGetMax(Local unused, Local receiver) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); return static_cast((*histogram)->Max()); } -double HistogramImpl::FastGetMean(Local receiver) { +double HistogramImpl::FastGetMean(Local unused, Local receiver) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); return (*histogram)->Mean(); } -double HistogramImpl::FastGetExceeds(Local receiver) { +double HistogramImpl::FastGetExceeds(Local unused, + Local receiver) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); return static_cast((*histogram)->Exceeds()); } -double HistogramImpl::FastGetStddev(Local receiver) { +double HistogramImpl::FastGetStddev(Local unused, + Local receiver) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); return (*histogram)->Stddev(); } -double HistogramImpl::FastGetPercentile(Local receiver, +double HistogramImpl::FastGetPercentile(Local unused, + Local receiver, const double percentile) { HistogramImpl* histogram = HistogramImpl::FromJSObject(receiver); return static_cast((*histogram)->Percentile(percentile)); diff --git a/src/histogram.h b/src/histogram.h index c3d8fe5f207274..362e82e4436a90 100644 --- a/src/histogram.h +++ b/src/histogram.h @@ -101,14 +101,22 @@ class HistogramImpl { static void GetPercentilesBigInt( const v8::FunctionCallbackInfo& args); - static void FastReset(v8::Local receiver); - static double FastGetCount(v8::Local receiver); - static double FastGetMin(v8::Local receiver); - static double FastGetMax(v8::Local receiver); - static double FastGetMean(v8::Local receiver); - static double FastGetExceeds(v8::Local receiver); - static double FastGetStddev(v8::Local receiver); - static double FastGetPercentile(v8::Local receiver, + static void FastReset(v8::Local unused, + v8::Local receiver); + static double FastGetCount(v8::Local unused, + v8::Local receiver); + static double FastGetMin(v8::Local unused, + v8::Local receiver); + static double FastGetMax(v8::Local unused, + v8::Local receiver); + static double FastGetMean(v8::Local unused, + v8::Local receiver); + static double FastGetExceeds(v8::Local unused, + v8::Local receiver); + static double FastGetStddev(v8::Local unused, + v8::Local receiver); + static double FastGetPercentile(v8::Local unused, + v8::Local receiver, const double 
percentile); static void AddMethods(v8::Isolate* isolate, @@ -158,10 +166,12 @@ class HistogramBase final : public BaseObject, public HistogramImpl { static void Add(const v8::FunctionCallbackInfo& args); static void FastRecord( + v8::Local unused, v8::Local receiver, const int64_t value, v8::FastApiCallbackOptions& options); // NOLINT(runtime/references) - static void FastRecordDelta(v8::Local receiver); + static void FastRecordDelta(v8::Local unused, + v8::Local receiver); HistogramBase( Environment* env, @@ -233,8 +243,11 @@ class IntervalHistogram final : public HandleWrap, public HistogramImpl { static void Start(const v8::FunctionCallbackInfo& args); static void Stop(const v8::FunctionCallbackInfo& args); - static void FastStart(v8::Local receiver, bool reset); - static void FastStop(v8::Local receiver); + static void FastStart(v8::Local unused, + v8::Local receiver, + bool reset); + static void FastStop(v8::Local unused, + v8::Local receiver); BaseObject::TransferMode GetTransferMode() const override { return TransferMode::kCloneable; diff --git a/src/inspector_profiler.cc b/src/inspector_profiler.cc index c7554e424be327..f09dd04ccd7f6e 100644 --- a/src/inspector_profiler.cc +++ b/src/inspector_profiler.cc @@ -11,7 +11,6 @@ #include "v8-inspector.h" #include -#include #include #include #include "simdutf.h" @@ -249,7 +248,7 @@ void V8ProfilerConnection::WriteProfile(simdjson::ondemand::object* result) { std::string filename = GetFilename(); DCHECK(!filename.empty()); - std::string path = (std::filesystem::path(directory) / filename).string(); + std::string path = directory + kPathSeparator + filename; WriteResult(env_, path.c_str(), profile); } @@ -305,7 +304,7 @@ void V8CoverageConnection::WriteProfile(simdjson::ondemand::object* result) { std::string filename = GetFilename(); DCHECK(!filename.empty()); - std::string path = (std::filesystem::path(directory) / filename).string(); + std::string path = directory + kPathSeparator + filename; // Only insert source map cache when there's source map data at all. if (!source_map_cache_v->IsUndefined()) { diff --git a/src/module_wrap.cc b/src/module_wrap.cc index 48b61e8b760070..e2252639cf4518 100644 --- a/src/module_wrap.cc +++ b/src/module_wrap.cc @@ -151,7 +151,7 @@ Local ModuleWrap::GetHostDefinedOptions( } // new ModuleWrap(url, context, source, lineOffset, columnOffset[, cachedData]); -// new ModuleWrap(url, context, source, lineOffset, columOffset, +// new ModuleWrap(url, context, source, lineOffset, columnOffset, // idSymbol); // new ModuleWrap(url, context, exportNames, evaluationCallback[, cjsModule]) void ModuleWrap::New(const FunctionCallbackInfo& args) { @@ -191,9 +191,9 @@ void ModuleWrap::New(const FunctionCallbackInfo& args) { // cjsModule]) CHECK(args[3]->IsFunction()); } else { - // new ModuleWrap(url, context, source, lineOffset, columOffset[, + // new ModuleWrap(url, context, source, lineOffset, columnOffset[, // cachedData]); - // new ModuleWrap(url, context, source, lineOffset, columOffset, + // new ModuleWrap(url, context, source, lineOffset, columnOffset, // idSymbol); CHECK(args[2]->IsString()); CHECK(args[3]->IsNumber()); diff --git a/src/node.cc b/src/node.cc index ccc1085a84b214..1a2a43bdd37441 100644 --- a/src/node.cc +++ b/src/node.cc @@ -543,7 +543,7 @@ void ResetSignalHandlers() { continue; act.sa_handler = (nr == SIGPIPE || nr == SIGXFSZ) ? 
SIG_IGN : SIG_DFL; if (act.sa_handler == SIG_DFL) { - // The only bad handler value we can inhert from before exec is SIG_IGN + // The only bad handler value we can inherit from before exec is SIG_IGN // (any actual function pointer is reset to SIG_DFL during exec). // If that's the case, we want to reset it back to SIG_DFL. // However, it's also possible that an embeder (or an LD_PRELOAD-ed @@ -1070,14 +1070,6 @@ InitializeOncePerProcessInternal(const std::vector& args, } if (!per_process::cli_options->run.empty()) { - // TODO(@anonrig): Handle NODE_NO_WARNINGS, NODE_REDIRECT_WARNINGS, - // --disable-warning and --redirect-warnings. - if (per_process::cli_options->per_isolate->per_env->warnings) { - fprintf(stderr, - "ExperimentalWarning: Task runner is an experimental feature and " - "might change at any time\n\n"); - } - auto positional_args = task_runner::GetPositionalArgs(args); result->early_return_ = true; task_runner::RunTask( diff --git a/src/node_buffer.cc b/src/node_buffer.cc index 2302e8d94da325..cd51d9acf9540d 100644 --- a/src/node_buffer.cc +++ b/src/node_buffer.cc @@ -312,7 +312,7 @@ MaybeLocal New(Isolate* isolate, if (length > 0) { store = ArrayBuffer::NewBackingStore(isolate, length); - if (UNLIKELY(!store)) { + if (!store) [[unlikely]] { THROW_ERR_MEMORY_ALLOCATION_FAILED(isolate); return Local(); } @@ -325,7 +325,7 @@ MaybeLocal New(Isolate* isolate, enc); CHECK(actual <= length); - if (LIKELY(actual > 0)) { + if (actual > 0) [[likely]] { if (actual < length) { std::unique_ptr old_store = std::move(store); store = ArrayBuffer::NewBackingStore(isolate, actual); @@ -335,8 +335,9 @@ MaybeLocal New(Isolate* isolate, } Local buf = ArrayBuffer::New(isolate, std::move(store)); Local obj; - if (UNLIKELY(!New(isolate, buf, 0, actual).ToLocal(&obj))) - return MaybeLocal(); + if (!New(isolate, buf, 0, actual).ToLocal(&obj)) [[unlikely]] { + return {}; + } return scope.Escape(obj); } } @@ -1440,58 +1441,6 @@ void CopyArrayBuffer(const FunctionCallbackInfo& args) { memcpy(dest, src, bytes_to_copy); } -size_t convert_latin1_to_utf8_s(const char* src, - size_t src_len, - char* dst, - size_t dst_len) noexcept { - size_t src_pos = 0; - size_t dst_pos = 0; - - const auto safe_len = std::min(src_len, dst_len >> 1); - if (safe_len > 16) { - // convert_latin1_to_utf8 will never write more than input length * 2. - dst_pos += simdutf::convert_latin1_to_utf8(src, safe_len, dst); - src_pos += safe_len; - } - - // Based on: - // https://github.com/simdutf/simdutf/blob/master/src/scalar/latin1_to_utf8/latin1_to_utf8.h - // with an upper limit on the number of bytes to write. 
- - const auto src_ptr = reinterpret_cast(src); - const auto dst_ptr = reinterpret_cast(dst); - - size_t skip_pos = src_pos; - while (src_pos < src_len && dst_pos < dst_len) { - if (skip_pos <= src_pos && src_pos + 16 <= src_len && - dst_pos + 16 <= dst_len) { - uint64_t v1; - memcpy(&v1, src_ptr + src_pos + 0, 8); - uint64_t v2; - memcpy(&v2, src_ptr + src_pos + 8, 8); - if (((v1 | v2) & UINT64_C(0x8080808080808080)) == 0) { - memcpy(dst_ptr + dst_pos, src_ptr + src_pos, 16); - dst_pos += 16; - src_pos += 16; - } else { - skip_pos = src_pos + 16; - } - } else { - const auto byte = src_ptr[src_pos++]; - if ((byte & 0x80) == 0) { - dst_ptr[dst_pos++] = byte; - } else if (dst_pos + 2 <= dst_len) { - dst_ptr[dst_pos++] = (byte >> 6) | 0b11000000; - dst_ptr[dst_pos++] = (byte & 0b111111) | 0b10000000; - } else { - break; - } - } - } - - return dst_pos; -} - template uint32_t WriteOneByteString(const char* src, uint32_t src_len, @@ -1502,7 +1451,7 @@ uint32_t WriteOneByteString(const char* src, } if (encoding == UTF8) { - return convert_latin1_to_utf8_s(src, src_len, dst, dst_len); + return simdutf::convert_latin1_to_utf8_safe(src, src_len, dst, dst_len); } else if (encoding == LATIN1 || encoding == ASCII) { const auto size = std::min(src_len, dst_len); memcpy(dst, src, size); diff --git a/src/node_builtins.cc b/src/node_builtins.cc index 2bc7155f7c075e..1bec44f6f29b0b 100644 --- a/src/node_builtins.cc +++ b/src/node_builtins.cc @@ -132,6 +132,9 @@ BuiltinLoader::BuiltinCategories BuiltinLoader::GetBuiltinCategories() const { "internal/http2/core", "internal/http2/compat", "internal/streams/lazy_transform", #endif // !HAVE_OPENSSL +#if !NODE_OPENSSL_HAS_QUIC + "internal/quic/quic", +#endif // !NODE_OPENSSL_HAS_QUIC "sqlite", // Experimental. "sys", // Deprecated. "wasi", // Experimental. 
@@ -185,7 +188,7 @@ MaybeLocal BuiltinLoader::LoadBuiltinSource(Isolate* isolate, auto source = source_.read(); #ifndef NODE_BUILTIN_MODULES_PATH const auto source_it = source->find(id); - if (UNLIKELY(source_it == source->end())) { + if (source_it == source->end()) [[unlikely]] { fprintf(stderr, "Cannot find native builtin: \"%s\".\n", id); ABORT(); } diff --git a/src/node_context_data.h b/src/node_context_data.h index 482e8b9fd3a1a8..d81c75daaae47b 100644 --- a/src/node_context_data.h +++ b/src/node_context_data.h @@ -139,16 +139,16 @@ class ContextEmbedderTag { } static inline bool IsNodeContext(v8::Local context) { - if (UNLIKELY(context.IsEmpty())) { + if (context.IsEmpty()) [[unlikely]] { return false; } - if (UNLIKELY(context->GetNumberOfEmbedderDataFields() <= - ContextEmbedderIndex::kContextTag)) { + if (context->GetNumberOfEmbedderDataFields() <= + ContextEmbedderIndex::kContextTag) [[unlikely]] { return false; } - if (UNLIKELY(context->GetAlignedPointerFromEmbedderData( - ContextEmbedderIndex::kContextTag) != - ContextEmbedderTag::kNodeContextTagPtr)) { + if (context->GetAlignedPointerFromEmbedderData( + ContextEmbedderIndex::kContextTag) != + ContextEmbedderTag::kNodeContextTagPtr) [[unlikely]] { return false; } return true; diff --git a/src/node_contextify.cc b/src/node_contextify.cc index fc137486f5e6ba..5f4529262a464e 100644 --- a/src/node_contextify.cc +++ b/src/node_contextify.cc @@ -55,7 +55,7 @@ using v8::Int32; using v8::Integer; using v8::Intercepted; using v8::Isolate; -using v8::Just; +using v8::JustVoid; using v8::KeyCollectionMode; using v8::Local; using v8::Maybe; @@ -1104,7 +1104,7 @@ void ContextifyScript::New(const FunctionCallbackInfo& args) { TRACE_EVENT_END0(TRACING_CATEGORY_NODE2(vm, script), "ContextifyScript::New"); } -Maybe StoreCodeCacheResult( +Maybe StoreCodeCacheResult( Environment* env, Local target, ScriptCompiler::CompileOptions compile_options, @@ -1113,7 +1113,7 @@ Maybe StoreCodeCacheResult( std::unique_ptr new_cached_data) { Local context; if (!target->GetCreationContext().ToLocal(&context)) { - return Nothing(); + return Nothing(); } if (compile_options == ScriptCompiler::kConsumeCodeCache) { if (target @@ -1122,7 +1122,7 @@ Maybe StoreCodeCacheResult( env->cached_data_rejected_string(), Boolean::New(env->isolate(), source.GetCachedData()->rejected)) .IsNothing()) { - return Nothing(); + return Nothing(); } } if (produce_cached_data) { @@ -1134,7 +1134,7 @@ Maybe StoreCodeCacheResult( new_cached_data->length); if (target->Set(context, env->cached_data_string(), buf.ToLocalChecked()) .IsNothing()) { - return Nothing(); + return Nothing(); } } if (target @@ -1142,10 +1142,10 @@ Maybe StoreCodeCacheResult( env->cached_data_produced_string(), Boolean::New(env->isolate(), cached_data_produced)) .IsNothing()) { - return Nothing(); + return Nothing(); } } - return Just(true); + return JustVoid(); } // TODO(RaisinTen): Reuse in ContextifyContext::CompileFunction(). 
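The Maybe<void> conversions in this patch (StoreCodeCacheResult here, and KVStore::AssignToObject and JSTransferable::FinalizeTransferRead further down) all apply the same V8 idiom: when a Maybe<bool> payload can only ever be true, Maybe<void> states the intent directly, with Nothing<void>() meaning "a JavaScript exception is pending" and JustVoid() meaning success. A minimal sketch of the idiom, not taken from this patch (SetAndCheck is a hypothetical helper):

#include "v8.h"

// Hypothetical helper: performs obj[key] = value and maps any failure to the
// empty Maybe, so callers can bail out with IsNothing() and let the pending
// JS exception propagate.
v8::Maybe<void> SetAndCheck(v8::Local<v8::Context> context,
                            v8::Local<v8::Object> obj,
                            v8::Local<v8::Name> key,
                            v8::Local<v8::Value> value) {
  bool ok = false;
  if (!obj->Set(context, key, value).To(&ok) || !ok) {
    return v8::Nothing<void>();  // Set() threw or reported failure
  }
  return v8::JustVoid();  // success; there is no payload to carry
}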
@@ -1271,10 +1271,10 @@ bool ContextifyScript::EvalMachine(Local context, #if HAVE_INSPECTOR if (break_on_first_line) { - if (UNLIKELY(!env->permission()->is_granted( - env, - permission::PermissionScope::kInspector, - "PauseOnNextJavascriptStatement"))) { + if (!env->permission()->is_granted(env, + permission::PermissionScope::kInspector, + "PauseOnNextJavascriptStatement")) + [[unlikely]] { node::permission::Permission::ThrowAccessDenied( env, permission::PermissionScope::kInspector, diff --git a/src/node_contextify.h b/src/node_contextify.h index 2581db0d7df568..e8a709bad1c88d 100644 --- a/src/node_contextify.h +++ b/src/node_contextify.h @@ -177,7 +177,7 @@ class ContextifyScript : public BaseObject { v8::Global script_; }; -v8::Maybe StoreCodeCacheResult( +v8::Maybe StoreCodeCacheResult( Environment* env, v8::Local target, v8::ScriptCompiler::CompileOptions compile_options, diff --git a/src/node_env_var.cc b/src/node_env_var.cc index fdf86f17d460f5..d19d11dc714e08 100644 --- a/src/node_env_var.cc +++ b/src/node_env_var.cc @@ -19,7 +19,6 @@ using v8::HandleScope; using v8::Integer; using v8::Intercepted; using v8::Isolate; -using v8::Just; using v8::JustVoid; using v8::Local; using v8::Maybe; @@ -320,7 +319,7 @@ Maybe KVStore::AssignFromObject(Local context, // TODO(bnoordhuis) Not super efficient but called infrequently. Not worth // the trouble yet of specializing for RealEnvStore and MapKVStore. -Maybe KVStore::AssignToObject(v8::Isolate* isolate, +Maybe KVStore::AssignToObject(v8::Isolate* isolate, v8::Local context, v8::Local object) { HandleScope scope(isolate); @@ -333,9 +332,9 @@ Maybe KVStore::AssignToObject(v8::Isolate* isolate, ok = ok && key->IsString(); ok = ok && Get(isolate, key.As()).ToLocal(&value); ok = ok && object->Set(context, key, value).To(&ok); - if (!ok) return Nothing(); + if (!ok) return Nothing(); } - return Just(true); + return JustVoid(); } static Intercepted EnvGetter(Local property, diff --git a/src/node_errors.h b/src/node_errors.h index 171e2a2e09011c..d333422ae8cd74 100644 --- a/src/node_errors.h +++ b/src/node_errors.h @@ -108,6 +108,7 @@ void OOMErrorHandler(const char* location, const v8::OOMDetails& details); V(ERR_VM_MODULE_CACHED_DATA_REJECTED, Error) \ V(ERR_VM_MODULE_LINK_FAILURE, Error) \ V(ERR_WASI_NOT_STARTED, Error) \ + V(ERR_ZLIB_INITIALIZATION_FAILED, Error) \ V(ERR_WORKER_INIT_FAILED, Error) \ V(ERR_PROTO_ACCESS, Error) @@ -118,7 +119,11 @@ void OOMErrorHandler(const char* location, const v8::OOMDetails& details); std::string message = SPrintF(format, std::forward(args)...); \ v8::Local js_code = OneByteString(isolate, #code); \ v8::Local js_msg = \ - OneByteString(isolate, message.c_str(), message.length()); \ + v8::String::NewFromUtf8(isolate, \ + message.c_str(), \ + v8::NewStringType::kNormal, \ + message.length()) \ + .ToLocalChecked(); \ v8::Local e = v8::Exception::type(js_msg) \ ->ToObject(isolate->GetCurrentContext()) \ .ToLocalChecked(); \ diff --git a/src/node_external_reference.h b/src/node_external_reference.h index 38a4ff7e6c2c99..8d49a119c21832 100644 --- a/src/node_external_reference.h +++ b/src/node_external_reference.h @@ -12,19 +12,25 @@ namespace node { using CFunctionCallbackWithOneByteString = uint32_t (*)(v8::Local, const v8::FastOneByteString&); -using CFunctionCallback = void (*)(v8::Local receiver); +using CFunctionCallback = void (*)(v8::Local unused, + v8::Local receiver); using CFunctionCallbackReturnDouble = - double (*)(v8::Local receiver); + double (*)(v8::Local unused, v8::Local receiver); using 
CFunctionCallbackReturnInt32 = - int32_t (*)(v8::Local receiver, + int32_t (*)(v8::Local unused, + v8::Local receiver, const v8::FastOneByteString& input, // NOLINTNEXTLINE(runtime/references) This is V8 api. v8::FastApiCallbackOptions& options); using CFunctionCallbackValueReturnDouble = double (*)(v8::Local receiver); -using CFunctionCallbackWithInt64 = void (*)(v8::Local receiver, +using CFunctionCallbackValueReturnDoubleUnusedReceiver = + double (*)(v8::Local unused, v8::Local receiver); +using CFunctionCallbackWithInt64 = void (*)(v8::Local unused, + v8::Local receiver, int64_t); -using CFunctionCallbackWithBool = void (*)(v8::Local receiver, +using CFunctionCallbackWithBool = void (*)(v8::Local unused, + v8::Local receiver, bool); using CFunctionCallbackWithString = bool (*)(v8::Local, const v8::FastOneByteString& input); @@ -50,11 +56,15 @@ using CFunctionCallbackWithUint8ArrayUint32Int64Bool = using CFunctionWithUint32 = uint32_t (*)(v8::Local, const uint32_t input); using CFunctionWithDoubleReturnDouble = double (*)(v8::Local, + v8::Local, const double); using CFunctionWithInt64Fallback = void (*)(v8::Local, + v8::Local, const int64_t, v8::FastApiCallbackOptions&); -using CFunctionWithBool = void (*)(v8::Local, bool); +using CFunctionWithBool = void (*)(v8::Local, + v8::Local, + bool); using CFunctionWriteString = uint32_t (*)(v8::Local receiver, @@ -83,6 +93,7 @@ class ExternalReferenceRegistry { V(CFunctionCallbackReturnDouble) \ V(CFunctionCallbackReturnInt32) \ V(CFunctionCallbackValueReturnDouble) \ + V(CFunctionCallbackValueReturnDoubleUnusedReceiver) \ V(CFunctionCallbackWithInt64) \ V(CFunctionCallbackWithBool) \ V(CFunctionCallbackWithString) \ diff --git a/src/node_file.cc b/src/node_file.cc index 0bb70eb0fcd42d..6b98a130999909 100644 --- a/src/node_file.cc +++ b/src/node_file.cc @@ -82,6 +82,12 @@ using v8::Value; # define S_ISDIR(mode) (((mode) & S_IFMT) == S_IFDIR) #endif +#ifdef __POSIX__ +constexpr char kPathSeparator = '/'; +#else +const char* const kPathSeparator = "\\/"; +#endif + inline int64_t GetOffset(Local value) { return IsSafeJsInt(value) ? value.As()->Value() : -1; } @@ -1028,8 +1034,9 @@ static void ExistsSync(const FunctionCallbackInfo& args) { static void InternalModuleStat(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); - CHECK(args[0]->IsString()); - BufferValue path(env->isolate(), args[0]); + CHECK_GE(args.Length(), 2); + CHECK(args[1]->IsString()); + BufferValue path(env->isolate(), args[1]); CHECK_NOT_NULL(*path); ToNamespacedPath(env, &path); THROW_IF_INSUFFICIENT_PERMISSIONS( @@ -1047,6 +1054,7 @@ static void InternalModuleStat(const FunctionCallbackInfo& args) { } static int32_t FastInternalModuleStat( + Local unused, Local recv, const FastOneByteString& input, // NOLINTNEXTLINE(runtime/references) This is V8 api. 
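The hunks above show the fast-API signature change that recurs throughout this patch: every fast callback gains a leading Local parameter (consistently named unused) ahead of the receiver that is actually unwrapped. A compilable sketch of the general shape such a fast/slow pair takes; this is illustrative only, FastAdd and SlowAdd are hypothetical, and the registration details are condensed into comments:

#include "v8-fast-api-calls.h"
#include "v8.h"

// Fast path: a plain C++ call with no JS heap access and no exceptions.
// The first parameter is the receiver slot these embedder callbacks ignore
// (hence the conventional name "unused"); the second is the object of
// interest.
int32_t FastAdd(v8::Local<v8::Value> unused,
                v8::Local<v8::Value> receiver,
                int32_t a,
                int32_t b) {
  return a + b;
}

// Slow path: the full FunctionCallback, used whenever V8 cannot take the
// fast call (e.g. argument types don't match, or a fallback is requested).
void SlowAdd(const v8::FunctionCallbackInfo<v8::Value>& args) {
  args.GetReturnValue().Set(args[0].As<v8::Int32>()->Value() +
                            args[1].As<v8::Int32>()->Value());
}

// Both entry points are then attached to one FunctionTemplate, roughly:
//   static const v8::CFunction kFastAdd = v8::CFunction::Make(FastAdd);
//   v8::FunctionTemplate::New(isolate, SlowAdd, {}, {}, 0,
//                             v8::ConstructorBehavior::kThrow,
//                             v8::SideEffectType::kHasSideEffect, &kFastAdd);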
@@ -1062,8 +1070,9 @@ static int32_t FastInternalModuleStat( Environment* env = Environment::GetCurrent(recv->GetCreationContextChecked()); auto path = std::filesystem::path(input.data, input.data + input.length); - if (UNLIKELY(!env->permission()->is_granted( - env, permission::PermissionScope::kFileSystemRead, path.string()))) { + if (!env->permission()->is_granted( + env, permission::PermissionScope::kFileSystemRead, path.string())) + [[unlikely]] { options.fallback = true; return -1; } @@ -1646,9 +1655,9 @@ int MKDirpSync(uv_loop_t* loop, return err; } case UV_ENOENT: { - auto filesystem_path = std::filesystem::path(next_path); - if (filesystem_path.has_parent_path()) { - std::string dirname = filesystem_path.parent_path().string(); + std::string dirname = + next_path.substr(0, next_path.find_last_of(kPathSeparator)); + if (dirname != next_path) { req_wrap->continuation_data()->PushPath(std::move(next_path)); req_wrap->continuation_data()->PushPath(std::move(dirname)); } else if (req_wrap->continuation_data()->paths().empty()) { @@ -1726,9 +1735,9 @@ int MKDirpAsync(uv_loop_t* loop, break; } case UV_ENOENT: { - auto filesystem_path = std::filesystem::path(path); - if (filesystem_path.has_parent_path()) { - std::string dirname = filesystem_path.parent_path().string(); + std::string dirname = + path.substr(0, path.find_last_of(kPathSeparator)); + if (dirname != path) { req_wrap->continuation_data()->PushPath(path); req_wrap->continuation_data()->PushPath(std::move(dirname)); } else if (req_wrap->continuation_data()->paths().empty()) { @@ -3035,15 +3044,16 @@ static void CpSyncCheckPaths(const FunctionCallbackInfo& args) { ToNamespacedPath(env, &src); THROW_IF_INSUFFICIENT_PERMISSIONS( env, permission::PermissionScope::kFileSystemRead, src.ToStringView()); - auto src_path = std::filesystem::path(src.ToStringView()); + + auto src_path = std::filesystem::path(src.ToU8StringView()); BufferValue dest(isolate, args[1]); CHECK_NOT_NULL(*dest); ToNamespacedPath(env, &dest); THROW_IF_INSUFFICIENT_PERMISSIONS( env, permission::PermissionScope::kFileSystemWrite, dest.ToStringView()); - auto dest_path = std::filesystem::path(dest.ToStringView()); + auto dest_path = std::filesystem::path(dest.ToU8StringView()); bool dereference = args[2]->IsTrue(); bool recursive = args[3]->IsTrue(); @@ -3051,6 +3061,7 @@ static void CpSyncCheckPaths(const FunctionCallbackInfo& args) { auto src_status = dereference ? std::filesystem::symlink_status(src_path, error_code) : std::filesystem::status(src_path, error_code); + if (error_code) { #ifdef _WIN32 int errorno = uv_translate_sys_error(error_code.value()); @@ -3074,34 +3085,44 @@ static void CpSyncCheckPaths(const FunctionCallbackInfo& args) { if (!error_code) { // Check if src and dest are identical. 
if (std::filesystem::equivalent(src_path, dest_path)) { - std::string message = - "src and dest cannot be the same " + dest_path.string(); - return THROW_ERR_FS_CP_EINVAL(env, message.c_str()); + std::u8string message = + u8"src and dest cannot be the same " + dest_path.u8string(); + return THROW_ERR_FS_CP_EINVAL( + env, reinterpret_cast(message.c_str())); } const bool dest_is_dir = dest_status.type() == std::filesystem::file_type::directory; if (src_is_dir && !dest_is_dir) { - std::string message = "Cannot overwrite non-directory " + - src_path.string() + " with directory " + - dest_path.string(); - return THROW_ERR_FS_CP_DIR_TO_NON_DIR(env, message.c_str()); + std::u8string message = u8"Cannot overwrite non-directory " + + src_path.u8string() + u8" with directory " + + dest_path.u8string(); + return THROW_ERR_FS_CP_DIR_TO_NON_DIR( + env, reinterpret_cast(message.c_str())); } if (!src_is_dir && dest_is_dir) { - std::string message = "Cannot overwrite directory " + dest_path.string() + - " with non-directory " + src_path.string(); - return THROW_ERR_FS_CP_NON_DIR_TO_DIR(env, message.c_str()); + std::u8string message = u8"Cannot overwrite directory " + + dest_path.u8string() + u8" with non-directory " + + src_path.u8string(); + return THROW_ERR_FS_CP_NON_DIR_TO_DIR( + env, reinterpret_cast(message.c_str())); } } - std::string dest_path_str = dest_path.string(); + std::u8string dest_path_str = dest_path.u8string(); + std::u8string src_path_str = src_path.u8string(); + if (!src_path_str.ends_with(std::filesystem::path::preferred_separator)) { + src_path_str += std::filesystem::path::preferred_separator; + } // Check if dest_path is a subdirectory of src_path. - if (src_is_dir && dest_path_str.starts_with(src_path.string())) { - std::string message = "Cannot copy " + src_path.string() + - " to a subdirectory of self " + dest_path.string(); - return THROW_ERR_FS_CP_EINVAL(env, message.c_str()); + if (src_is_dir && dest_path_str.starts_with(src_path_str)) { + std::u8string message = u8"Cannot copy " + src_path.u8string() + + u8" to a subdirectory of self " + + dest_path.u8string(); + return THROW_ERR_FS_CP_EINVAL( + env, reinterpret_cast(message.c_str())); } auto dest_parent = dest_path.parent_path(); @@ -3112,9 +3133,11 @@ static void CpSyncCheckPaths(const FunctionCallbackInfo& args) { dest_parent.parent_path() != dest_parent) { if (std::filesystem::equivalent( src_path, dest_path.parent_path(), error_code)) { - std::string message = "Cannot copy " + src_path.string() + - " to a subdirectory of self " + dest_path.string(); - return THROW_ERR_FS_CP_EINVAL(env, message.c_str()); + std::u8string message = u8"Cannot copy " + src_path.u8string() + + u8" to a subdirectory of self " + + dest_path.u8string(); + return THROW_ERR_FS_CP_EINVAL( + env, reinterpret_cast(message.c_str())); } // If equivalent fails, it's highly likely that dest_parent does not exist @@ -3126,25 +3149,29 @@ static void CpSyncCheckPaths(const FunctionCallbackInfo& args) { } if (src_is_dir && !recursive) { - std::string message = - "Recursive option not enabled, cannot copy a directory: " + - src_path.string(); - return THROW_ERR_FS_EISDIR(env, message.c_str()); + std::u8string message = + u8"Recursive option not enabled, cannot copy a directory: " + + src_path.u8string(); + return THROW_ERR_FS_EISDIR(env, + reinterpret_cast(message.c_str())); } switch (src_status.type()) { case std::filesystem::file_type::socket: { - std::string message = "Cannot copy a socket file: " + dest_path.string(); - return THROW_ERR_FS_CP_SOCKET(env, 
message.c_str()); + std::u8string message = u8"Cannot copy a socket file: " + dest_path_str; + return THROW_ERR_FS_CP_SOCKET( + env, reinterpret_cast(message.c_str())); } case std::filesystem::file_type::fifo: { - std::string message = "Cannot copy a FIFO pipe: " + dest_path.string(); - return THROW_ERR_FS_CP_FIFO_PIPE(env, message.c_str()); + std::u8string message = u8"Cannot copy a FIFO pipe: " + dest_path_str; + return THROW_ERR_FS_CP_FIFO_PIPE( + env, reinterpret_cast(message.c_str())); } case std::filesystem::file_type::unknown: { - std::string message = - "Cannot copy an unknown file type: " + dest_path.string(); - return THROW_ERR_FS_CP_UNKNOWN(env, message.c_str()); + std::u8string message = + u8"Cannot copy an unknown file type: " + dest_path_str; + return THROW_ERR_FS_CP_UNKNOWN( + env, reinterpret_cast(message.c_str())); } default: break; @@ -3215,37 +3242,18 @@ void BindingData::LegacyMainResolve(const FunctionCallbackInfo& args) { Environment* env = Environment::GetCurrent(args); auto isolate = env->isolate(); - Utf8Value utf8_package_json_url(isolate, args[0]); - auto package_json_url = - ada::parse(utf8_package_json_url.ToStringView()); - - if (!package_json_url) { - THROW_ERR_INVALID_URL(isolate, "Invalid URL"); - return; - } + auto utf8_package_path = Utf8Value(isolate, args[0]).ToString(); std::string package_initial_file = ""; - ada::result file_path_url; std::optional initial_file_path; std::string file_path; if (args.Length() >= 2 && args[1]->IsString()) { auto package_config_main = Utf8Value(isolate, args[1]).ToString(); - file_path_url = ada::parse( - std::string("./") + package_config_main, &package_json_url.value()); - - if (!file_path_url) { - THROW_ERR_INVALID_URL(isolate, "Invalid URL"); - return; - } - - initial_file_path = node::url::FileURLToPath(env, *file_path_url); - if (!initial_file_path.has_value()) { - return; - } - + initial_file_path = + PathResolve(env, {utf8_package_path, package_config_main}); FromNamespacedPath(&initial_file_path.value()); package_initial_file = *initial_file_path; @@ -3276,15 +3284,7 @@ void BindingData::LegacyMainResolve(const FunctionCallbackInfo& args) { } } - file_path_url = - ada::parse("./index", &package_json_url.value()); - - if (!file_path_url) { - THROW_ERR_INVALID_URL(isolate, "Invalid URL"); - return; - } - - initial_file_path = node::url::FileURLToPath(env, *file_path_url); + initial_file_path = PathResolve(env, {utf8_package_path, "./index"}); if (!initial_file_path.has_value()) { return; } diff --git a/src/node_http2.cc b/src/node_http2.cc index a07d3ce40d9ac8..593c8b5f07a2a4 100644 --- a/src/node_http2.cc +++ b/src/node_http2.cc @@ -208,6 +208,14 @@ Http2Options::Http2Options(Http2State* http2_state, SessionType type) { option, static_cast(buffer[IDX_OPTIONS_MAX_SETTINGS])); } + + if ((flags & (1 << IDX_OPTIONS_STREAM_RESET_BURST)) && + (flags & (1 << IDX_OPTIONS_STREAM_RESET_RATE))) { + nghttp2_option_set_stream_reset_rate_limit( + option, + static_cast(buffer[IDX_OPTIONS_STREAM_RESET_BURST]), + static_cast(buffer[IDX_OPTIONS_STREAM_RESET_RATE])); + } } #define GRABSETTING(entries, count, name) \ @@ -732,8 +740,9 @@ MaybeLocal Http2SessionPerformanceEntryTraits::GetDetails( void Http2Stream::EmitStatistics() { CHECK_NOT_NULL(session()); - if (LIKELY(!HasHttp2Observer(env()))) + if (!HasHttp2Observer(env())) [[likely]] { return; + } double start = statistics_.start_time / 1e6; double duration = (PERFORMANCE_NOW() / 1e6) - start; @@ -752,9 +761,9 @@ void Http2Stream::EmitStatistics() { } void 
Http2Session::EmitStatistics() { - if (LIKELY(!HasHttp2Observer(env()))) + if (!HasHttp2Observer(env())) [[likely]] { return; - + } double start = statistics_.start_time / 1e6; double duration = (PERFORMANCE_NOW() / 1e6) - start; @@ -944,7 +953,7 @@ void Http2Session::ConsumeHTTP2Data() { } done: - if (UNLIKELY(ret < 0)) { + if (ret < 0) [[unlikely]] { Isolate* isolate = env()->isolate(); Debug(this, "fatal error receiving data: %d (%s)", @@ -991,10 +1000,10 @@ int Http2Session::OnBeginHeadersCallback(nghttp2_session* handle, BaseObjectPtr<Http2Stream> stream = session->FindStream(id); // The common case is that we're creating a new stream. The less likely // case is that we're receiving a set of trailers - if (LIKELY(!stream)) { - if (UNLIKELY(!session->CanAddStream() || - Http2Stream::New(session, id, frame->headers.cat) == - nullptr)) { + if (!stream) [[likely]] { + if (!session->CanAddStream() || + Http2Stream::New(session, id, frame->headers.cat) == nullptr) + [[unlikely]] { if (session->rejected_stream_count_++ > session->js_fields_->max_rejected_streams) return NGHTTP2_ERR_CALLBACK_FAILURE; @@ -1029,7 +1038,7 @@ int Http2Session::OnHeaderCallback(nghttp2_session* handle, // If stream is null at this point, either something odd has happened // or the stream was closed locally while header processing was occurring. // either way, do not proceed and close the stream. - if (UNLIKELY(!stream)) + if (!stream) [[unlikely]] return NGHTTP2_ERR_TEMPORAL_CALLBACK_FAILURE; // If the stream has already been destroyed, ignore. @@ -1300,10 +1309,11 @@ int Http2Session::OnDataChunkReceived(nghttp2_session* handle, // Since it has access to the original socket buffer from which the data // was read in the first place, it can use that to minimize ArrayBuffer // allocations. - if (LIKELY(buf.base == nullptr)) + if (buf.base == nullptr) [[likely]] { buf.base = reinterpret_cast<char*>(const_cast<uint8_t*>(data)); - else + } else { memcpy(buf.base, data, avail); + } data += avail; len -= avail; stream->EmitRead(avail, buf); @@ -1740,8 +1755,9 @@ void Http2Session::OnStreamAfterWrite(WriteWrap* w, int status) { // queue), but only if a write has not already been scheduled. void Http2Session::MaybeScheduleWrite() { CHECK(!is_write_scheduled()); - if (UNLIKELY(!session_)) + if (!session_) [[unlikely]] { return; + } if (nghttp2_session_want_write(session_.get())) { HandleScope handle_scope(env()->isolate()); @@ -1813,8 +1829,9 @@ void Http2Session::ClearOutgoing(int status) { for (int32_t stream_id : current_pending_rst_streams) { BaseObjectPtr<Http2Stream> stream = FindStream(stream_id); - if (LIKELY(stream)) + if (stream) [[likely]] { stream->FlushRstStream(); + } } } } @@ -2002,8 +2019,9 @@ Http2Stream* Http2Session::SubmitRequest( *prov, nullptr); CHECK_NE(*ret, NGHTTP2_ERR_NOMEM); - if (LIKELY(*ret > 0)) + if (*ret > 0) [[likely]] { stream = Http2Stream::New(this, *ret, NGHTTP2_HCAT_HEADERS, options); + } return stream; } @@ -2032,8 +2050,8 @@ void Http2Session::OnStreamRead(ssize_t nread, const uv_buf_t& buf_) { statistics_.data_received += nread; - if (LIKELY(stream_buf_offset_ == 0 && - static_cast<size_t>(nread) != bs->ByteLength())) { + if (stream_buf_offset_ == 0 && static_cast<size_t>(nread) != bs->ByteLength()) + [[likely]] { // Shrink to the actual amount of used data.
std::unique_ptr old_bs = std::move(bs); bs = ArrayBuffer::NewBackingStore(env()->isolate(), nread); diff --git a/src/node_http2_state.h b/src/node_http2_state.h index 4683b71c227ae2..2957a2827f370e 100644 --- a/src/node_http2_state.h +++ b/src/node_http2_state.h @@ -58,6 +58,8 @@ namespace http2 { IDX_OPTIONS_MAX_OUTSTANDING_SETTINGS, IDX_OPTIONS_MAX_SESSION_MEMORY, IDX_OPTIONS_MAX_SETTINGS, + IDX_OPTIONS_STREAM_RESET_RATE, + IDX_OPTIONS_STREAM_RESET_BURST, IDX_OPTIONS_FLAGS }; diff --git a/src/node_internals.h b/src/node_internals.h index fe2d25decd8830..85b666e11f5654 100644 --- a/src/node_internals.h +++ b/src/node_internals.h @@ -110,10 +110,10 @@ void SignalExit(int signal, siginfo_t* info, void* ucontext); std::string GetProcessTitle(const char* default_title); std::string GetHumanReadableProcessName(); -v8::Maybe InitializeBaseContextForSnapshot( +v8::Maybe InitializeBaseContextForSnapshot( v8::Local context); -v8::Maybe InitializeContextRuntime(v8::Local context); -v8::Maybe InitializePrimordials(v8::Local context); +v8::Maybe InitializeContextRuntime(v8::Local context); +v8::Maybe InitializePrimordials(v8::Local context); class NodeArrayBufferAllocator : public ArrayBufferAllocator { public: diff --git a/src/node_messaging.cc b/src/node_messaging.cc index 7422757fcb21bf..a27faf92c172c7 100644 --- a/src/node_messaging.cc +++ b/src/node_messaging.cc @@ -25,6 +25,7 @@ using v8::Global; using v8::HandleScope; using v8::Isolate; using v8::Just; +using v8::JustVoid; using v8::Local; using v8::Maybe; using v8::MaybeLocal; @@ -337,7 +338,11 @@ class SerializerDelegate : public ValueSerializer::Delegate { // methods like toString(). It's probably confusing if that gets lost // in transmission. Local normal_object = Object::New(isolate); - env_->env_vars()->AssignToObject(isolate, env_->context(), normal_object); + if (env_->env_vars() + ->AssignToObject(isolate, env_->context(), normal_object) + .IsNothing()) { + return Nothing(); + } serializer->WriteUint32(kNormalObject); // Instead of a BaseObject. return serializer->WriteValue(env_->context(), normal_object); } @@ -797,7 +802,7 @@ void MessagePort::OnMessage(MessageProcessingMode mode) { // The data_ could be freed or, the handle has been/is being closed. // A possible case for this, is transfer the MessagePort to another // context, it will call the constructor and trigger the async handle empty. - // Because all data was sent from the preivous context. + // Because all data was sent from the previous context. if (IsDetached()) return; HandleScope handle_scope(env()->isolate()); @@ -1389,25 +1394,25 @@ JSTransferable::NestedTransferables() const { return Just(ret); } -Maybe JSTransferable::FinalizeTransferRead( +Maybe JSTransferable::FinalizeTransferRead( Local context, ValueDeserializer* deserializer) { // Call `this[kDeserialize](data)` where `data` comes from the return value // of `this[kTransfer]()` or `this[kClone]()`. 
HandleScope handle_scope(env()->isolate()); Local data; - if (!deserializer->ReadValue(context).ToLocal(&data)) return Nothing(); + if (!deserializer->ReadValue(context).ToLocal(&data)) return Nothing(); Local method_name = env()->messaging_deserialize_symbol(); Local method; if (!target()->Get(context, method_name).ToLocal(&method)) { - return Nothing(); + return Nothing(); } - if (!method->IsFunction()) return Just(true); + if (!method->IsFunction()) return JustVoid(); if (method.As()->Call(context, target(), 1, &data).IsEmpty()) { - return Nothing(); + return Nothing(); } - return Just(true); + return JustVoid(); } JSTransferable::Data::Data(std::string&& deserialize_info, diff --git a/src/node_messaging.h b/src/node_messaging.h index 6dd34b8bd5a407..169ff0ba19e909 100644 --- a/src/node_messaging.h +++ b/src/node_messaging.h @@ -340,7 +340,7 @@ class JSTransferable : public BaseObject { std::unique_ptr CloneForMessaging() const override; v8::Maybe>> NestedTransferables() const override; - v8::Maybe FinalizeTransferRead( + v8::Maybe FinalizeTransferRead( v8::Local context, v8::ValueDeserializer* deserializer) override; diff --git a/src/node_modules.cc b/src/node_modules.cc index 3bedd2dfecb49c..dfd115a9eccc6b 100644 --- a/src/node_modules.cc +++ b/src/node_modules.cc @@ -289,11 +289,11 @@ const BindingData::PackageConfig* BindingData::TraverseParent( // Stop the search when the process doesn't have permissions // to walk upwards - if (UNLIKELY(is_permissions_enabled && - !env->permission()->is_granted( - env, - permission::PermissionScope::kFileSystemRead, - current_path.generic_string()))) { + if (is_permissions_enabled && + !env->permission()->is_granted( + env, + permission::PermissionScope::kFileSystemRead, + current_path.generic_string())) [[unlikely]] { return nullptr; } @@ -322,14 +322,13 @@ void BindingData::GetNearestParentPackageJSON( BufferValue path_value(realm->isolate(), args[0]); // Check if the path has a trailing slash. If so, add it after // ToNamespacedPath() as it will be deleted by ToNamespacedPath() - bool slashCheck = path_value.ToStringView().ends_with( - std::filesystem::path::preferred_separator); + bool slashCheck = path_value.ToStringView().ends_with(kPathSeparator); ToNamespacedPath(realm->env(), &path_value); std::string path_value_str = path_value.ToString(); if (slashCheck) { - path_value_str.push_back(std::filesystem::path::preferred_separator); + path_value_str.push_back(kPathSeparator); } auto package_json = @@ -349,14 +348,13 @@ void BindingData::GetNearestParentPackageJSONType( BufferValue path_value(realm->isolate(), args[0]); // Check if the path has a trailing slash. 
If so, add it after // ToNamespacedPath() as it will be deleted by ToNamespacedPath() - bool slashCheck = path_value.ToStringView().ends_with( - std::filesystem::path::preferred_separator); + bool slashCheck = path_value.ToStringView().ends_with(kPathSeparator); ToNamespacedPath(realm->env(), &path_value); std::string path_value_str = path_value.ToString(); if (slashCheck) { - path_value_str.push_back(std::filesystem::path::preferred_separator); + path_value_str.push_back(kPathSeparator); } auto package_json = @@ -435,11 +433,33 @@ void BindingData::GetPackageScopeConfig( .ToLocalChecked()); } +void FlushCompileCache(const FunctionCallbackInfo& args) { + Isolate* isolate = args.GetIsolate(); + Local context = isolate->GetCurrentContext(); + Environment* env = Environment::GetCurrent(context); + + if (!args[0]->IsBoolean() && !args[0]->IsUndefined()) { + THROW_ERR_INVALID_ARG_TYPE(env, + "keepDeserializedCache should be a boolean"); + return; + } + Debug(env, + DebugCategory::COMPILE_CACHE, + "[compile cache] module.flushCompileCache() requested.\n"); + env->FlushCompileCache(); + Debug(env, + DebugCategory::COMPILE_CACHE, + "[compile cache] module.flushCompileCache() finished.\n"); +} + void EnableCompileCache(const FunctionCallbackInfo& args) { - CHECK(args[0]->IsString()); Isolate* isolate = args.GetIsolate(); Local context = isolate->GetCurrentContext(); Environment* env = Environment::GetCurrent(context); + if (!args[0]->IsString()) { + THROW_ERR_INVALID_ARG_TYPE(env, "cacheDir should be a string"); + return; + } Utf8Value value(isolate, args[0]); CompileCacheEnableResult result = env->EnableCompileCache(*value); std::vector> values = { @@ -477,6 +497,7 @@ void BindingData::CreatePerIsolateProperties(IsolateData* isolate_data, SetMethod(isolate, target, "getPackageScopeConfig", GetPackageScopeConfig); SetMethod(isolate, target, "enableCompileCache", EnableCompileCache); SetMethod(isolate, target, "getCompileCacheDir", GetCompileCacheDir); + SetMethod(isolate, target, "flushCompileCache", FlushCompileCache); } void BindingData::CreatePerContextProperties(Local target, @@ -509,6 +530,7 @@ void BindingData::RegisterExternalReferences( registry->Register(GetPackageScopeConfig); registry->Register(EnableCompileCache); registry->Register(GetCompileCacheDir); + registry->Register(FlushCompileCache); } } // namespace modules diff --git a/src/node_options-inl.h b/src/node_options-inl.h index fabf3be149be93..24954e0b583834 100644 --- a/src/node_options-inl.h +++ b/src/node_options-inl.h @@ -447,9 +447,14 @@ void OptionsParser::Parse( case kBoolean: *Lookup(info.field, options) = !is_negation; break; - case kInteger: + case kInteger: { + // Special case to pass --stack-trace-limit down to V8. 
+ if (name == "--stack-trace-limit") { + v8_args->push_back(arg); + } *Lookup(info.field, options) = std::atoll(value.c_str()); break; + } case kUInteger: *Lookup(info.field, options) = std::strtoull(value.c_str(), nullptr, 10); diff --git a/src/node_options.cc b/src/node_options.cc index cfc599ec9a6197..d3b59690e917af 100644 --- a/src/node_options.cc +++ b/src/node_options.cc @@ -406,6 +406,10 @@ EnvironmentOptionsParser::EnvironmentOptionsParser() { "Source Map V3 support for stack traces", &EnvironmentOptions::enable_source_maps, kAllowedInEnvvar); + AddOption("--entry-url", + "Treat the entrypoint as a URL", + &EnvironmentOptions::entry_is_url, + kAllowedInEnvvar); AddOption("--experimental-abortcontroller", "", NoOp{}, kAllowedInEnvvar); AddOption("--experimental-eventsource", "experimental EventSource API", @@ -917,7 +921,10 @@ PerIsolateOptionsParser::PerIsolateOptionsParser( "--perf-basic-prof-only-functions", "", V8Option{}, kAllowedInEnvvar); AddOption("--perf-prof", "", V8Option{}, kAllowedInEnvvar); AddOption("--perf-prof-unwinding-info", "", V8Option{}, kAllowedInEnvvar); - AddOption("--stack-trace-limit", "", V8Option{}, kAllowedInEnvvar); + AddOption("--stack-trace-limit", + "", + &PerIsolateOptions::stack_trace_limit, + kAllowedInEnvvar); AddOption("--disallow-code-generation-from-strings", "disallow eval and friends", V8Option{}, @@ -1183,7 +1190,7 @@ inline uint16_t ParseAndValidatePort(const std::string_view port, if (r.ec == std::errc::result_out_of_range || (result != 0 && result < 1024)) { - errors->push_back(" must be 0 or in range 1024 to 65535."); + errors->push_back("must be 0 or in range 1024 to 65535."); } return result; @@ -1309,6 +1316,11 @@ void GetCLIOptionsValues(const FunctionCallbackInfo& args) { if (item.first == "--abort-on-uncaught-exception") { value = Boolean::New(isolate, s.original_per_env->abort_on_uncaught_exception); + } else if (item.first == "--stack-trace-limit") { + value = + Number::New(isolate, + static_cast( + *_ppop_instance.Lookup(field, opts))); } else { value = undefined_value; } diff --git a/src/node_options.h b/src/node_options.h index 9e656a2815045a..fc7f898a6b9b60 100644 --- a/src/node_options.h +++ b/src/node_options.h @@ -133,6 +133,7 @@ class EnvironmentOptions : public Options { bool experimental_import_meta_resolve = false; std::string input_type; // Value of --input-type std::string type; // Value of --experimental-default-type + bool entry_is_url = false; bool experimental_permission = false; std::vector allow_fs_read; std::vector allow_fs_write; @@ -272,6 +273,7 @@ class PerIsolateOptions : public Options { bool report_uncaught_exception = false; bool report_on_signal = false; bool experimental_shadow_realm = false; + int64_t stack_trace_limit = 10; std::string report_signal = "SIGUSR2"; bool build_snapshot = false; std::string build_snapshot_config; diff --git a/src/node_perf.cc b/src/node_perf.cc index a22eaf1a6f23c5..a868b0e82e0813 100644 --- a/src/node_perf.cc +++ b/src/node_perf.cc @@ -190,8 +190,9 @@ void MarkGarbageCollectionEnd( } env->performance_state()->current_gc_type = 0; // If no one is listening to gc performance entries, do not create them. 
- if (LIKELY(!state->observers[NODE_PERFORMANCE_ENTRY_TYPE_GC])) + if (!state->observers[NODE_PERFORMANCE_ENTRY_TYPE_GC]) [[likely]] { return; + } double start_time = (state->performance_last_gc_start_mark - env->time_origin()) / diff --git a/src/node_process.h b/src/node_process.h index 142d0e63e18c46..d4f1c0d45dec50 100644 --- a/src/node_process.h +++ b/src/node_process.h @@ -72,7 +72,8 @@ class BindingData : public SnapshotableObject { static BindingData* FromV8Value(v8::Local receiver); static void NumberImpl(BindingData* receiver); - static void FastNumber(v8::Local receiver) { + static void FastNumber(v8::Local unused, + v8::Local receiver) { NumberImpl(FromV8Value(receiver)); } @@ -80,7 +81,8 @@ class BindingData : public SnapshotableObject { static void BigIntImpl(BindingData* receiver); - static void FastBigInt(v8::Local receiver) { + static void FastBigInt(v8::Local unused, + v8::Local receiver) { BigIntImpl(FromV8Value(receiver)); } diff --git a/src/node_realm-inl.h b/src/node_realm-inl.h index fe20ac2b2fea3a..c34b445ccb0bdd 100644 --- a/src/node_realm-inl.h +++ b/src/node_realm-inl.h @@ -9,13 +9,17 @@ namespace node { inline Realm* Realm::GetCurrent(v8::Isolate* isolate) { - if (UNLIKELY(!isolate->InContext())) return nullptr; + if (!isolate->InContext()) [[unlikely]] { + return nullptr; + } v8::HandleScope handle_scope(isolate); return GetCurrent(isolate->GetCurrentContext()); } inline Realm* Realm::GetCurrent(v8::Local context) { - if (UNLIKELY(!ContextEmbedderTag::IsNodeContext(context))) return nullptr; + if (!ContextEmbedderTag::IsNodeContext(context)) [[unlikely]] { + return nullptr; + } return static_cast( context->GetAlignedPointerFromEmbedderData(ContextEmbedderIndex::kRealm)); } @@ -75,7 +79,9 @@ inline T* Realm::GetBindingData() { constexpr size_t binding_index = static_cast(T::binding_type_int); static_assert(binding_index < std::tuple_size_v); auto ptr = binding_data_store_[binding_index]; - if (UNLIKELY(!ptr)) return nullptr; + if (!ptr) [[unlikely]] { + return nullptr; + } T* result = static_cast(ptr.get()); DCHECK_NOT_NULL(result); return result; diff --git a/src/node_report.cc b/src/node_report.cc index 69b8646d40f3f0..cffb9dd1b655af 100644 --- a/src/node_report.cc +++ b/src/node_report.cc @@ -21,7 +21,6 @@ #include #include #include -#include #include constexpr int NODE_REPORT_VERSION = 3; @@ -887,9 +886,8 @@ std::string TriggerNodeReport(Isolate* isolate, report_directory = per_process::cli_options->report_directory; } // Regular file. 
Append filename to directory path if one was specified - if (!report_directory.empty()) { - std::string pathname = - (std::filesystem::path(report_directory) / filename).string(); + if (report_directory.length() > 0) { + std::string pathname = report_directory + kPathSeparator + filename; outfile.open(pathname, std::ios::out | std::ios::binary); } else { outfile.open(filename, std::ios::out | std::ios::binary); diff --git a/src/node_sqlite.cc b/src/node_sqlite.cc index fbef321dcab16c..df6bb45ad60e7d 100644 --- a/src/node_sqlite.cc +++ b/src/node_sqlite.cc @@ -18,8 +18,11 @@ using v8::Array; using v8::ArrayBuffer; using v8::BigInt; using v8::Boolean; +using v8::ConstructorBehavior; using v8::Context; +using v8::DontDelete; using v8::Exception; +using v8::FunctionCallback; using v8::FunctionCallbackInfo; using v8::FunctionTemplate; using v8::Integer; @@ -31,6 +34,7 @@ using v8::Name; using v8::Null; using v8::Number; using v8::Object; +using v8::SideEffectType; using v8::String; using v8::Uint8Array; using v8::Value; @@ -91,12 +95,16 @@ inline void THROW_ERR_SQLITE_ERROR(Isolate* isolate, const char* message) { DatabaseSync::DatabaseSync(Environment* env, Local object, Local location, - bool open) + bool open, + bool enable_foreign_keys_on_open, + bool enable_dqs_on_open) : BaseObject(env, object) { MakeWeak(); node::Utf8Value utf8_location(env->isolate(), location); location_ = utf8_location.ToString(); connection_ = nullptr; + enable_foreign_keys_on_open_ = enable_foreign_keys_on_open; + enable_dqs_on_open_ = enable_dqs_on_open; if (open) { Open(); @@ -125,6 +133,26 @@ bool DatabaseSync::Open() { int flags = SQLITE_OPEN_READWRITE | SQLITE_OPEN_CREATE; int r = sqlite3_open_v2(location_.c_str(), &connection_, flags, nullptr); CHECK_ERROR_OR_THROW(env()->isolate(), connection_, r, SQLITE_OK, false); + + r = sqlite3_db_config(connection_, + SQLITE_DBCONFIG_DQS_DML, + static_cast(enable_dqs_on_open_), + nullptr); + CHECK_ERROR_OR_THROW(env()->isolate(), connection_, r, SQLITE_OK, false); + r = sqlite3_db_config(connection_, + SQLITE_DBCONFIG_DQS_DDL, + static_cast(enable_dqs_on_open_), + nullptr); + CHECK_ERROR_OR_THROW(env()->isolate(), connection_, r, SQLITE_OK, false); + + int foreign_keys_enabled; + r = sqlite3_db_config(connection_, + SQLITE_DBCONFIG_ENABLE_FKEY, + static_cast(enable_foreign_keys_on_open_), + &foreign_keys_enabled); + CHECK_ERROR_OR_THROW(env()->isolate(), connection_, r, SQLITE_OK, false); + CHECK_EQ(foreign_keys_enabled, enable_foreign_keys_on_open_); + return true; } @@ -166,6 +194,8 @@ void DatabaseSync::New(const FunctionCallbackInfo& args) { } bool open = true; + bool enable_foreign_keys = true; + bool enable_dqs = false; if (args.Length() > 1) { if (!args[1]->IsObject()) { @@ -188,9 +218,50 @@ void DatabaseSync::New(const FunctionCallbackInfo& args) { } open = open_v.As()->Value(); } + + Local enable_foreign_keys_string = + FIXED_ONE_BYTE_STRING(env->isolate(), "enableForeignKeyConstraints"); + Local enable_foreign_keys_v; + if (!options->Get(env->context(), enable_foreign_keys_string) + .ToLocal(&enable_foreign_keys_v)) { + return; + } + if (!enable_foreign_keys_v->IsUndefined()) { + if (!enable_foreign_keys_v->IsBoolean()) { + node::THROW_ERR_INVALID_ARG_TYPE( + env->isolate(), + "The \"options.enableForeignKeyConstraints\" argument must be a " + "boolean."); + return; + } + enable_foreign_keys = enable_foreign_keys_v.As()->Value(); + } + + Local enable_dqs_string = FIXED_ONE_BYTE_STRING( + env->isolate(), "enableDoubleQuotedStringLiterals"); + Local enable_dqs_v; 
+ if (!options->Get(env->context(), enable_dqs_string) + .ToLocal(&enable_dqs_v)) { + return; + } + if (!enable_dqs_v->IsUndefined()) { + if (!enable_dqs_v->IsBoolean()) { + node::THROW_ERR_INVALID_ARG_TYPE( + env->isolate(), + "The \"options.enableDoubleQuotedStringLiterals\" argument must be " + "a boolean."); + return; + } + enable_dqs = enable_dqs_v.As()->Value(); + } } - new DatabaseSync(env, args.This(), args[0].As(), open); + new DatabaseSync(env, + args.This(), + args[0].As(), + open, + enable_foreign_keys, + enable_dqs); } void DatabaseSync::Open(const FunctionCallbackInfo& args) { @@ -612,7 +683,7 @@ void StatementSync::Run(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(result); } -void StatementSync::SourceSQL(const FunctionCallbackInfo& args) { +void StatementSync::SourceSQLGetter(const FunctionCallbackInfo& args) { StatementSync* stmt; ASSIGN_OR_RETURN_UNWRAP(&stmt, args.This()); Environment* env = Environment::GetCurrent(args); @@ -626,7 +697,7 @@ void StatementSync::SourceSQL(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(sql); } -void StatementSync::ExpandedSQL(const FunctionCallbackInfo& args) { +void StatementSync::ExpandedSQLGetter(const FunctionCallbackInfo& args) { StatementSync* stmt; ASSIGN_OR_RETURN_UNWRAP(&stmt, args.This()); Environment* env = Environment::GetCurrent(args); @@ -686,6 +757,23 @@ void IllegalConstructor(const FunctionCallbackInfo& args) { node::THROW_ERR_ILLEGAL_CONSTRUCTOR(Environment::GetCurrent(args)); } +static inline void SetSideEffectFreeGetter( + Isolate* isolate, + Local class_template, + Local name, + FunctionCallback fn) { + Local getter = + FunctionTemplate::New(isolate, + fn, + Local(), + v8::Signature::New(isolate, class_template), + /* length */ 0, + ConstructorBehavior::kThrow, + SideEffectType::kHasNoSideEffect); + class_template->InstanceTemplate()->SetAccessorProperty( + name, getter, Local(), DontDelete); +} + Local StatementSync::GetConstructorTemplate( Environment* env) { Local tmpl = @@ -699,8 +787,14 @@ Local StatementSync::GetConstructorTemplate( SetProtoMethod(isolate, tmpl, "all", StatementSync::All); SetProtoMethod(isolate, tmpl, "get", StatementSync::Get); SetProtoMethod(isolate, tmpl, "run", StatementSync::Run); - SetProtoMethod(isolate, tmpl, "sourceSQL", StatementSync::SourceSQL); - SetProtoMethod(isolate, tmpl, "expandedSQL", StatementSync::ExpandedSQL); + SetSideEffectFreeGetter(isolate, + tmpl, + FIXED_ONE_BYTE_STRING(isolate, "sourceSQL"), + StatementSync::SourceSQLGetter); + SetSideEffectFreeGetter(isolate, + tmpl, + FIXED_ONE_BYTE_STRING(isolate, "expandedSQL"), + StatementSync::ExpandedSQLGetter); SetProtoMethod(isolate, tmpl, "setAllowBareNamedParameters", diff --git a/src/node_sqlite.h b/src/node_sqlite.h index f62b2e991cb95a..40abf7b0d3edc3 100644 --- a/src/node_sqlite.h +++ b/src/node_sqlite.h @@ -21,7 +21,9 @@ class DatabaseSync : public BaseObject { DatabaseSync(Environment* env, v8::Local object, v8::Local location, - bool open); + bool open, + bool enable_foreign_keys_on_open, + bool enable_dqs_on_open); void MemoryInfo(MemoryTracker* tracker) const override; static void New(const v8::FunctionCallbackInfo& args); static void Open(const v8::FunctionCallbackInfo& args); @@ -43,6 +45,8 @@ class DatabaseSync : public BaseObject { std::string location_; sqlite3* connection_; std::unordered_set statements_; + bool enable_foreign_keys_on_open_; + bool enable_dqs_on_open_; }; class StatementSync : public BaseObject { @@ -60,8 +64,9 @@ class StatementSync : public BaseObject { static 
void All(const v8::FunctionCallbackInfo& args); static void Get(const v8::FunctionCallbackInfo& args); static void Run(const v8::FunctionCallbackInfo& args); - static void SourceSQL(const v8::FunctionCallbackInfo& args); - static void ExpandedSQL(const v8::FunctionCallbackInfo& args); + static void SourceSQLGetter(const v8::FunctionCallbackInfo& args); + static void ExpandedSQLGetter( + const v8::FunctionCallbackInfo& args); static void SetAllowBareNamedParameters( const v8::FunctionCallbackInfo& args); static void SetReadBigInts(const v8::FunctionCallbackInfo& args); diff --git a/src/node_task_runner.cc b/src/node_task_runner.cc index 08138e326691f1..b4814907742532 100644 --- a/src/node_task_runner.cc +++ b/src/node_task_runner.cc @@ -69,7 +69,7 @@ ProcessRunner::ProcessRunner(std::shared_ptr result, command_args_ = { options_.file, "/d", "/s", "/c", "\"" + command_str + "\""}; } else { - // If the file is not cmd.exe, and it is unclear wich shell is being used, + // If the file is not cmd.exe, and it is unclear which shell is being used, // so assume -c is the correct syntax (Unix-like shells use -c for this // purpose). command_args_ = {options_.file, "-c", command_str}; @@ -149,7 +149,7 @@ std::string EscapeShell(const std::string_view input) { #endif } - static const std::string_view forbidden_characters = + static constexpr std::string_view forbidden_characters = "[\t\n\r \"#$&'()*;<>?\\\\`|~]"; // Check if input contains any forbidden characters @@ -191,7 +191,7 @@ std::string EscapeShell(const std::string_view input) { void ProcessRunner::ExitCallback(uv_process_t* handle, int64_t exit_status, int term_signal) { - auto self = reinterpret_cast(handle->data); + const auto self = static_cast(handle->data); uv_close(reinterpret_cast(handle), nullptr); self->OnExit(exit_status, term_signal); } @@ -205,6 +205,9 @@ void ProcessRunner::OnExit(int64_t exit_status, int term_signal) { } void ProcessRunner::Run() { + // keeps the string alive until destructor + cwd = package_json_path_.parent_path().string(); + options_.cwd = cwd.c_str(); if (int r = uv_spawn(loop_, &process_, &options_)) { fprintf(stderr, "Error: %s\n", uv_strerror(r)); } @@ -246,14 +249,16 @@ FindPackageJson(const std::filesystem::path& cwd) { return {{package_json_path, raw_content, path_env_var}}; } -void RunTask(std::shared_ptr result, +void RunTask(const std::shared_ptr& result, std::string_view command_id, const std::vector& positional_args) { auto cwd = std::filesystem::current_path(); auto package_json = FindPackageJson(cwd); if (!package_json.has_value()) { - fprintf(stderr, "Can't read package.json\n"); + fprintf(stderr, + "Can't find package.json for directory %s\n", + cwd.string().c_str()); result->exit_code_ = ExitCode::kGenericUserError; return; } @@ -267,11 +272,21 @@ void RunTask(std::shared_ptr result, simdjson::ondemand::parser json_parser; simdjson::ondemand::document document; simdjson::ondemand::object main_object; - simdjson::error_code error = json_parser.iterate(raw_json).get(document); + if (json_parser.iterate(raw_json).get(document)) { + fprintf(stderr, "Can't parse %s\n", path.string().c_str()); + result->exit_code_ = ExitCode::kGenericUserError; + return; + } // If document is not an object, throw an error. 
- if (error || document.get_object().get(main_object)) { - fprintf(stderr, "Can't parse package.json\n"); + if (auto root_error = document.get_object().get(main_object)) { + if (root_error == simdjson::error_code::INCORRECT_TYPE) { + fprintf(stderr, + "Root value unexpected not an object for %s\n\n", + path.string().c_str()); + } else { + fprintf(stderr, "Can't parse %s\n", path.string().c_str()); + } result->exit_code_ = ExitCode::kGenericUserError; return; } @@ -279,34 +294,45 @@ void RunTask(std::shared_ptr result, // If package_json object doesn't have "scripts" field, throw an error. simdjson::ondemand::object scripts_object; if (main_object["scripts"].get_object().get(scripts_object)) { - fprintf(stderr, "Can't find \"scripts\" field in package.json\n"); + fprintf( + stderr, "Can't find \"scripts\" field in %s\n", path.string().c_str()); result->exit_code_ = ExitCode::kGenericUserError; return; } // If the command_id is not found in the scripts object, throw an error. std::string_view command; - if (scripts_object[command_id].get_string().get(command)) { - fprintf(stderr, - "Missing script: \"%.*s\"\n\n", - static_cast(command_id.size()), - command_id.data()); - fprintf(stderr, "Available scripts are:\n"); - - // Reset the object to iterate over it again - scripts_object.reset(); - simdjson::ondemand::value value; - for (auto field : scripts_object) { - std::string_view key_str; - std::string_view value_str; - if (!field.unescaped_key().get(key_str) && !field.value().get(value) && - !value.get_string().get(value_str)) { - fprintf(stderr, - " %.*s: %.*s\n", - static_cast(key_str.size()), - key_str.data(), - static_cast(value_str.size()), - value_str.data()); + if (auto command_error = + scripts_object[command_id].get_string().get(command)) { + if (command_error == simdjson::error_code::INCORRECT_TYPE) { + fprintf(stderr, + "Script \"%.*s\" is unexpectedly not a string for %s\n\n", + static_cast(command_id.size()), + command_id.data(), + path.string().c_str()); + } else { + fprintf(stderr, + "Missing script: \"%.*s\" for %s\n\n", + static_cast(command_id.size()), + command_id.data(), + path.string().c_str()); + fprintf(stderr, "Available scripts are:\n"); + + // Reset the object to iterate over it again + scripts_object.reset(); + simdjson::ondemand::value value; + for (auto field : scripts_object) { + std::string_view key_str; + std::string_view value_str; + if (!field.unescaped_key().get(key_str) && !field.value().get(value) && + !value.get_string().get(value_str)) { + fprintf(stderr, + " %.*s: %.*s\n", + static_cast(key_str.size()), + key_str.data(), + static_cast(value_str.size()), + value_str.data()); + } } } result->exit_code_ = ExitCode::kGenericUserError; diff --git a/src/node_task_runner.h b/src/node_task_runner.h index a498637f017dc9..ec8f2adf45aa4a 100644 --- a/src/node_task_runner.h +++ b/src/node_task_runner.h @@ -44,6 +44,7 @@ class ProcessRunner { std::vector env_vars_{}; std::unique_ptr env {}; // memory for options_.env std::unique_ptr arg {}; // memory for options_.args + std::string cwd; // OnExit is the callback function that is called when the process exits. 
void OnExit(int64_t exit_status, int term_signal); @@ -78,7 +79,7 @@ class ProcessRunner { std::optional> FindPackageJson(const std::filesystem::path& cwd); -void RunTask(std::shared_ptr result, +void RunTask(const std::shared_ptr& result, std::string_view command_id, const PositionalArgs& positional_args); PositionalArgs GetPositionalArgs(const std::vector& args); diff --git a/src/node_util.cc b/src/node_util.cc index ea16a472c27c67..c99e26752c7d93 100644 --- a/src/node_util.cc +++ b/src/node_util.cc @@ -23,6 +23,7 @@ using v8::Isolate; using v8::KeyCollectionMode; using v8::Local; using v8::LocalVector; +using v8::Name; using v8::Object; using v8::ObjectTemplate; using v8::ONLY_CONFIGURABLE; @@ -262,28 +263,32 @@ static void GetCallSite(const FunctionCallbackInfo& args) { // Frame 0 is node:util. It should be skipped. for (int i = 1; i < frame_count; ++i) { - Local obj = Object::New(isolate); Local stack_frame = stack->GetFrame(isolate, i); - Utf8Value function_name(isolate, stack_frame->GetFunctionName()); - Utf8Value script_name(isolate, stack_frame->GetScriptName()); - - obj->Set(env->context(), - env->function_name_string(), - String::NewFromUtf8(isolate, *function_name).ToLocalChecked()) - .Check(); - obj->Set(env->context(), - env->script_name_string(), - String::NewFromUtf8(isolate, *script_name).ToLocalChecked()) - .Check(); - obj->Set(env->context(), - env->line_number_string(), - Integer::NewFromUnsigned(isolate, stack_frame->GetLineNumber())) - .Check(); - obj->Set(env->context(), - env->column_string(), - Integer::NewFromUnsigned(isolate, stack_frame->GetColumn())) - .Check(); + Local function_name = stack_frame->GetFunctionName(); + if (function_name.IsEmpty()) { + function_name = v8::String::Empty(isolate); + } + + Local script_name = stack_frame->GetScriptName(); + if (script_name.IsEmpty()) { + script_name = v8::String::Empty(isolate); + } + + Local names[] = { + env->function_name_string(), + env->script_name_string(), + env->line_number_string(), + env->column_string(), + }; + Local values[] = { + function_name, + script_name, + Integer::NewFromUnsigned(isolate, stack_frame->GetLineNumber()), + Integer::NewFromUnsigned(isolate, stack_frame->GetColumn()), + }; + Local obj = Object::New( + isolate, v8::Null(isolate), names, values, arraysize(names)); callsite_objects.push_back(obj); } diff --git a/src/node_version.h b/src/node_version.h index 3d8e257ba2a154..c69cff04d061d3 100644 --- a/src/node_version.h +++ b/src/node_version.h @@ -23,13 +23,13 @@ #define SRC_NODE_VERSION_H_ #define NODE_MAJOR_VERSION 22 -#define NODE_MINOR_VERSION 9 -#define NODE_PATCH_VERSION 1 +#define NODE_MINOR_VERSION 10 +#define NODE_PATCH_VERSION 0 #define NODE_VERSION_IS_LTS 0 #define NODE_VERSION_LTS_CODENAME "" -#define NODE_VERSION_IS_RELEASE 0 +#define NODE_VERSION_IS_RELEASE 1 #ifndef NODE_STRINGIFY #define NODE_STRINGIFY(n) NODE_STRINGIFY_HELPER(n) diff --git a/src/node_wasi.cc b/src/node_wasi.cc index ad1da44a01f437..468c2e59903fef 100644 --- a/src/node_wasi.cc +++ b/src/node_wasi.cc @@ -241,20 +241,23 @@ inline void EinvalError() {} template R WASI::WasiFunction::FastCallback( + Local unused, Local receiver, Args... args, // NOLINTNEXTLINE(runtime/references) This is V8 api. 
FastApiCallbackOptions& options) { WASI* wasi = reinterpret_cast(BaseObject::FromJSObject(receiver)); - if (UNLIKELY(wasi == nullptr)) return EinvalError(); + if (wasi == nullptr) [[unlikely]] { + return EinvalError(); + } - if (UNLIKELY(options.wasm_memory == nullptr || wasi->memory_.IsEmpty())) { + if (options.wasm_memory == nullptr || wasi->memory_.IsEmpty()) [[unlikely]] { // fallback to slow path which to throw an error about missing memory. options.fallback = true; return EinvalError(); } uint8_t* memory = nullptr; - CHECK(LIKELY(options.wasm_memory->getStorageIfAligned(&memory))); + CHECK(options.wasm_memory->getStorageIfAligned(&memory)); return F(*wasi, {reinterpret_cast(memory), options.wasm_memory->length()}, diff --git a/src/node_wasi.h b/src/node_wasi.h index 25551936e6be36..ef7d2e83b6728a 100644 --- a/src/node_wasi.h +++ b/src/node_wasi.h @@ -160,7 +160,8 @@ class WASI : public BaseObject, v8::Local); private: - static R FastCallback(v8::Local receiver, + static R FastCallback(v8::Local unused, + v8::Local receiver, Args..., v8::FastApiCallbackOptions&); diff --git a/src/node_wasm_web_api.cc b/src/node_wasm_web_api.cc index ec66df951f7fb5..6e66b3d5fbf08a 100644 --- a/src/node_wasm_web_api.cc +++ b/src/node_wasm_web_api.cc @@ -104,12 +104,12 @@ void WasmStreamingObject::Push(const FunctionCallbackInfo& args) { size_t offset; size_t size; - if (LIKELY(chunk->IsArrayBufferView())) { + if (chunk->IsArrayBufferView()) [[likely]] { Local view = chunk.As(); bytes = view->Buffer()->Data(); offset = view->ByteOffset(); size = view->ByteLength(); - } else if (LIKELY(chunk->IsArrayBuffer())) { + } else if (chunk->IsArrayBuffer()) [[likely]] { Local buffer = chunk.As(); bytes = buffer->Data(); offset = 0; diff --git a/src/node_webstorage.cc b/src/node_webstorage.cc index 15d142e3d8f6e6..b27e173b53ec83 100644 --- a/src/node_webstorage.cc +++ b/src/node_webstorage.cc @@ -24,12 +24,16 @@ using v8::IndexedPropertyHandlerConfiguration; using v8::Integer; using v8::Intercepted; using v8::Isolate; +using v8::JustVoid; using v8::Local; using v8::Map; using v8::Maybe; using v8::MaybeLocal; using v8::Name; using v8::NamedPropertyHandlerConfiguration; +using v8::NewStringType; +using v8::Nothing; +using v8::Null; using v8::Object; using v8::PropertyAttribute; using v8::PropertyCallbackInfo; @@ -37,6 +41,7 @@ using v8::PropertyDescriptor; using v8::PropertyHandlerFlags; using v8::String; using v8::Uint32; +using v8::Undefined; using v8::Value; #define THROW_SQLITE_ERROR(env, r) \ @@ -96,7 +101,7 @@ void Storage::MemoryInfo(MemoryTracker* tracker) const { tracker->TrackField("location", location_); } -bool Storage::Open() { +Maybe Storage::Open() { static const int kCurrentSchemaVersion = 1; static constexpr std::string_view get_schema_version_sql = "SELECT schema_version FROM nodejs_webstorage_state"; @@ -161,22 +166,23 @@ bool Storage::Open() { sqlite3* db = db_.get(); if (db != nullptr) { - return true; + return JustVoid(); } int r = sqlite3_open(location_.c_str(), &db); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); r = sqlite3_exec(db, init_sql_v0.data(), 0, 0, nullptr); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); // Get the current schema version, used to determine schema migrations. 
sqlite3_stmt* s = nullptr; r = sqlite3_prepare_v2( db, get_schema_version_sql.data(), get_schema_version_sql.size(), &s, 0); r = sqlite3_exec(db, init_sql_v0.data(), 0, 0, nullptr); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); auto stmt = stmt_unique_ptr(s); - CHECK_ERROR_OR_THROW(env(), sqlite3_step(stmt.get()), SQLITE_ROW, false); + CHECK_ERROR_OR_THROW( + env(), sqlite3_step(stmt.get()), SQLITE_ROW, Nothing()); CHECK(sqlite3_column_type(stmt.get(), 0) == SQLITE_INTEGER); int schema_version = sqlite3_column_int(stmt.get(), 0); stmt = nullptr; // Force finalization. @@ -184,7 +190,7 @@ bool Storage::Open() { if (schema_version > kCurrentSchemaVersion) { THROW_ERR_INVALID_STATE( env(), "localStorage was created with a newer version of Node.js"); - return false; + return Nothing(); } if (schema_version < kCurrentSchemaVersion) { @@ -193,11 +199,11 @@ bool Storage::Open() { "UPDATE nodejs_webstorage_state SET schema_version = " + std::to_string(kCurrentSchemaVersion) + ";"; r = sqlite3_exec(db, set_user_version_sql.c_str(), 0, 0, nullptr); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); } db_ = conn_unique_ptr(db); - return true; + return JustVoid(); } void Storage::New(const FunctionCallbackInfo& args) { @@ -222,9 +228,9 @@ void Storage::New(const FunctionCallbackInfo& args) { new Storage(env, args.This(), location.ToStringView()); } -void Storage::Clear() { - if (!Open()) { - return; +Maybe Storage::Clear() { + if (!Open().IsJust()) { + return Nothing(); } static constexpr std::string_view sql = "DELETE FROM nodejs_webstorage"; @@ -233,13 +239,15 @@ void Storage::Clear() { env(), sqlite3_prepare_v2(db_.get(), sql.data(), sql.size(), &s, 0), SQLITE_OK, - void()); + Nothing()); auto stmt = stmt_unique_ptr(s); - CHECK_ERROR_OR_THROW(env(), sqlite3_step(stmt.get()), SQLITE_DONE, void()); + CHECK_ERROR_OR_THROW( + env(), sqlite3_step(stmt.get()), SQLITE_DONE, Nothing()); + return JustVoid(); } -Local Storage::Enumerate() { - if (!Open()) { +MaybeLocal Storage::Enumerate() { + if (!Open().IsJust()) { return Local(); } @@ -256,7 +264,7 @@ Local Storage::Enumerate() { if (!String::NewFromTwoByte(env()->isolate(), reinterpret_cast( sqlite3_column_blob(stmt.get(), 0)), - v8::NewStringType::kNormal, + NewStringType::kNormal, size) .ToLocal(&value)) { return Local(); @@ -267,8 +275,8 @@ Local Storage::Enumerate() { return Array::New(env()->isolate(), values.data(), values.size()); } -Local Storage::Length() { - if (!Open()) { +MaybeLocal Storage::Length() { + if (!Open().IsJust()) { return {}; } @@ -285,14 +293,13 @@ Local Storage::Length() { return Integer::New(env()->isolate(), result); } -Local Storage::Load(Local key) { +MaybeLocal Storage::Load(Local key) { if (key->IsSymbol()) { auto symbol_map = symbols_.Get(env()->isolate()); - MaybeLocal result = symbol_map->Get(env()->context(), key); - return result.FromMaybe(Local()); + return symbol_map->Get(env()->context(), key); } - if (!Open()) { + if (!Open().IsJust()) { return {}; } @@ -306,28 +313,26 @@ Local Storage::Load(Local key) { auto key_size = utf16key.length() * sizeof(uint16_t); r = sqlite3_bind_blob(stmt.get(), 1, utf16key.out(), key_size, SQLITE_STATIC); CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Local()); - auto value = Local(); r = sqlite3_step(stmt.get()); if (r == SQLITE_ROW) { CHECK(sqlite3_column_type(stmt.get(), 0) == SQLITE_BLOB); auto size = sqlite3_column_bytes(stmt.get(), 0) / sizeof(uint16_t); - if 
(!String::NewFromTwoByte(env()->isolate(), - reinterpret_cast( - sqlite3_column_blob(stmt.get(), 0)), - v8::NewStringType::kNormal, - size) - .ToLocal(&value)) { - return {}; - } + return String::NewFromTwoByte(env()->isolate(), + reinterpret_cast( + sqlite3_column_blob(stmt.get(), 0)), + NewStringType::kNormal, + size) + .As(); } else if (r != SQLITE_DONE) { THROW_SQLITE_ERROR(env(), r); + return {}; + } else { + return Null(env()->isolate()); } - - return value; } -Local Storage::LoadKey(const int index) { - if (!Open()) { +MaybeLocal Storage::LoadKey(const int index) { + if (!Open().IsJust()) { return {}; } @@ -340,65 +345,64 @@ Local Storage::LoadKey(const int index) { r = sqlite3_bind_int(stmt.get(), 1, index); CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Local()); - auto value = Local(); r = sqlite3_step(stmt.get()); if (r == SQLITE_ROW) { CHECK(sqlite3_column_type(stmt.get(), 0) == SQLITE_BLOB); auto size = sqlite3_column_bytes(stmt.get(), 0) / sizeof(uint16_t); - if (!String::NewFromTwoByte(env()->isolate(), - reinterpret_cast( - sqlite3_column_blob(stmt.get(), 0)), - v8::NewStringType::kNormal, - size) - .ToLocal(&value)) { - return {}; - } + return String::NewFromTwoByte(env()->isolate(), + reinterpret_cast( + sqlite3_column_blob(stmt.get(), 0)), + NewStringType::kNormal, + size) + .As(); } else if (r != SQLITE_DONE) { THROW_SQLITE_ERROR(env(), r); + return {}; + } else { + return Null(env()->isolate()); } - - return value; } -bool Storage::Remove(Local key) { +Maybe Storage::Remove(Local key) { if (key->IsSymbol()) { auto symbol_map = symbols_.Get(env()->isolate()); Maybe result = symbol_map->Delete(env()->context(), key); - return !result.IsNothing(); + return result.IsNothing() ? Nothing() : JustVoid(); } - if (!Open()) { - return false; + if (!Open().IsJust()) { + return Nothing(); } static constexpr std::string_view sql = "DELETE FROM nodejs_webstorage WHERE key = ?"; sqlite3_stmt* s = nullptr; int r = sqlite3_prepare_v2(db_.get(), sql.data(), sql.size(), &s, 0); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); auto stmt = stmt_unique_ptr(s); TwoByteValue utf16key(env()->isolate(), key); auto key_size = utf16key.length() * sizeof(uint16_t); r = sqlite3_bind_blob(stmt.get(), 1, utf16key.out(), key_size, SQLITE_STATIC); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); - CHECK_ERROR_OR_THROW(env(), sqlite3_step(stmt.get()), SQLITE_DONE, false); - return true; + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); + CHECK_ERROR_OR_THROW( + env(), sqlite3_step(stmt.get()), SQLITE_DONE, Nothing()); + return JustVoid(); } -bool Storage::Store(Local key, Local value) { +Maybe Storage::Store(Local key, Local value) { if (key->IsSymbol()) { auto symbol_map = symbols_.Get(env()->isolate()); MaybeLocal result = symbol_map->Set(env()->context(), key, value); - return !result.IsEmpty(); + return result.IsEmpty() ? 
Nothing() : JustVoid(); } Local val; if (!value->ToString(env()->context()).ToLocal(&val)) { - return false; + return Nothing(); } - if (!Open()) { - return false; + if (!Open().IsJust()) { + return Nothing(); } static constexpr std::string_view sql = @@ -409,23 +413,23 @@ bool Storage::Store(Local key, Local value) { TwoByteValue utf16key(env()->isolate(), key); TwoByteValue utf16val(env()->isolate(), val); int r = sqlite3_prepare_v2(db_.get(), sql.data(), sql.size(), &s, 0); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); auto stmt = stmt_unique_ptr(s); auto key_size = utf16key.length() * sizeof(uint16_t); r = sqlite3_bind_blob(stmt.get(), 1, utf16key.out(), key_size, SQLITE_STATIC); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); auto val_size = utf16val.length() * sizeof(uint16_t); r = sqlite3_bind_blob(stmt.get(), 2, utf16val.out(), val_size, SQLITE_STATIC); - CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, false); + CHECK_ERROR_OR_THROW(env(), r, SQLITE_OK, Nothing()); r = sqlite3_step(stmt.get()); if (r == SQLITE_CONSTRAINT) { ThrowQuotaExceededException(env()->context()); - return false; + return Nothing(); } - CHECK_ERROR_OR_THROW(env(), r, SQLITE_DONE, false); - return true; + CHECK_ERROR_OR_THROW(env(), r, SQLITE_DONE, Nothing()); + return JustVoid(); } static MaybeLocal Uint32ToName(Local context, uint32_t index) { @@ -453,12 +457,11 @@ static void GetItem(const FunctionCallbackInfo& info) { return; } - Local result = storage->Load(prop); - if (result.IsEmpty()) { - info.GetReturnValue().SetNull(); - } else { - info.GetReturnValue().Set(result); + Local result; + if (!storage->Load(prop).ToLocal(&result)) { + return; } + info.GetReturnValue().Set(result); } static void Key(const FunctionCallbackInfo& info) { @@ -481,10 +484,8 @@ static void Key(const FunctionCallbackInfo& info) { return; } - Local result = storage->LoadKey(index); - if (result.IsEmpty()) { - info.GetReturnValue().SetNull(); - } else { + Local result; + if (storage->LoadKey(index).ToLocal(&result)) { info.GetReturnValue().Set(result); } } @@ -555,12 +556,12 @@ static Intercepted StorageGetter(Local property, Storage* storage; ASSIGN_OR_RETURN_UNWRAP(&storage, info.This(), Intercepted::kNo); - Local result = storage->Load(property); + Local result; - if (result.IsEmpty()) { - info.GetReturnValue().SetUndefined(); - } else { + if (storage->Load(property).ToLocal(&result) && !result->IsNull()) { info.GetReturnValue().Set(result); + } else { + info.GetReturnValue().Set(Undefined(info.GetIsolate())); } return Intercepted::kYes; @@ -572,7 +573,7 @@ static Intercepted StorageSetter(Local property, Storage* storage; ASSIGN_OR_RETURN_UNWRAP(&storage, info.This(), Intercepted::kNo); - if (storage->Store(property, value)) { + if (storage->Store(property, value).IsJust()) { info.GetReturnValue().Set(value); } @@ -587,8 +588,8 @@ static Intercepted StorageQuery(Local property, Storage* storage; ASSIGN_OR_RETURN_UNWRAP(&storage, info.This(), Intercepted::kNo); - Local result = storage->Load(property); - if (result.IsEmpty()) { + Local result; + if (!storage->Load(property).ToLocal(&result) || result->IsNull()) { return Intercepted::kNo; } @@ -601,9 +602,7 @@ static Intercepted StorageDeleter(Local property, Storage* storage; ASSIGN_OR_RETURN_UNWRAP(&storage, info.This(), Intercepted::kNo); - if (storage->Remove(property)) { - info.GetReturnValue().Set(true); - } + 
info.GetReturnValue().Set(storage->Remove(property).IsJust()); return Intercepted::kYes; } @@ -611,7 +610,11 @@ static Intercepted StorageDeleter(Local property, static void StorageEnumerator(const PropertyCallbackInfo& info) { Storage* storage; ASSIGN_OR_RETURN_UNWRAP(&storage, info.This()); - info.GetReturnValue().Set(storage->Enumerate()); + Local result; + if (!storage->Enumerate().ToLocal(&result)) { + return; + } + info.GetReturnValue().Set(result); } static Intercepted StorageDefiner(Local property, @@ -697,7 +700,11 @@ static Intercepted IndexedDefiner(uint32_t index, static void StorageLengthGetter(const FunctionCallbackInfo& info) { Storage* storage; ASSIGN_OR_RETURN_UNWRAP(&storage, info.This()); - info.GetReturnValue().Set(storage->Length()); + Local result; + if (!storage->Length().ToLocal(&result)) { + return; + } + info.GetReturnValue().Set(result); } static void Initialize(Local target, diff --git a/src/node_webstorage.h b/src/node_webstorage.h index 2c0c3ab688cae5..c2548d32e993fd 100644 --- a/src/node_webstorage.h +++ b/src/node_webstorage.h @@ -33,19 +33,19 @@ class Storage : public BaseObject { void MemoryInfo(MemoryTracker* tracker) const override; static void New(const v8::FunctionCallbackInfo& args); - void Clear(); - v8::Local Enumerate(); - v8::Local Length(); - v8::Local Load(v8::Local key); - v8::Local LoadKey(const int index); - bool Remove(v8::Local key); - bool Store(v8::Local key, v8::Local value); + v8::Maybe Clear(); + v8::MaybeLocal Enumerate(); + v8::MaybeLocal Length(); + v8::MaybeLocal Load(v8::Local key); + v8::MaybeLocal LoadKey(const int index); + v8::Maybe Remove(v8::Local key); + v8::Maybe Store(v8::Local key, v8::Local value); SET_MEMORY_INFO_NAME(Storage) SET_SELF_SIZE(Storage) private: - bool Open(); + v8::Maybe Open(); ~Storage() override; std::string location_; diff --git a/src/node_zlib.cc b/src/node_zlib.cc index 3ff3ac13112330..90307cd4984ae5 100644 --- a/src/node_zlib.cc +++ b/src/node_zlib.cc @@ -25,6 +25,7 @@ #include "async_wrap-inl.h" #include "env-inl.h" +#include "node_errors.h" #include "node_external_reference.h" #include "threadpoolwork-inl.h" #include "util-inl.h" @@ -271,6 +272,8 @@ class CompressionStream : public AsyncWrap, public ThreadPoolWork { CHECK_EQ(unreported_allocations_, 0); } + Environment* env() const { return this->ThreadPoolWork::env(); } + void Close() { if (write_in_progress_) { pending_close_ = true; @@ -493,7 +496,9 @@ class CompressionStream : public AsyncWrap, public ThreadPoolWork { size += sizeof(size_t); CompressionStream* ctx = static_cast(data); char* memory = UncheckedMalloc(size); - if (UNLIKELY(memory == nullptr)) return nullptr; + if (memory == nullptr) [[unlikely]] { + return nullptr; + } *reinterpret_cast(memory) = size; ctx->unreported_allocations_.fetch_add(size, std::memory_order_relaxed); @@ -501,7 +506,9 @@ class CompressionStream : public AsyncWrap, public ThreadPoolWork { } static void FreeForZlib(void* data, void* pointer) { - if (UNLIKELY(pointer == nullptr)) return; + if (pointer == nullptr) [[unlikely]] { + return; + } CompressionStream* ctx = static_cast(data); char* real_pointer = static_cast(pointer) - sizeof(size_t); size_t real_size = *reinterpret_cast(real_pointer); @@ -694,7 +701,11 @@ class BrotliCompressionStream final : static_cast*>(wrap)); if (err.IsError()) { wrap->EmitError(err); - args.GetReturnValue().Set(false); + // TODO(addaleax): Sometimes we generate better error codes in C++ land, + // e.g. 
ERR_BROTLI_PARAM_SET_FAILED -- it's hard to access them with + // the current bindings setup, though. + THROW_ERR_ZLIB_INITIALIZATION_FAILED(wrap->env(), + "Initialization failed"); return; } @@ -708,12 +719,11 @@ class BrotliCompressionStream final : err = wrap->context()->SetParams(i, data[i]); if (err.IsError()) { wrap->EmitError(err); - args.GetReturnValue().Set(false); + THROW_ERR_ZLIB_INITIALIZATION_FAILED(wrap->env(), + "Initialization failed"); return; } } - - args.GetReturnValue().Set(true); } static void Params(const FunctionCallbackInfo& args) { diff --git a/src/path.cc b/src/path.cc index fade21c8af9414..4068e1d892d6b7 100644 --- a/src/path.cc +++ b/src/path.cc @@ -7,11 +7,11 @@ namespace node { #ifdef _WIN32 -constexpr bool IsPathSeparator(char c) noexcept { +constexpr bool IsPathSeparator(const char c) noexcept { return c == '\\' || c == '/'; } #else // POSIX -constexpr bool IsPathSeparator(char c) noexcept { +constexpr bool IsPathSeparator(const char c) noexcept { return c == '/'; } #endif // _WIN32 diff --git a/src/path.h b/src/path.h index 79252dae4d2b87..2045e7b44a9bb1 100644 --- a/src/path.h +++ b/src/path.h @@ -10,9 +10,8 @@ namespace node { -class Environment; +constexpr bool IsPathSeparator(const char c) noexcept; -constexpr bool IsPathSeparator(char c) noexcept; std::string NormalizeString(const std::string_view path, bool allowAboveRoot, const std::string_view separator); diff --git a/src/permission/fs_permission.cc b/src/permission/fs_permission.cc index 8b79850d156feb..ba5a78df18217d 100644 --- a/src/permission/fs_permission.cc +++ b/src/permission/fs_permission.cc @@ -17,12 +17,20 @@ namespace { std::string WildcardIfDir(const std::string& res) noexcept { - auto path = std::filesystem::path(res); - auto file_status = std::filesystem::status(path); - if (file_status.type() == std::filesystem::file_type::directory) { - path /= "*"; + uv_fs_t req; + int rc = uv_fs_stat(nullptr, &req, res.c_str(), nullptr); + if (rc == 0) { + const uv_stat_t* const s = static_cast(req.ptr); + if ((s->st_mode & S_IFMT) == S_IFDIR) { + // add wildcard when directory + if (res.back() == node::kPathSeparator) { + return res + "*"; + } + return res + node::kPathSeparator + "*"; + } } - return path.string(); + uv_fs_req_cleanup(&req); + return res; } void FreeRecursivelyNode( @@ -219,8 +227,8 @@ void FSPermission::RadixTree::Insert(const std::string& path) { } } - if (UNLIKELY(per_process::enabled_debug_list.enabled( - DebugCategory::PERMISSION_MODEL))) { + if (per_process::enabled_debug_list.enabled(DebugCategory::PERMISSION_MODEL)) + [[unlikely]] { per_process::Debug(DebugCategory::PERMISSION_MODEL, "Inserting %s\n", path); PrintTree(root_node_); } diff --git a/src/permission/fs_permission.h b/src/permission/fs_permission.h index 313a1344c6fed5..22b29b017e2061 100644 --- a/src/permission/fs_permission.h +++ b/src/permission/fs_permission.h @@ -5,7 +5,6 @@ #include "v8.h" -#include #include #include "permission/permission_base.h" #include "util.h" @@ -107,7 +106,7 @@ class FSPermission final : public PermissionBase { // path = /home/subdirectory // child = subdirectory/* if (idx >= path.length() && - child->prefix[i] == std::filesystem::path::preferred_separator) { + child->prefix[i] == node::kPathSeparator) { continue; } diff --git a/src/permission/permission.h b/src/permission/permission.h index 55309b6ba551c2..8e5bb87803656f 100644 --- a/src/permission/permission.h +++ b/src/permission/permission.h @@ -28,7 +28,7 @@ namespace permission { #define THROW_IF_INSUFFICIENT_PERMISSIONS(env, 
perm_, resource_, ...) \ do { \ - if (UNLIKELY(!(env)->permission()->is_granted(env, perm_, resource_))) { \ + if (!env->permission()->is_granted(env, perm_, resource_)) [[unlikely]] { \ node::permission::Permission::ThrowAccessDenied( \ (env), perm_, resource_); \ return __VA_ARGS__; \ @@ -38,7 +38,7 @@ namespace permission { #define ASYNC_THROW_IF_INSUFFICIENT_PERMISSIONS( \ env, wrap, perm_, resource_, ...) \ do { \ - if (UNLIKELY(!(env)->permission()->is_granted(env, perm_, resource_))) { \ + if (!env->permission()->is_granted(env, perm_, resource_)) [[unlikely]] { \ node::permission::Permission::AsyncThrowAccessDenied( \ (env), wrap, perm_, resource_); \ return __VA_ARGS__; \ @@ -52,7 +52,9 @@ class Permission { FORCE_INLINE bool is_granted(Environment* env, const PermissionScope permission, const std::string_view& res = "") const { - if (LIKELY(!enabled_)) return true; + if (!enabled_) [[likely]] { + return true; + } return is_scope_granted(env, permission, res); } diff --git a/src/quic/application.cc b/src/quic/application.cc index c4f397fc837c3a..876290bbbbb2c1 100644 --- a/src/quic/application.cc +++ b/src/quic/application.cc @@ -480,7 +480,7 @@ class DefaultApplication final : public Session::Application { // the data until after we're sure it's written. }; - if (LIKELY(!stream->is_eos())) { + if (!stream->is_eos()) [[likely]] { int ret = stream->Pull(std::move(next), bob::Options::OPTIONS_SYNC, stream_data->data, diff --git a/src/quic/application.h b/src/quic/application.h index fc228fafe357e5..79b9941f62b2b4 100644 --- a/src/quic/application.h +++ b/src/quic/application.h @@ -10,8 +10,7 @@ #include "sessionticket.h" #include "streams.h" -namespace node { -namespace quic { +namespace node::quic { // An Application implements the ALPN-protocol specific semantics on behalf // of a QUIC Session. 
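A pattern running through this patch is the replacement of the LIKELY()/UNLIKELY() macros, which util.h defined via __builtin_expect (see the src/util.h hunk further below), with the standard C++20 [[likely]]/[[unlikely]] attributes. The attribute annotates the statement rather than the condition, so the hint moves after the closing parenthesis and the body gains braces. A minimal standalone sketch of the rewrite, where is_granted() is a hypothetical predicate rather than the real Node.js API:

    bool is_granted();  // hypothetical predicate, for illustration only

    void Check() {
      // before: if (UNLIKELY(!is_granted())) return;
      if (!is_granted()) [[unlikely]] {
        return;  // cold path: the compiler lays this out off the hot trace
      }
      // hot path continues here
    }
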
@@ -154,8 +153,7 @@ struct Session::Application::StreamData final { std::string ToString() const; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/bindingdata.h b/src/quic/bindingdata.h index f5c19d1fbb8498..cbc8c9436de928 100644 --- a/src/quic/bindingdata.h +++ b/src/quic/bindingdata.h @@ -16,8 +16,7 @@ #include #include "defs.h" -namespace node { -namespace quic { +namespace node::quic { class Endpoint; class Packet; @@ -267,8 +266,7 @@ struct CallbackScope final : public CallbackScopeBase { explicit CallbackScope(T* ptr) : CallbackScopeBase(ptr->env()), ref(ptr) {} }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/cid.cc b/src/quic/cid.cc index f6eb9301a9215c..fdc636145210b2 100644 --- a/src/quic/cid.cc +++ b/src/quic/cid.cc @@ -8,8 +8,7 @@ #include "ncrypto.h" #include "quic/defs.h" -namespace node { -namespace quic { +namespace node::quic { // ============================================================================ // CID @@ -150,6 +149,5 @@ const CID::Factory& CID::Factory::random() { return instance; } -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC diff --git a/src/quic/cid.h b/src/quic/cid.h index 89d8fd2f2a0fed..15aa6d90748007 100644 --- a/src/quic/cid.h +++ b/src/quic/cid.h @@ -7,8 +7,7 @@ #include #include "defs.h" -namespace node { -namespace quic { +namespace node::quic { // CIDS are used to identify endpoints participating in a QUIC session. // Once created, CID instances are immutable. @@ -122,8 +121,7 @@ class CID::Factory { // of CID::Factory that implement the QUIC Load Balancers spec. 
}; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/data.h b/src/quic/data.h index ef36e248bbf562..7f97403d61f414 100644 --- a/src/quic/data.h +++ b/src/quic/data.h @@ -13,8 +13,7 @@ #include #include "defs.h" -namespace node { -namespace quic { +namespace node::quic { struct Path final : public ngtcp2_path { Path(const SocketAddress& local, const SocketAddress& remote); @@ -143,8 +142,7 @@ class QuicError final : public MemoryRetainer { const ngtcp2_ccerr* ptr_ = nullptr; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/defs.h b/src/quic/defs.h index 0e2e3095ad8676..628b2b754a36a5 100644 --- a/src/quic/defs.h +++ b/src/quic/defs.h @@ -9,15 +9,14 @@ #include #include -namespace node { -namespace quic { +namespace node::quic { #define NGTCP2_SUCCESS 0 #define NGTCP2_ERR(V) (V != NGTCP2_SUCCESS) #define NGTCP2_OK(V) (V == NGTCP2_SUCCESS) #define IF_QUIC_DEBUG(env) \ - if (UNLIKELY(env->enabled_debug_list()->enabled(DebugCategory::QUIC))) + if (env->enabled_debug_list()->enabled(DebugCategory::QUIC)) [[unlikely]] #define DISALLOW_COPY(Name) \ Name(const Name&) = delete; \ @@ -243,5 +242,4 @@ class DebugIndentScope { static int indent_; }; -} // namespace quic -} // namespace node +} // namespace node::quic diff --git a/src/quic/endpoint.cc b/src/quic/endpoint.cc index 4c89ecdd91803e..f116534a283ab1 100644 --- a/src/quic/endpoint.cc +++ b/src/quic/endpoint.cc @@ -86,7 +86,9 @@ STAT_STRUCT(Endpoint, ENDPOINT) namespace { #ifdef DEBUG bool is_diagnostic_packet_loss(double probability) { - if (LIKELY(probability == 0.0)) return false; + if (probability == 0.0) [[unlikely]] { + return false; + } unsigned char c = 255; CHECK(ncrypto::CSPRNG(&c, 1)); return (static_cast(c) / 255) < probability; @@ -233,6 +235,7 @@ bool SetOption(Environment* env, Maybe Endpoint::Options::From(Environment* env, Local value) { if (value.IsEmpty() || !value->IsObject()) { + if (value->IsUndefined()) return Just(Endpoint::Options()); THROW_ERR_INVALID_ARG_TYPE(env, "options must be an object"); return Nothing(); } @@ -624,7 +627,7 @@ void Endpoint::InitPerContext(Realm* realm, Local target) { #undef V #define V(name, _) IDX_STATS_ENDPOINT_##name, - enum IDX_STATS_ENDPONT { ENDPOINT_STATS(V) IDX_STATS_ENDPOINT_COUNT }; + enum IDX_STATS_ENDPOINT { ENDPOINT_STATS(V) IDX_STATS_ENDPOINT_COUNT }; NODE_DEFINE_CONSTANT(target, IDX_STATS_ENDPOINT_COUNT); #undef V @@ -656,6 +659,25 @@ void Endpoint::InitPerContext(Realm* realm, Local target) { NODE_DEFINE_CONSTANT(target, DEFAULT_REGULARTOKEN_EXPIRATION); NODE_DEFINE_CONSTANT(target, DEFAULT_MAX_PACKET_LENGTH); + static constexpr auto CLOSECONTEXT_CLOSE = + static_cast(CloseContext::CLOSE); + static constexpr auto CLOSECONTEXT_BIND_FAILURE = + static_cast(CloseContext::BIND_FAILURE); + static constexpr auto CLOSECONTEXT_LISTEN_FAILURE = + static_cast(CloseContext::LISTEN_FAILURE); + static constexpr auto CLOSECONTEXT_RECEIVE_FAILURE = + static_cast(CloseContext::RECEIVE_FAILURE); + static constexpr auto CLOSECONTEXT_SEND_FAILURE = + static_cast(CloseContext::SEND_FAILURE); + static constexpr auto CLOSECONTEXT_START_FAILURE = + static_cast(CloseContext::START_FAILURE); + NODE_DEFINE_CONSTANT(target, CLOSECONTEXT_CLOSE); + NODE_DEFINE_CONSTANT(target, CLOSECONTEXT_BIND_FAILURE); + 
NODE_DEFINE_CONSTANT(target, CLOSECONTEXT_LISTEN_FAILURE); + NODE_DEFINE_CONSTANT(target, CLOSECONTEXT_RECEIVE_FAILURE); + NODE_DEFINE_CONSTANT(target, CLOSECONTEXT_SEND_FAILURE); + NODE_DEFINE_CONSTANT(target, CLOSECONTEXT_START_FAILURE); + SetConstructorFunction(realm->context(), target, "Endpoint", @@ -682,6 +704,7 @@ Endpoint::Endpoint(Environment* env, udp_(this), addrLRU_(options_.address_lru_size) { MakeWeak(); + STAT_RECORD_TIMESTAMP(Stats, created_at); IF_QUIC_DEBUG(env) { Debug(this, "Endpoint created. Options %s", options.ToString()); } @@ -704,6 +727,7 @@ SocketAddress Endpoint::local_address() const { void Endpoint::MarkAsBusy(bool on) { Debug(this, "Marking endpoint as %s", on ? "busy" : "not busy"); + if (on) STAT_INCREMENT(Stats, server_busy_count); state_->busy = on ? 1 : 0; } @@ -805,7 +829,7 @@ void Endpoint::Send(Packet* packet) { // When diagnostic packet loss is enabled, the packet will be randomly // dropped. This can happen to any type of packet. We use this only in // testing to test various reliability issues. - if (UNLIKELY(is_diagnostic_packet_loss(options_.tx_loss))) { + if (is_diagnostic_packet_loss(options_.tx_loss)) [[unlikely]] { packet->Done(0); // Simulating tx packet loss return; @@ -874,7 +898,9 @@ void Endpoint::SendVersionNegotiation(const PathDescriptor& options) { bool Endpoint::SendStatelessReset(const PathDescriptor& options, size_t source_len) { - if (UNLIKELY(options_.disable_stateless_reset)) return false; + if (options_.disable_stateless_reset) [[unlikely]] { + return false; + } Debug(this, "Sending stateless reset on path %s with len %" PRIu64, options, @@ -1087,6 +1113,7 @@ void Endpoint::Destroy(CloseContext context, int status) { state_->bound = 0; state_->receiving = 0; BindingData::Get(env()).listening_endpoints.erase(this); + STAT_RECORD_TIMESTAMP(Stats, destroyed_at); EmitClose(close_context_, close_status_); } @@ -1473,14 +1500,14 @@ void Endpoint::Receive(const uv_buf_t& buf, #ifdef DEBUG // When diagnostic packet loss is enabled, the packet will be randomly // dropped. - if (UNLIKELY(is_diagnostic_packet_loss(options_.rx_loss))) { + if (is_diagnostic_packet_loss(options_.rx_loss)) [[unlikely]] { // Simulating rx packet loss return; } #endif // DEBUG // TODO(@jasnell): Implement blocklist support - // if (UNLIKELY(block_list_->Apply(remote_address))) { + // if (block_list_->Apply(remote_address)) [[unlikely]] { // Debug(this, "Ignoring blocked remote address: %s", remote_address); // return; // } @@ -1495,7 +1522,7 @@ void Endpoint::Receive(const uv_buf_t& buf, // checks. It is critical at this point that we do as little work as possible // to avoid a DOS vector. std::shared_ptr backing = env()->release_managed_buffer(buf); - if (UNLIKELY(!backing)) { + if (!backing) [[unlikely]] { // At this point something bad happened and we need to treat this as a fatal // case. There's likely no way to test this specific condition reliably. return Destroy(CloseContext::RECEIVE_FAILURE, UV_ENOMEM); @@ -1519,9 +1546,9 @@ void Endpoint::Receive(const uv_buf_t& buf, // QUIC currently requires CID lengths of max NGTCP2_MAX_CIDLEN. Ignore any // packet with a non-standard CID length. - if (UNLIKELY(pversion_cid.dcidlen > NGTCP2_MAX_CIDLEN || - pversion_cid.scidlen > NGTCP2_MAX_CIDLEN)) { - Debug(this, "Packet had incorrectly sized CIDs, igoring"); + if (pversion_cid.dcidlen > NGTCP2_MAX_CIDLEN || + pversion_cid.scidlen > NGTCP2_MAX_CIDLEN) [[unlikely]] { + Debug(this, "Packet had incorrectly sized CIDs, ignoring"); return; // Ignore the packet! 
} diff --git a/src/quic/endpoint.h b/src/quic/endpoint.h index f04131185e29a7..194f7c3d84c33c 100644 --- a/src/quic/endpoint.h +++ b/src/quic/endpoint.h @@ -17,8 +17,7 @@ #include "sessionticket.h" #include "tokens.h" -namespace node { -namespace quic { +namespace node::quic { #define ENDPOINT_CC(V) \ V(RENO, reno) \ @@ -92,7 +91,7 @@ class Endpoint final : public AsyncWrap, public Packet::Listener { // Similar to stateless resets, we enforce a limit on the number of retry // packets that can be generated and sent for a remote host. Generating // retry packets consumes a modest amount of resources and it's fairly - // trivial for a malcious peer to trigger generation of a large number of + // trivial for a malicious peer to trigger generation of a large number of // retries, so limiting them helps prevent a DOS vector. uint64_t max_retries = DEFAULT_MAX_RETRY_LIMIT; @@ -453,8 +452,7 @@ class Endpoint final : public AsyncWrap, public Packet::Listener { friend class Session; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/http3.cc b/src/quic/http3.cc index 11196a5f6ca060..f6858521cd3283 100644 --- a/src/quic/http3.cc +++ b/src/quic/http3.cc @@ -17,8 +17,7 @@ #include "session.h" #include "sessionticket.h" -namespace node { -namespace quic { +namespace node::quic { namespace { struct Http3HeadersTraits { @@ -616,7 +615,9 @@ class Http3Application final : public Session::Application { #define NGHTTP3_CALLBACK_SCOPE(name) \ auto name = From(conn, conn_user_data); \ - if (UNLIKELY(name->is_destroyed())) return NGHTTP3_ERR_CALLBACK_FAILURE; \ + if (name->is_destroyed()) [[unlikely]] { \ + return NGHTTP3_ERR_CALLBACK_FAILURE; \ + } \ NgHttp3CallbackScope scope(name->env()); static nghttp3_ssize on_read_data_callback(nghttp3_conn* conn, @@ -831,7 +832,6 @@ std::unique_ptr createHttp3Application( return std::make_unique(session, options); } -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC diff --git a/src/quic/http3.h b/src/quic/http3.h index b56d28b0dd202d..94860c9b771830 100644 --- a/src/quic/http3.h +++ b/src/quic/http3.h @@ -5,14 +5,11 @@ #include "session.h" -namespace node { -namespace quic { - +namespace node::quic { std::unique_ptr createHttp3Application( Session* session, const Session::Application_Options& options); -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/logstream.h b/src/quic/logstream.h index b9d3f8df974477..b5f2b59e3aa747 100644 --- a/src/quic/logstream.h +++ b/src/quic/logstream.h @@ -9,8 +9,7 @@ #include #include -namespace node { -namespace quic { +namespace node::quic { // The LogStream is a utility that the QUIC impl uses to publish both QLog // and Keylog diagnostic data (one instance for each). 
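The Packet::Create and Packet::Clone hunks above also show the idiom this patch uses for v8::MaybeLocal results: unwrap with ToLocal() and mark the failure branch [[unlikely]], since NewInstance() only fails when a JavaScript exception is already pending. A compact sketch of that shape, with a hypothetical helper name rather than the actual Packet internals:

    #include "v8.h"

    // Returns an empty handle on failure; a JS exception is then pending.
    v8::Local<v8::Object> NewInstanceOrEmpty(
        v8::Local<v8::Context> context,
        v8::Local<v8::FunctionTemplate> tmpl) {
      v8::Local<v8::Object> obj;
      if (!tmpl->InstanceTemplate()
               ->NewInstance(context)
               .ToLocal(&obj)) [[unlikely]] {
        return {};
      }
      return obj;
    }
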
@@ -76,8 +75,7 @@ class LogStream : public AsyncWrap, public StreamBase { void ensure_space(size_t amt); }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/packet.cc b/src/quic/packet.cc index 1311f4828bd835..9fee1f84bb2b93 100644 --- a/src/quic/packet.cc +++ b/src/quic/packet.cc @@ -117,10 +117,10 @@ Packet* Packet::Create(Environment* env, const char* diagnostic_label) { if (BindingData::Get(env).packet_freelist.empty()) { Local obj; - if (UNLIKELY(!GetConstructorTemplate(env) - ->InstanceTemplate() - ->NewInstance(env->context()) - .ToLocal(&obj))) { + if (!GetConstructorTemplate(env) + ->InstanceTemplate() + ->NewInstance(env->context()) + .ToLocal(&obj)) [[unlikely]] { return nullptr; } @@ -138,10 +138,10 @@ Packet* Packet::Clone() const { auto& binding = BindingData::Get(env()); if (binding.packet_freelist.empty()) { Local obj; - if (UNLIKELY(!GetConstructorTemplate(env()) - ->InstanceTemplate() - ->NewInstance(env()->context()) - .ToLocal(&obj))) { + if (!GetConstructorTemplate(env()) + ->InstanceTemplate() + ->NewInstance(env()->context()) + .ToLocal(&obj)) [[unlikely]] { return nullptr; } diff --git a/src/quic/packet.h b/src/quic/packet.h index 761fb3af4e609d..58ab6f46fa8d21 100644 --- a/src/quic/packet.h +++ b/src/quic/packet.h @@ -18,8 +18,7 @@ #include "defs.h" #include "tokens.h" -namespace node { -namespace quic { +namespace node::quic { struct PathDescriptor { uint32_t version; @@ -147,8 +146,7 @@ class Packet final : public ReqWrap { std::shared_ptr data_; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/preferredaddress.h b/src/quic/preferredaddress.h index 37003e2eb704c7..369c09c8463dc5 100644 --- a/src/quic/preferredaddress.h +++ b/src/quic/preferredaddress.h @@ -10,8 +10,7 @@ #include #include "defs.h" -namespace node { -namespace quic { +namespace node::quic { // PreferredAddress is a helper class used only when a client Session receives // an advertised preferred address from a server. 
The helper provides @@ -67,8 +66,7 @@ class PreferredAddress final { const ngtcp2_preferred_addr* paddr_; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/session.cc b/src/quic/session.cc index ec54a67588d9b2..b98ce4a132af60 100644 --- a/src/quic/session.cc +++ b/src/quic/session.cc @@ -532,13 +532,13 @@ Session::Session(Endpoint* endpoint, auto& state = BindingData::Get(env()); - if (UNLIKELY(config_.options.qlog)) { + if (config_.options.qlog) [[unlikely]] { qlog_stream_ = LogStream::Create(env()); if (qlog_stream_) defineProperty(state.qlog_string(), qlog_stream_->object()); } - if (UNLIKELY(config_.options.tls_options.keylog)) { + if (config_.options.tls_options.keylog) [[unlikely]] { keylog_stream_ = LogStream::Create(env()); if (keylog_stream_) defineProperty(state.keylog_string(), keylog_stream_->object()); @@ -1308,7 +1308,7 @@ void Session::SendConnectionClose() { ssize_t nwrite = ngtcp2_conn_write_connection_close( *this, &path, nullptr, vec.base, vec.len, last_error_, uv_hrtime()); - if (UNLIKELY(nwrite < 0)) { + if (nwrite < 0) [[unlikely]] { packet->Done(UV_ECANCELED); last_error_ = QuicError::ForNgtcp2Error(NGTCP2_INTERNAL_ERROR); Close(CloseMethod::SILENT); @@ -1617,7 +1617,9 @@ void Session::EmitPathValidation(PathValidationResult result, const std::optional& oldPath) { DCHECK(!is_destroyed()); if (!env()->can_call_into_js()) return; - if (LIKELY(state_->path_validation == 0)) return; + if (state_->path_validation == 0) [[likely]] { + return; + } auto isolate = env()->isolate(); CallbackScope cb_scope(this); @@ -1658,7 +1660,7 @@ void Session::EmitSessionTicket(Store&& ticket) { // If there is nothing listening for the session ticket, don't bother // emitting. - if (LIKELY(!wants_session_ticket())) { + if (!wants_session_ticket()) [[likely]] { Debug(this, "Session ticket was discarded"); return; } @@ -1682,10 +1684,16 @@ void Session::EmitStream(BaseObjectPtr stream) { if (is_destroyed()) return; if (!env()->can_call_into_js()) return; CallbackScope cb_scope(this); - Local arg = stream->object(); + auto isolate = env()->isolate(); + Local argv[] = { + stream->object(), + Integer::NewFromUnsigned(isolate, + static_cast(stream->direction())), + }; Debug(this, "Notifying JavaScript of stream created"); - MakeCallback(BindingData::Get(env()).stream_created_callback(), 1, &arg); + MakeCallback( + BindingData::Get(env()).stream_created_callback(), arraysize(argv), argv); } void Session::EmitVersionNegotiation(const ngtcp2_pkt_hd& hd, @@ -1709,7 +1717,7 @@ void Session::EmitVersionNegotiation(const ngtcp2_pkt_hd& hd, versions.AllocateSufficientStorage(nsv); for (size_t n = 0; n < nsv; n++) versions[n] = to_integer(sv[n]); - // supported are the versons we acutually support expressed as a range. + // supported are the versions we acutually support expressed as a range. // The first value is the minimum version, the second is the maximum. 
Local supported[] = {to_integer(config_.options.min_version), to_integer(config_.options.version)}; @@ -1741,7 +1749,9 @@ void Session::EmitKeylog(const char* line) { #define NGTCP2_CALLBACK_SCOPE(name) \ auto name = Impl::From(conn, user_data); \ - if (UNLIKELY(name->is_destroyed())) return NGTCP2_ERR_CALLBACK_FAILURE; \ + if (name->is_destroyed()) [[unlikely]] { \ + return NGTCP2_ERR_CALLBACK_FAILURE; \ + } \ NgTcp2CallbackScope scope(session->env()); struct Session::Impl { @@ -2018,7 +2028,9 @@ struct Session::Impl { ngtcp2_encryption_level level, void* user_data) { auto session = Impl::From(conn, user_data); - if (UNLIKELY(session->is_destroyed())) return NGTCP2_ERR_CALLBACK_FAILURE; + if (session->is_destroyed()) [[unlikely]] { + return NGTCP2_ERR_CALLBACK_FAILURE; + } CHECK(!session->is_server()); if (level != NGTCP2_ENCRYPTION_LEVEL_1RTT) return NGTCP2_SUCCESS; @@ -2077,7 +2089,9 @@ struct Session::Impl { ngtcp2_encryption_level level, void* user_data) { auto session = Impl::From(conn, user_data); - if (UNLIKELY(session->is_destroyed())) return NGTCP2_ERR_CALLBACK_FAILURE; + if (session->is_destroyed()) [[unlikely]] { + return NGTCP2_ERR_CALLBACK_FAILURE; + } CHECK(session->is_server()); if (level != NGTCP2_ENCRYPTION_LEVEL_1RTT) return NGTCP2_SUCCESS; @@ -2358,6 +2372,11 @@ void Session::InitPerContext(Realm* realm, Local target) { NODE_DEFINE_CONSTANT(target, QUIC_PROTO_MAX); NODE_DEFINE_CONSTANT(target, QUIC_PROTO_MIN); + NODE_DEFINE_STRING_CONSTANT( + target, "DEFAULT_CIPHERS", TLSContext::DEFAULT_CIPHERS); + NODE_DEFINE_STRING_CONSTANT( + target, "DEFAULT_GROUPS", TLSContext::DEFAULT_GROUPS); + #define V(name, _) IDX_STATS_SESSION_##name, enum SessionStatsIdx { SESSION_STATS(V) IDX_STATS_SESSION_COUNT }; #undef V diff --git a/src/quic/session.h b/src/quic/session.h index 41a797dad6844d..f980af9611c6c7 100644 --- a/src/quic/session.h +++ b/src/quic/session.h @@ -25,8 +25,7 @@ #include "tlscontext.h" #include "transportparams.h" -namespace node { -namespace quic { +namespace node::quic { class Endpoint; @@ -438,8 +437,7 @@ class Session final : public AsyncWrap, private SessionTicket::AppData::Source { friend class TransportParams; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/sessionticket.h b/src/quic/sessionticket.h index 5776459d550d13..a2e6e3758f6f9c 100644 --- a/src/quic/sessionticket.h +++ b/src/quic/sessionticket.h @@ -11,8 +11,7 @@ #include "data.h" #include "defs.h" -namespace node { -namespace quic { +namespace node::quic { // A TLS 1.3 Session resumption ticket. Encapsulates both the TLS // ticket and the encoded QUIC transport parameters. The encoded @@ -61,7 +60,7 @@ class SessionTicket final : public MemoryRetainer { // SessionTicket::AppData is a utility class that is used only during the // generation or access of TLS stateless sesson tickets. It exists solely to // provide a easier way for Session::Application instances to set relevant -// metadata in the session ticket when it is created, and the exract and +// metadata in the session ticket when it is created, and the extract and // subsequently verify that data when a ticket is received and is being // validated. The app data is completely opaque to anything other than the // server-side of the Session::Application that sets it. 
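Several files in this patch (node_webstorage.cc above, spawn_sync.cc and string_bytes.h below) migrate internal helpers from bool or Maybe&lt;bool&gt; to v8::Maybe&lt;void&gt;: JustVoid() reports success, Nothing&lt;void&gt;() reports that a JavaScript exception has been thrown, and callers branch on IsJust()/IsNothing() instead of a boolean that conflated "operation failed" with "result was false". A minimal sketch of those semantics; DoWork is a hypothetical name:

    #include "v8.h"

    v8::Maybe<void> DoWork(v8::Isolate* isolate, bool ok) {
      if (!ok) {
        isolate->ThrowException(
            v8::String::NewFromUtf8Literal(isolate, "work failed"));
        return v8::Nothing<void>();  // failure: exception already thrown
      }
      return v8::JustVoid();  // success carries no payload
    }

    // Caller: if (DoWork(isolate, flag).IsNothing()) return;  // propagate
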
@@ -103,8 +102,7 @@ class SessionTicket::AppData final { SSL* ssl_; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/streams.cc b/src/quic/streams.cc index a627bf37d61b67..ec6bfb80a56a00 100644 --- a/src/quic/streams.cc +++ b/src/quic/streams.cc @@ -871,7 +871,9 @@ bool Stream::AddHeader(const Header& header) { const auto push = [&](auto raw) { Local value; - if (UNLIKELY(!raw.ToLocal(&value))) return false; + if (!raw.ToLocal(&value)) [[unlikely]] { + return false; + } headers_.push_back(value); return true; }; diff --git a/src/quic/streams.h b/src/quic/streams.h index 1bab5b245fcc50..0bacb37faf542d 100644 --- a/src/quic/streams.h +++ b/src/quic/streams.h @@ -15,8 +15,7 @@ #include "bindingdata.h" #include "data.h" -namespace node { -namespace quic { +namespace node::quic { class Session; @@ -155,7 +154,7 @@ class Stream : public AsyncWrap, void BeginHeaders(HeadersKind kind); // Returns false if the header cannot be added. This will typically happen - // if the application does not support headers, a maximimum number of headers + // if the application does not support headers, a maximum number of headers // have already been added, or the maximum total header length is reached. bool AddHeader(const Header& header); void set_headers_kind(HeadersKind kind); @@ -227,8 +226,7 @@ class Stream : public AsyncWrap, void Unschedule(); }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/tlscontext.cc b/src/quic/tlscontext.cc index 76d11c3d837e2e..358bad2ee3697f 100644 --- a/src/quic/tlscontext.cc +++ b/src/quic/tlscontext.cc @@ -274,7 +274,7 @@ crypto::SSLCtxPointer TLSContext::Initialize() { break; } case Side::CLIENT: { - ctx_.reset(SSL_CTX_new(TLS_client_method())); + ctx.reset(SSL_CTX_new(TLS_client_method())); CHECK_EQ(ngtcp2_crypto_quictls_configure_client_context(ctx.get()), 0); SSL_CTX_set_session_cache_mode( @@ -515,8 +515,8 @@ crypto::SSLPointer TLSSession::Initialize( ngtcp2_conn_set_tls_native_handle(*session_, ssl.get()); // Enable tracing if the `--trace-tls` command line flag is used. - if (UNLIKELY(session_->env()->options()->trace_tls || - options.enable_tls_trace)) { + if (session_->env()->options()->trace_tls || options.enable_tls_trace) + [[unlikely]] { EnableTrace(session_->env(), &bio_trace_, *this); } diff --git a/src/quic/tlscontext.h b/src/quic/tlscontext.h index 3ffbd7770443dd..3f2f8aff42a8a5 100644 --- a/src/quic/tlscontext.h +++ b/src/quic/tlscontext.h @@ -13,8 +13,7 @@ #include "defs.h" #include "sessionticket.h" -namespace node { -namespace quic { +namespace node::quic { class Session; class TLSContext; @@ -29,7 +28,7 @@ class TLSSession final : public MemoryRetainer { public: static const TLSSession& From(const SSL* ssl); - // The constructor is public in order to satisify the call to std::make_unique + // The constructor is public in order to satisfy the call to std::make_unique // in TLSContext::NewSession. It should not be called directly. 
TLSSession(Session* session, std::shared_ptr context, @@ -221,8 +220,7 @@ class TLSContext final : public MemoryRetainer, friend class TLSSession; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/tokens.cc b/src/quic/tokens.cc index fcb2a24ca3b6f8..d3ebde6225da56 100644 --- a/src/quic/tokens.cc +++ b/src/quic/tokens.cc @@ -10,8 +10,7 @@ #include "nbytes.h" #include "ncrypto.h" -namespace node { -namespace quic { +namespace node::quic { // ============================================================================ // TokenSecret @@ -300,7 +299,6 @@ RegularToken::operator const char*() const { return reinterpret_cast(ptr_.base); } -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC diff --git a/src/quic/tokens.h b/src/quic/tokens.h index bd62faee9eabd6..4f9f25dd12324b 100644 --- a/src/quic/tokens.h +++ b/src/quic/tokens.h @@ -10,8 +10,7 @@ #include "cid.h" #include "defs.h" -namespace node { -namespace quic { +namespace node::quic { // TokenSecrets are used to generate things like stateless reset tokens, // retry tokens, and token packets. They are always QUIC_TOKENSECRET_LEN @@ -251,8 +250,7 @@ class RegularToken final : public MemoryRetainer { const ngtcp2_vec ptr_; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/quic/transportparams.h b/src/quic/transportparams.h index 349ddd5c948bdc..af6af3fc0266b3 100644 --- a/src/quic/transportparams.h +++ b/src/quic/transportparams.h @@ -13,8 +13,7 @@ #include "data.h" #include "tokens.h" -namespace node { -namespace quic { +namespace node::quic { class Endpoint; class Session; @@ -160,8 +159,7 @@ class TransportParams final { QuicError error_ = QuicError::TRANSPORT_NO_ERROR; }; -} // namespace quic -} // namespace node +} // namespace node::quic #endif // HAVE_OPENSSL && NODE_OPENSSL_HAS_QUIC #endif // defined(NODE_WANT_INTERNALS) && NODE_WANT_INTERNALS diff --git a/src/spawn_sync.cc b/src/spawn_sync.cc index 36570d069ad00b..5f20e9cc0881f9 100644 --- a/src/spawn_sync.cc +++ b/src/spawn_sync.cc @@ -41,6 +41,7 @@ using v8::Int32; using v8::Integer; using v8::Isolate; using v8::Just; +using v8::JustVoid; using v8::Local; using v8::Maybe; using v8::MaybeLocal; @@ -441,7 +442,7 @@ MaybeLocal SyncProcessRunner::Run(Local options) { CHECK_EQ(lifecycle_, kUninitialized); - Maybe r = TryInitializeAndRunLoop(options); + Maybe r = TryInitializeAndRunLoop(options); CloseHandlesAndDeleteLoop(); if (r.IsNothing()) return MaybeLocal(); @@ -450,7 +451,7 @@ MaybeLocal SyncProcessRunner::Run(Local options) { return scope.Escape(result); } -Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { +Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { int r; // There is no recovery from failure inside TryInitializeAndRunLoop - the @@ -461,7 +462,7 @@ Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { uv_loop_ = new uv_loop_t; if (uv_loop_ == nullptr) { SetError(UV_ENOMEM); - return Just(false); + return JustVoid(); } r = uv_loop_init(uv_loop_); @@ -469,21 +470,21 @@ Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { delete uv_loop_; uv_loop_ = nullptr; SetError(r); - return Just(false); + return JustVoid(); } - if (!ParseOptions(options).To(&r)) return 
Nothing(); + if (!ParseOptions(options).To(&r)) return Nothing(); if (r < 0) { SetError(r); - return Just(false); + return JustVoid(); } if (timeout_ > 0) { r = uv_timer_init(uv_loop_, &uv_timer_); if (r < 0) { SetError(r); - return Just(false); + return JustVoid(); } uv_unref(reinterpret_cast(&uv_timer_)); @@ -498,7 +499,7 @@ Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { r = uv_timer_start(&uv_timer_, KillTimerCallback, timeout_, 0); if (r < 0) { SetError(r); - return Just(false); + return JustVoid(); } } @@ -506,7 +507,7 @@ Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { r = uv_spawn(uv_loop_, &uv_process_, &uv_process_options_); if (r < 0) { SetError(r); - return Just(false); + return JustVoid(); } uv_process_.data = this; @@ -515,7 +516,7 @@ Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { r = pipe->Start(); if (r < 0) { SetPipeError(r); - return Just(false); + return JustVoid(); } } } @@ -527,10 +528,9 @@ Maybe SyncProcessRunner::TryInitializeAndRunLoop(Local options) { // If we get here the process should have exited. CHECK_GE(exit_status_, 0); - return Just(true); + return JustVoid(); } - void SyncProcessRunner::CloseHandlesAndDeleteLoop() { CHECK_LT(lifecycle_, kHandlesClosed); diff --git a/src/spawn_sync.h b/src/spawn_sync.h index d5b74e67d83094..8243737130af1b 100644 --- a/src/spawn_sync.h +++ b/src/spawn_sync.h @@ -155,7 +155,7 @@ class SyncProcessRunner { inline Environment* env() const; v8::MaybeLocal Run(v8::Local options); - v8::Maybe TryInitializeAndRunLoop(v8::Local options); + v8::Maybe TryInitializeAndRunLoop(v8::Local options); void CloseHandlesAndDeleteLoop(); void CloseStdioPipes(); diff --git a/src/string_bytes.h b/src/string_bytes.h index fde5070ffb66a7..53bc003fbda436 100644 --- a/src/string_bytes.h +++ b/src/string_bytes.h @@ -37,19 +37,19 @@ class StringBytes { public: class InlineDecoder : public MaybeStackBuffer { public: - inline v8::Maybe Decode(Environment* env, + inline v8::Maybe Decode(Environment* env, v8::Local string, enum encoding enc) { size_t storage; if (!StringBytes::StorageSize(env->isolate(), string, enc).To(&storage)) - return v8::Nothing(); + return v8::Nothing(); AllocateSufficientStorage(storage); const size_t length = StringBytes::Write(env->isolate(), out(), storage, string, enc); // No zero terminator is included when using this method. SetLength(length); - return v8::Just(true); + return v8::JustVoid(); } inline size_t size() const { return length(); } diff --git a/src/string_decoder.cc b/src/string_decoder.cc index 4c30c1119f1bd8..5493bc1d9cc871 100644 --- a/src/string_decoder.cc +++ b/src/string_decoder.cc @@ -114,7 +114,7 @@ MaybeLocal StringDecoder::DecodeData(Isolate* isolate, state_[kMissingBytes] -= found_bytes; state_[kBufferedBytes] += found_bytes; - if (LIKELY(MissingBytes() == 0)) { + if (MissingBytes() == 0) [[likely]] { // If no more bytes are missing, create a small string that we // will later prepend. if (!MakeString(isolate, @@ -132,7 +132,7 @@ MaybeLocal StringDecoder::DecodeData(Isolate* isolate, // It could be that trying to finish the previous chunk already // consumed all data that we received in this chunk. - if (UNLIKELY(nread == 0)) { + if (nread == 0) [[unlikely]] { body = !prepend.IsEmpty() ? 
prepend : String::Empty(isolate); prepend = Local(); } else { diff --git a/src/timers.cc b/src/timers.cc index 127806fbcdfd3e..c7ba22a078fb5f 100644 --- a/src/timers.cc +++ b/src/timers.cc @@ -33,7 +33,8 @@ void BindingData::SlowGetLibuvNow(const FunctionCallbackInfo& args) { args.GetReturnValue().Set(Number::New(args.GetIsolate(), now)); } -double BindingData::FastGetLibuvNow(Local receiver) { +double BindingData::FastGetLibuvNow(Local unused, + Local receiver) { return GetLibuvNowImpl(FromJSObject(receiver)); } @@ -47,7 +48,9 @@ void BindingData::SlowScheduleTimer(const FunctionCallbackInfo& args) { ScheduleTimerImpl(Realm::GetBindingData(args), duration); } -void BindingData::FastScheduleTimer(Local receiver, int64_t duration) { +void BindingData::FastScheduleTimer(Local unused, + Local receiver, + int64_t duration) { ScheduleTimerImpl(FromJSObject(receiver), duration); } @@ -61,7 +64,9 @@ void BindingData::SlowToggleTimerRef( args[0]->IsTrue()); } -void BindingData::FastToggleTimerRef(Local receiver, bool ref) { +void BindingData::FastToggleTimerRef(Local unused, + Local receiver, + bool ref) { ToggleTimerRefImpl(FromJSObject(receiver), ref); } @@ -75,7 +80,9 @@ void BindingData::SlowToggleImmediateRef( args[0]->IsTrue()); } -void BindingData::FastToggleImmediateRef(Local receiver, bool ref) { +void BindingData::FastToggleImmediateRef(Local unused, + Local receiver, + bool ref) { ToggleImmediateRefImpl(FromJSObject(receiver), ref); } diff --git a/src/timers.h b/src/timers.h index d514a70fc25990..fd3cbf66f7c516 100644 --- a/src/timers.h +++ b/src/timers.h @@ -26,23 +26,29 @@ class BindingData : public SnapshotableObject { static void SetupTimers(const v8::FunctionCallbackInfo& args); static void SlowGetLibuvNow(const v8::FunctionCallbackInfo& args); - static double FastGetLibuvNow(v8::Local receiver); + static double FastGetLibuvNow(v8::Local unused, + v8::Local receiver); static double GetLibuvNowImpl(BindingData* data); static void SlowScheduleTimer( const v8::FunctionCallbackInfo& args); - static void FastScheduleTimer(v8::Local receiver, + static void FastScheduleTimer(v8::Local unused, + v8::Local receiver, int64_t duration); static void ScheduleTimerImpl(BindingData* data, int64_t duration); static void SlowToggleTimerRef( const v8::FunctionCallbackInfo& args); - static void FastToggleTimerRef(v8::Local receiver, bool ref); + static void FastToggleTimerRef(v8::Local unused, + v8::Local receiver, + bool ref); static void ToggleTimerRefImpl(BindingData* data, bool ref); static void SlowToggleImmediateRef( const v8::FunctionCallbackInfo& args); - static void FastToggleImmediateRef(v8::Local receiver, bool ref); + static void FastToggleImmediateRef(v8::Local unused, + v8::Local receiver, + bool ref); static void ToggleImmediateRefImpl(BindingData* data, bool ref); static void CreatePerIsolateProperties(IsolateData* isolate_data, diff --git a/src/tracing/trace_event.h b/src/tracing/trace_event.h index be0f55a409a71b..a662a081dc3bf3 100644 --- a/src/tracing/trace_event.h +++ b/src/tracing/trace_event.h @@ -321,7 +321,9 @@ class TraceEventHelper { static inline const uint8_t* GetCategoryGroupEnabled(const char* group) { v8::TracingController* controller = GetTracingController(); static const uint8_t disabled = 0; - if (UNLIKELY(controller == nullptr)) return &disabled; + if (controller == nullptr) [[unlikely]] { + return &disabled; + } return controller->GetCategoryGroupEnabled(group); } }; @@ -483,13 +485,13 @@ static V8_INLINE uint64_t AddTraceEventWithTimestampImpl( const char* scope, 
uint64_t id, uint64_t bind_id, int32_t num_args, const char** arg_names, const uint8_t* arg_types, const uint64_t* arg_values, unsigned int flags, int64_t timestamp) { - std::unique_ptr arg_convertables[2]; + std::unique_ptr arg_convertibles[2]; if (num_args > 0 && arg_types[0] == TRACE_VALUE_TYPE_CONVERTABLE) { - arg_convertables[0].reset(reinterpret_cast( + arg_convertibles[0].reset(reinterpret_cast( static_cast(arg_values[0]))); } if (num_args > 1 && arg_types[1] == TRACE_VALUE_TYPE_CONVERTABLE) { - arg_convertables[1].reset(reinterpret_cast( + arg_convertibles[1].reset(reinterpret_cast( static_cast(arg_values[1]))); } // DCHECK_LE(num_args, 2); @@ -498,7 +500,7 @@ static V8_INLINE uint64_t AddTraceEventWithTimestampImpl( if (controller == nullptr) return 0; return controller->AddTraceEventWithTimestamp( phase, category_group_enabled, name, scope, id, bind_id, num_args, - arg_names, arg_types, arg_values, arg_convertables, flags, timestamp); + arg_names, arg_types, arg_values, arg_convertibles, flags, timestamp); } static V8_INLINE void AddMetadataEventImpl( diff --git a/src/udp_wrap.cc b/src/udp_wrap.cc index 531de7fe2c4756..994176ea6301e9 100644 --- a/src/udp_wrap.cc +++ b/src/udp_wrap.cc @@ -589,7 +589,7 @@ ssize_t UDPWrap::Send(uv_buf_t* bufs_ptr, msg_size += bufs_ptr[i].len; int err = 0; - if (!UNLIKELY(env()->options()->test_udp_no_try_send)) { + if (!env()->options()->test_udp_no_try_send) [[likely]] { err = uv_udp_try_send(&handle_, bufs_ptr, count, addr); if (err == UV_ENOSYS || err == UV_EAGAIN) { err = 0; diff --git a/src/undici_version.h b/src/undici_version.h index 8a50f720dbfd9b..a581c6c22a9606 100644 --- a/src/undici_version.h +++ b/src/undici_version.h @@ -2,5 +2,5 @@ // Refer to tools/dep_updaters/update-undici.sh #ifndef SRC_UNDICI_VERSION_H_ #define SRC_UNDICI_VERSION_H_ -#define UNDICI_VERSION "6.19.8" +#define UNDICI_VERSION "6.20.0" #endif // SRC_UNDICI_VERSION_H_ diff --git a/src/util-inl.h b/src/util-inl.h index 9dd61a98933685..47d6a73c7927cf 100644 --- a/src/util-inl.h +++ b/src/util-inl.h @@ -247,7 +247,7 @@ T* UncheckedRealloc(T* pointer, size_t n) { void* allocated = realloc(pointer, full_size); - if (UNLIKELY(allocated == nullptr)) { + if (allocated == nullptr) [[unlikely]] { // Tell V8 that memory is low and retry. LowMemoryNotification(); allocated = realloc(pointer, full_size); @@ -326,7 +326,7 @@ v8::MaybeLocal ToV8Value(v8::Local context, std::string_view str, v8::Isolate* isolate) { if (isolate == nullptr) isolate = context->GetIsolate(); - if (UNLIKELY(str.size() >= static_cast(v8::String::kMaxLength))) { + if (str.size() >= static_cast(v8::String::kMaxLength)) [[unlikely]] { // V8 only has a TODO comment about adding an exception when the maximum // string size is exceeded.
ThrowErrStringTooLong(isolate); diff --git a/src/util.cc b/src/util.cc index f9aad9ef5a6213..34263cc9f3dc06 100644 --- a/src/util.cc +++ b/src/util.cc @@ -675,8 +675,9 @@ void SetConstructorFunction(Local context, Local name, Local tmpl, SetConstructorFunctionFlag flag) { - if (LIKELY(flag == SetConstructorFunctionFlag::SET_CLASS_NAME)) + if (flag == SetConstructorFunctionFlag::SET_CLASS_NAME) [[likely]] { tmpl->SetClassName(name); + } that->Set(context, name, tmpl->GetFunction(context).ToLocalChecked()).Check(); } @@ -694,8 +695,9 @@ void SetConstructorFunction(Isolate* isolate, Local name, Local tmpl, SetConstructorFunctionFlag flag) { - if (LIKELY(flag == SetConstructorFunctionFlag::SET_CLASS_NAME)) + if (flag == SetConstructorFunctionFlag::SET_CLASS_NAME) [[likely]] { tmpl->SetClassName(name); + } that->Set(name, tmpl); } diff --git a/src/util.h b/src/util.h index a6da8720c499df..b1f316eebc7199 100644 --- a/src/util.h +++ b/src/util.h @@ -38,6 +38,7 @@ #include #include +#include #include #include #include @@ -57,6 +58,8 @@ namespace node { +constexpr char kPathSeparator = std::filesystem::path::preferred_separator; + #ifdef _WIN32 /* MAX_PATH is in characters, not bytes. Make sure we have enough headroom. */ #define PATH_MAX_BYTES (MAX_PATH * 4) @@ -155,12 +158,8 @@ void DumpJavaScriptBacktrace(FILE* fp); } while (0) #ifdef __GNUC__ -#define LIKELY(expr) __builtin_expect(!!(expr), 1) -#define UNLIKELY(expr) __builtin_expect(!!(expr), 0) #define PRETTY_FUNCTION_NAME __PRETTY_FUNCTION__ #else -#define LIKELY(expr) expr -#define UNLIKELY(expr) expr #if defined(_MSC_VER) #define PRETTY_FUNCTION_NAME __FUNCSIG__ #else @@ -171,11 +170,11 @@ void DumpJavaScriptBacktrace(FILE* fp); #define STRINGIFY_(x) #x #define STRINGIFY(x) STRINGIFY_(x) -#define CHECK(expr) \ - do { \ - if (UNLIKELY(!(expr))) { \ - ERROR_AND_ABORT(expr); \ - } \ +#define CHECK(expr) \ + do { \ + if (!(expr)) [[unlikely]] { \ + ERROR_AND_ABORT(expr); \ + } \ } while (0) #define CHECK_EQ(a, b) CHECK((a) == (b)) @@ -320,7 +319,7 @@ class KVStore { virtual std::shared_ptr Clone(v8::Isolate* isolate) const; virtual v8::Maybe AssignFromObject(v8::Local context, v8::Local entries); - v8::Maybe AssignToObject(v8::Isolate* isolate, + v8::Maybe AssignToObject(v8::Isolate* isolate, v8::Local context, v8::Local object); @@ -562,6 +561,10 @@ class BufferValue : public MaybeStackBuffer { inline std::string_view ToStringView() const { return std::string_view(out(), length()); } + inline std::u8string_view ToU8StringView() const { + return std::u8string_view(reinterpret_cast(out()), + length()); + } }; #define SPREAD_BUFFER_ARG(val, name) \ diff --git a/test/README.md b/test/README.md index 1a3eec09373412..a79c97e941d542 100644 --- a/test/README.md +++ b/test/README.md @@ -38,7 +38,7 @@ For the tests to run on Windows, be sure to clone Node.js source code with the | `tick-processor` | No | Tests for the V8 tick processor integration.[^4] | | `v8-updates` | No | Tests for V8 performance integration. | -[^1]: [Documentation](./common/README.md) +[^1]: [Documentation](../test/common/README.md) [^2]: Tests for networking related modules may also be present in other directories, but those tests do not make outbound connections. 
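The hunks that follow migrate several tests from hand-built shell command strings to the new `common.escapePOSIXShell` tagged-template helper that this diff adds to `test/common/index.js` (and documents in `test/common/README.md` below). A minimal sketch of the migration pattern, assuming the helper's documented `[command, options]` return shape; the `-e "console.log(42)"` payload is illustrative only, not taken from any real test:

```js
'use strict';
const common = require('../common');
const { exec } = require('node:child_process');

// Before: values were spliced straight into the shell string, so a path
// containing spaces or quotes could break (or inject into) the command:
//   let cmdline = `ulimit -c 0; ${process.execPath} ...`;

// After: the tagged template returns [command, options]; on POSIX the
// interpolated values travel through options.env, never the command string.
// (The real tests skip Windows before reaching code that uses `ulimit`.)
const cmdline = common.escapePOSIXShell`ulimit -c 0; "${process.execPath}" -e "console.log(42)"`;

exec(...cmdline, common.mustCall((err, stdout) => {
  if (err) throw err;
  console.log(stdout); // '42\n'
}));
```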
diff --git a/test/abort/test-abort-fatal-error.js b/test/abort/test-abort-fatal-error.js index 6c197ceb028540..be7d3c6f65c3b5 100644 --- a/test/abort/test-abort-fatal-error.js +++ b/test/abort/test-abort-fatal-error.js @@ -27,11 +27,12 @@ if (common.isWindows) const assert = require('assert'); const exec = require('child_process').exec; -let cmdline = `ulimit -c 0; ${process.execPath}`; -cmdline += ' --max-old-space-size=16 --max-semi-space-size=4'; -cmdline += ' -e "a = []; for (i = 0; i < 1e9; i++) { a.push({}) }"'; +const cmdline = + common.escapePOSIXShell`ulimit -c 0; "${ + process.execPath + }" --max-old-space-size=16 --max-semi-space-size=4 -e "a = []; for (i = 0; i < 1e9; i++) { a.push({}) }"`; -exec(cmdline, function(err, stdout, stderr) { +exec(...cmdline, common.mustCall((err, stdout, stderr) => { if (!err) { console.log(stdout); console.log(stderr); @@ -39,4 +40,4 @@ exec(cmdline, function(err, stdout, stderr) { } assert(common.nodeProcessAborted(err.code, err.signal)); -}); +})); diff --git a/test/addons/non-node-context/test-make-buffer.js b/test/addons/non-node-context/test-make-buffer.js index 344ea973d76c0c..26458137b05bca 100644 --- a/test/addons/non-node-context/test-make-buffer.js +++ b/test/addons/non-node-context/test-make-buffer.js @@ -6,7 +6,7 @@ const { makeBufferInNewContext, } = require(`./build/${common.buildType}/binding`); -// Because the `Buffer` function and its protoype property only (currently) +// Because the `Buffer` function and its prototype property only (currently) // exist in a Node.js instance’s main context, trying to create buffers from // another context throws an exception. assert.throws( diff --git a/test/addons/openssl-providers/providers.cjs b/test/addons/openssl-providers/providers.cjs index 901ea5041ba34e..2dabbf020e2a41 100644 --- a/test/addons/openssl-providers/providers.cjs +++ b/test/addons/openssl-providers/providers.cjs @@ -13,7 +13,7 @@ const { getProviders } = require(`./build/${common.buildType}/binding`); // For the providers defined here, the expectation is that the listed ciphers // and hash algorithms are only provided by the named provider. These are for -// basic checks and are not intended to list evey cipher or hash algorithm +// basic checks and are not intended to list every cipher or hash algorithm // supported by the provider. 
const providers = { 'default': { diff --git a/test/async-hooks/init-hooks.js b/test/async-hooks/init-hooks.js index 2c5167156b615d..5b891879c0ce51 100644 --- a/test/async-hooks/init-hooks.js +++ b/test/async-hooks/init-hooks.js @@ -192,28 +192,28 @@ class ActivityCollector { _before(uid) { const h = this._getActivity(uid, 'before'); this._stamp(h, 'before'); - this._maybeLog(uid, h && h.type, 'before'); + this._maybeLog(uid, h?.type, 'before'); this.onbefore(uid); } _after(uid) { const h = this._getActivity(uid, 'after'); this._stamp(h, 'after'); - this._maybeLog(uid, h && h.type, 'after'); + this._maybeLog(uid, h?.type, 'after'); this.onafter(uid); } _destroy(uid) { const h = this._getActivity(uid, 'destroy'); this._stamp(h, 'destroy'); - this._maybeLog(uid, h && h.type, 'destroy'); + this._maybeLog(uid, h?.type, 'destroy'); this.ondestroy(uid); } _promiseResolve(uid) { const h = this._getActivity(uid, 'promiseResolve'); this._stamp(h, 'promiseResolve'); - this._maybeLog(uid, h && h.type, 'promiseResolve'); + this._maybeLog(uid, h?.type, 'promiseResolve'); this.onpromiseResolve(uid); } diff --git a/test/async-hooks/test-callback-error.js b/test/async-hooks/test-callback-error.js index f6af8e0018d534..ad70feb70c9571 100644 --- a/test/async-hooks/test-callback-error.js +++ b/test/async-hooks/test-callback-error.js @@ -62,12 +62,14 @@ assert.ok(!arg); let program = process.execPath; let args = [ '--abort-on-uncaught-exception', __filename, 'test_callback_abort' ]; - const options = { encoding: 'utf8' }; + let options = {}; if (!common.isWindows) { - program = `ulimit -c 0 && exec ${program} ${args.join(' ')}`; + [program, options] = common.escapePOSIXShell`ulimit -c 0 && exec "${program}" ${args[0]} "${args[1]}" ${args[2]}`; args = []; options.shell = true; } + + options.encoding = 'utf8'; const child = spawnSync(program, args, options); if (common.isWindows) { assert.strictEqual(child.status, 134); diff --git a/test/common/README.md b/test/common/README.md index 72e34fd4302b15..5f5ff75fca2431 100644 --- a/test/common/README.md +++ b/test/common/README.md @@ -112,6 +112,33 @@ Creates a 10 MiB file of all null characters. Indicates if there is more than 1gb of total memory. +### ``escapePOSIXShell`shell command` `` + +Escapes values in a string template literal so they can be passed as environment variables. On Windows, this function +does not escape anything (which is fine for most paths, as `"` is not a valid +character in a path on Windows), so for tests that must pass on Windows, you should +use it only to escape paths placed inside double quotes. +This function is meant to be used for tagged template strings. + +```js +const { escapePOSIXShell } = require('../common'); +const fixtures = require('../common/fixtures'); +const { execSync } = require('node:child_process'); +const origin = fixtures.path('origin'); +const destination = fixtures.path('destination'); + +execSync(...escapePOSIXShell`cp "${origin}" "${destination}"`); + +// When you need to specify options and/or additional env variables: +const [cmd, opts] = escapePOSIXShell`cp "${origin}" "${destination}"`; +console.log(typeof cmd === 'string'); // true +console.log(opts === undefined || typeof opts.env === 'object'); // true +execSync(cmd, { ...opts, stdio: 'ignore' }); +execSync(cmd, { stdio: 'ignore', env: { ...opts?.env, KEY: 'value' } }); +``` + +When possible, avoid using a shell; that way, there's no need to escape values.
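To make the mechanism above concrete, here is what the helper returns on a POSIX platform, based on the `escapePOSIXShell` implementation added to `test/common/index.js` later in this diff (the `ESCAPED_n` variable names are an internal detail of that implementation):

```js
const { escapePOSIXShell } = require('../common');

const [cmd, opts] = escapePOSIXShell`cat "${'/tmp/file with spaces'}"`;

// On POSIX, the interpolated value never appears in the command string;
// it is passed through the environment and expanded by the shell:
console.log(cmd);                // 'cat "${ESCAPED_0}"'
console.log(opts.env.ESCAPED_0); // '/tmp/file with spaces'

// On Windows, the value is interpolated inline and `opts` is undefined.
```

Because the shell itself expands `${ESCAPED_0}`, wrapping each interpolation in double quotes (as recommended above) prevents word splitting of values that contain spaces.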
+ ### `expectsError(validator[, exact])` * `validator` [\][] | [\][] | @@ -187,13 +214,6 @@ Returns an instance of all possible `ArrayBufferView`s of the provided Buffer. Returns an instance of all possible `BufferSource`s of the provided Buffer, consisting of all `ArrayBufferView` and an `ArrayBuffer`. -### `getCallSite(func)` - -* `func` [\][] -* return [\][] - -Returns the file name and line number for the provided Function. - ### `getTTYfd()` Attempts to get a valid TTY file descriptor. Returns `-1` if it fails. @@ -853,24 +873,6 @@ socket.write(frame.data); The serialized `Buffer` may be retrieved using the `frame.data` property. -### Class: DataFrame extends Frame - -The `http2.DataFrame` is a subclass of `http2.Frame` that serializes a `DATA` -frame. - - - -```js -// id is the 32-bit stream identifier -// payload is a Buffer containing the DATA payload -// padlen is an 8-bit integer giving the number of padding bytes to include -// final is a boolean indicating whether the End-of-stream flag should be set, -// defaults to false. -const frame = new http2.DataFrame(id, payload, padlen, final); - -socket.write(frame.data); -``` - ### Class: HeadersFrame The `http2.HeadersFrame` is a subclass of `http2.Frame` that serializes a @@ -1138,19 +1140,6 @@ are likely sufficient to hold a single file of `size` bytes. This is useful for skipping tests that require hundreds of megabytes or even gigabytes of temporary files, but it is inaccurate and susceptible to race conditions. -## UDP pair helper - -The `common/udppair` module exports a function `makeUDPPair` and a class -`FakeUDPWrap`. - -`FakeUDPWrap` emits `'send'` events when data is to be sent on it, and provides -an `emitReceived()` API for actin as if data has been received on it. - -`makeUDPPair` returns an object `{ clientSide, serverSide }` where each side -is an `FakeUDPWrap` connected to the other side. - -There is no difference between client or server side beyond their names. - ## WPT module ### `harness` diff --git a/test/common/assertSnapshot.js b/test/common/assertSnapshot.js index e0793ce5394cda..dbd4a6079861fc 100644 --- a/test/common/assertSnapshot.js +++ b/test/common/assertSnapshot.js @@ -25,7 +25,7 @@ function replaceWindowsPaths(str) { } function replaceFullPaths(str) { - return str.replaceAll(path.resolve(__dirname, '../..'), ''); + return str.replaceAll('\\\'', "'").replaceAll(path.resolve(__dirname, '../..'), ''); } function transform(...args) { diff --git a/test/common/crypto.js b/test/common/crypto.js index ba47285df49a43..10432d7e7a7e32 100644 --- a/test/common/crypto.js +++ b/test/common/crypto.js @@ -33,23 +33,6 @@ const modp2buf = Buffer.from([ 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, ]); -function testDH({ publicKey: alicePublicKey, privateKey: alicePrivateKey }, - { publicKey: bobPublicKey, privateKey: bobPrivateKey }, - expectedValue) { - const buf1 = crypto.diffieHellman({ - privateKey: alicePrivateKey, - publicKey: bobPublicKey, - }); - const buf2 = crypto.diffieHellman({ - privateKey: bobPrivateKey, - publicKey: alicePublicKey, - }); - assert.deepStrictEqual(buf1, buf2); - - if (expectedValue !== undefined) - assert.deepStrictEqual(buf1, expectedValue); -} - // Asserts that the size of the given key (in chars or bytes) is within 10% of // the expected size. 
function assertApproximateSize(key, expectedSize) { @@ -117,7 +100,6 @@ const sec1EncExp = (cipher) => getRegExpForPEM('EC PRIVATE KEY', cipher); module.exports = { modp2buf, - testDH, assertApproximateSize, testEncryptDecrypt, testSignVerify, diff --git a/test/common/dns.js b/test/common/dns.js index d854c73629a07c..738f2299dd79dc 100644 --- a/test/common/dns.js +++ b/test/common/dns.js @@ -196,11 +196,11 @@ function writeDNSPacket(parsed) { buffers.push(new Uint16Array([ parsed.id, - parsed.flags === undefined ? kStandardResponseFlags : parsed.flags, - parsed.questions && parsed.questions.length, - parsed.answers && parsed.answers.length, - parsed.authorityAnswers && parsed.authorityAnswers.length, - parsed.additionalRecords && parsed.additionalRecords.length, + parsed.flags ?? kStandardResponseFlags, + parsed.questions?.length, + parsed.answers?.length, + parsed.authorityAnswers?.length, + parsed.additionalRecords?.length, ])); for (const q of parsed.questions) { diff --git a/test/common/http2.js b/test/common/http2.js index 6df1c29c09eecd..1968b5bcfd39d0 100644 --- a/test/common/http2.js +++ b/test/common/http2.js @@ -81,24 +81,6 @@ class SettingsFrame extends Frame { } } -class DataFrame extends Frame { - constructor(id, payload, padlen = 0, final = false) { - let len = payload.length; - let flags = 0; - if (final) flags |= FLAG_EOS; - const buffers = [payload]; - if (padlen > 0) { - buffers.unshift(Buffer.from([padlen])); - buffers.push(PADDING.slice(0, padlen)); - len += padlen + 1; - flags |= FLAG_PADDED; - } - super(len, 0, flags, id); - buffers.unshift(this[kFrameData]); - this[kFrameData] = Buffer.concat(buffers); - } -} - class HeadersFrame extends Frame { constructor(id, payload, padlen = 0, final = false) { let len = payload.length; @@ -138,7 +120,6 @@ class AltSvcFrame extends Frame { module.exports = { Frame, AltSvcFrame, - DataFrame, HeadersFrame, SettingsFrame, PingFrame, diff --git a/test/common/index.js b/test/common/index.js index 172cdb6b049824..b95e812c5a9fab 100644 --- a/test/common/index.js +++ b/test/common/index.js @@ -30,7 +30,7 @@ const net = require('net'); // Do not require 'os' until needed so that test-os-checked-function can // monkey patch it. If 'os' is required here, that test will fail. const path = require('path'); -const { inspect } = require('util'); +const { inspect, getCallSite } = require('util'); const { isMainThread } = require('worker_threads'); const { isModuleNamespaceObject } = require('util/types'); @@ -249,15 +249,13 @@ const PIPE = (() => { // `$node --abort-on-uncaught-exception $file child` // the process aborts. 
function childShouldThrowAndAbort() { - let testCmd = ''; + const escapedArgs = escapePOSIXShell`"${process.argv[0]}" --abort-on-uncaught-exception "${process.argv[1]}" child`; if (!isWindows) { // Do not create core files, as it can take a lot of disk space on // continuous testing and developers' machines - testCmd += 'ulimit -c 0 && '; + escapedArgs[0] = 'ulimit -c 0 && ' + escapedArgs[0]; } - testCmd += `"${process.argv[0]}" --abort-on-uncaught-exception `; - testCmd += `"${process.argv[1]}" child`; - const child = exec(testCmd); + const child = exec(...escapedArgs); child.on('exit', function onExit(exitCode, signal) { const errMsg = 'Test should have aborted ' + `but instead exited with exit code ${exitCode}` + @@ -551,25 +549,13 @@ function canCreateSymLink() { return true; } -function getCallSite(top) { - const originalStackFormatter = Error.prepareStackTrace; - Error.prepareStackTrace = (err, stack) => - `${stack[0].getFileName()}:${stack[0].getLineNumber()}`; - const err = new Error(); - Error.captureStackTrace(err, top); - // With the V8 Error API, the stack is not formatted until it is accessed - err.stack; // eslint-disable-line no-unused-expressions - Error.prepareStackTrace = originalStackFormatter; - return err.stack; -} - function mustNotCall(msg) { - const callSite = getCallSite(mustNotCall); + const callSite = getCallSite()[1]; return function mustNotCall(...args) { const argsInfo = args.length > 0 ? `\ncalled with arguments: ${args.map((arg) => inspect(arg)).join(', ')}` : ''; assert.fail( - `${msg || 'function should not have been called'} at ${callSite}` + + `${msg || 'function should not have been called'} at ${callSite.scriptName}:${callSite.lineNumber}` + argsInfo); }; } @@ -900,6 +886,32 @@ function spawnPromisified(...args) { }); } +/** + * Escapes values in a string template literal. On Windows, this function + * does not escape anything (which is fine for most paths, as `"` is not a valid + * character in a path on Windows), so you should use it only to escape paths, + * or other values in tests that are skipped on Windows. + * This function is meant to be used for tagged template strings. + * @returns {[string, object | undefined]} An array that can be passed as + * arguments to `exec` or `execSync`. + */ +function escapePOSIXShell(cmdParts, ...args) { + if (common.isWindows) { + // On Windows, paths cannot contain `"`, so we can return the string unchanged. + return [String.raw({ raw: cmdParts }, ...args)]; + } + // On POSIX shells, we can pass values via the env, as there's a standard way for referencing a variable.
+ const env = { ...process.env }; + let cmd = cmdParts[0]; + for (let i = 0; i < args.length; i++) { + const envVarName = `ESCAPED_${i}`; + env[envVarName] = args[i]; + cmd += '${' + envVarName + '}' + cmdParts[i + 1]; + } + + return [cmd, { env }]; +}; + function getPrintedStackTrace(stderr) { const lines = stderr.split('\n'); @@ -963,12 +975,12 @@ const common = { childShouldThrowAndAbort, createZeroFilledFile, defaultAutoSelectFamilyAttemptTimeout, + escapePOSIXShell, expectsError, expectRequiredModule, expectWarning, getArrayBufferViews, getBufferSources, - getCallSite, getPrintedStackTrace, getTTYfd, hasIntl, diff --git a/test/common/index.mjs b/test/common/index.mjs index 007ce233fbea0d..748977b85f8012 100644 --- a/test/common/index.mjs +++ b/test/common/index.mjs @@ -11,11 +11,11 @@ const { childShouldThrowAndAbort, createZeroFilledFile, enoughTestMem, + escapePOSIXShell, expectsError, expectWarning, getArrayBufferViews, getBufferSources, - getCallSite, getTTYfd, hasCrypto, hasIntl, @@ -65,11 +65,11 @@ export { createRequire, createZeroFilledFile, enoughTestMem, + escapePOSIXShell, expectsError, expectWarning, getArrayBufferViews, getBufferSources, - getCallSite, getPort, getTTYfd, hasCrypto, diff --git a/test/common/inspector-helper.js b/test/common/inspector-helper.js index 9d5e3b15aece4b..864538f245e2ea 100644 --- a/test/common/inspector-helper.js +++ b/test/common/inspector-helper.js @@ -255,8 +255,8 @@ class InspectorSession { const callFrame = message.params.callFrames[0]; const location = callFrame.location; const scriptPath = this._scriptsIdsByUrl.get(location.scriptId); - assert.strictEqual(scriptPath.toString(), - expectedScriptPath.toString(), + assert.strictEqual(decodeURIComponent(scriptPath), + decodeURIComponent(expectedScriptPath), `${scriptPath} !== ${expectedScriptPath}`); assert.strictEqual(location.lineNumber, line); return true; diff --git a/test/common/report.js b/test/common/report.js index 6e41561186570d..6fce96590c3f54 100644 --- a/test/common/report.js +++ b/test/common/report.js @@ -251,7 +251,7 @@ function _validateContent(report, fields = []) { assert(typeof usage.free_memory, 'string'); assert(typeof usage.total_memory, 'string'); assert(typeof usage.available_memory, 'string'); - // This field may not exsit + // This field may not exist if (report.resourceUsage.constrained_memory) { assert(typeof report.resourceUsage.constrained_memory, 'string'); } diff --git a/test/common/tick.js b/test/common/tick.js index b02315b10ca96f..7bb86a81d52a6b 100644 --- a/test/common/tick.js +++ b/test/common/tick.js @@ -1,5 +1,4 @@ 'use strict'; -require('../common'); module.exports = function tick(x, cb) { function ontick() { diff --git a/test/common/udppair.js b/test/common/udppair.js deleted file mode 100644 index 3f2b0aed78a169..00000000000000 --- a/test/common/udppair.js +++ /dev/null @@ -1,101 +0,0 @@ -'use strict'; -const { internalBinding } = require('internal/test/binding'); -const { JSUDPWrap } = internalBinding('js_udp_wrap'); -const EventEmitter = require('events'); - -// FakeUDPWrap is a testing utility that emulates a UDP connection -// for the sake of making UDP tests more deterministic. 
-class FakeUDPWrap extends EventEmitter { - constructor() { - super(); - - this._handle = new JSUDPWrap(); - - this._handle.onreadstart = () => this._startReading(); - this._handle.onreadstop = () => this._stopReading(); - this._handle.onwrite = - (wrap, buffers, addr) => this._write(wrap, buffers, addr); - this._handle.getsockname = (obj) => { - Object.assign(obj, { address: '127.0.0.1', family: 'IPv4', port: 1337 }); - return 0; - }; - - this.reading = false; - this.bufferedReceived = []; - this.emitBufferedImmediate = null; - } - - _emitBuffered = () => { - if (!this.reading) return; - if (this.bufferedReceived.length > 0) { - this.emitReceived(this.bufferedReceived.shift()); - this.emitBufferedImmediate = setImmediate(this._emitBuffered); - } else { - this.emit('wantRead'); - } - }; - - _startReading() { - this.reading = true; - this.emitBufferedImmediate = setImmediate(this._emitBuffered); - } - - _stopReading() { - this.reading = false; - clearImmediate(this.emitBufferedImmediate); - } - - _write(wrap, buffers, addr) { - this.emit('send', { buffers, addr }); - setImmediate(() => this._handle.onSendDone(wrap, 0)); - } - - afterBind() { - this._handle.onAfterBind(); - } - - emitReceived(info) { - if (!this.reading) { - this.bufferedReceived.push(info); - return; - } - - const { - buffers, - addr: { - family = 4, - address = '127.0.0.1', - port = 1337, - }, - flags = 0, - } = info; - - let familyInt; - switch (family) { - case 'IPv4': familyInt = 4; break; - case 'IPv6': familyInt = 6; break; - default: throw new Error('bad family'); - } - - for (const buffer of buffers) { - this._handle.emitReceived(buffer, familyInt, address, port, flags); - } - } -} - -function makeUDPPair() { - const serverSide = new FakeUDPWrap(); - const clientSide = new FakeUDPWrap(); - - serverSide.on('send', - (chk) => setImmediate(() => clientSide.emitReceived(chk))); - clientSide.on('send', - (chk) => setImmediate(() => serverSide.emitReceived(chk))); - - return { serverSide, clientSide }; -} - -module.exports = { - FakeUDPWrap, - makeUDPPair, -}; diff --git a/test/es-module/test-cjs-legacyMainResolve.js b/test/es-module/test-cjs-legacyMainResolve.js index 0bfeb567a22b1f..edb567bce403f2 100644 --- a/test/es-module/test-cjs-legacyMainResolve.js +++ b/test/es-module/test-cjs-legacyMainResolve.js @@ -82,7 +82,7 @@ describe('legacyMainResolve', () => { {}, '' ), - { message: /instance of URL/, code: 'ERR_INVALID_ARG_TYPE' }, + { code: 'ERR_INTERNAL_ASSERTION' }, ); }); @@ -166,4 +166,12 @@ describe('legacyMainResolve', () => { { message: /"base" argument must be/, code: 'ERR_INVALID_ARG_TYPE' }, ); }); + + it('should interpret main as a path, not a URL', () => { + const packageJsonUrl = fixtures.fileURL('/es-modules/legacy-main-resolver/package.json'); + assert.deepStrictEqual( + legacyMainResolve(packageJsonUrl, { main: '../folder%25with percentage#/' }, packageJsonUrl), + fixtures.fileURL('/es-modules/folder%25with percentage#/index.js'), + ); + }); }); diff --git a/test/es-module/test-esm-detect-ambiguous.mjs b/test/es-module/test-esm-detect-ambiguous.mjs index fce455cf18bb60..5d5a2744afcd14 100644 --- a/test/es-module/test-esm-detect-ambiguous.mjs +++ b/test/es-module/test-esm-detect-ambiguous.mjs @@ -252,6 +252,18 @@ describe('Module syntax detection', { concurrency: !process.env.TEST_PARALLEL }, strictEqual(signal, null); }); + it('reports unfinished top-level `await`', async () => { + const { stdout, stderr, code, signal } = await spawnPromisified(process.execPath, [ + '--no-warnings', + 
fixtures.path('es-modules/tla/unresolved.js'), + ]); + + strictEqual(stderr, ''); + strictEqual(stdout, ''); + strictEqual(code, 13); + strictEqual(signal, null); + }); + it('permits top-level `await` above import/export syntax', async () => { const { stdout, stderr, code, signal } = await spawnPromisified(process.execPath, [ '--eval', diff --git a/test/es-module/test-esm-extensionless-esm-and-wasm.mjs b/test/es-module/test-esm-extensionless-esm-and-wasm.mjs index a8931dfbabd154..17786b61291ab6 100644 --- a/test/es-module/test-esm-extensionless-esm-and-wasm.mjs +++ b/test/es-module/test-esm-extensionless-esm-and-wasm.mjs @@ -1,8 +1,8 @@ // Flags: --experimental-wasm-modules -import { mustNotCall, spawnPromisified } from '../common/index.mjs'; +import { spawnPromisified } from '../common/index.mjs'; import * as fixtures from '../common/fixtures.mjs'; import { describe, it } from 'node:test'; -import { match, ok, strictEqual } from 'node:assert'; +import { match, strictEqual } from 'node:assert'; describe('extensionless ES modules within a "type": "module" package scope', { concurrency: !process.env.TEST_PARALLEL, @@ -91,13 +91,7 @@ describe('extensionless Wasm within no package scope', { concurrency: !process.e strictEqual(signal, null); }); - // This succeeds with `--experimental-default-type=module` - it('should error on import', async () => { - try { - await import(fixtures.fileURL('es-modules/noext-wasm')); - mustNotCall(); - } catch (err) { - ok(err instanceof SyntaxError); - } + it('should run on import', async () => { + await import(fixtures.fileURL('es-modules/noext-wasm')); }); }); diff --git a/test/es-module/test-esm-import-meta-resolve.mjs b/test/es-module/test-esm-import-meta-resolve.mjs index a9c26b7aecb926..f3b153062192af 100644 --- a/test/es-module/test-esm-import-meta-resolve.mjs +++ b/test/es-module/test-esm-import-meta-resolve.mjs @@ -45,7 +45,7 @@ assert.deepStrictEqual( { default: 'some://weird/protocol' }, ); assert.deepStrictEqual( - { ...await import(`data:text/javascript,export default import.meta.resolve("baz/", ${JSON.stringify(fixtures)})`) }, + { ...await import(`data:text/javascript,export default import.meta.resolve("baz/", ${encodeURIComponent(JSON.stringify(fixtures))})`) }, { default: fixtures + 'node_modules/baz/' }, ); assert.deepStrictEqual( diff --git a/test/es-module/test-esm-loader-entry-url.mjs b/test/es-module/test-esm-loader-entry-url.mjs new file mode 100644 index 00000000000000..e0b931693654a0 --- /dev/null +++ b/test/es-module/test-esm-loader-entry-url.mjs @@ -0,0 +1,97 @@ +import { spawnPromisified } from '../common/index.mjs'; +import * as fixtures from '../common/fixtures.mjs'; +import assert from 'node:assert'; +import { execPath } from 'node:process'; +import { describe, it } from 'node:test'; + +// Helper function to assert the spawned process +async function assertSpawnedProcess(args, options = {}, expected = {}) { + const { code, signal, stderr, stdout } = await spawnPromisified(execPath, args, options); + + if (expected.stderr) { + assert.match(stderr, expected.stderr); + } + + if (expected.stdout) { + assert.match(stdout, expected.stdout); + } + + assert.strictEqual(code, expected.code ?? 0); + assert.strictEqual(signal, expected.signal ?? 
null); +} + +// Common expectation for experimental feature warning in stderr +const experimentalFeatureWarning = { stderr: /--entry-url is an experimental feature/ }; + +describe('--entry-url', { concurrency: true }, () => { + it('should reject loading a path that contains %', async () => { + await assertSpawnedProcess( + ['--entry-url', './test-esm-double-encoding-native%20.mjs'], + { cwd: fixtures.fileURL('es-modules') }, + { + code: 1, + stderr: /ERR_MODULE_NOT_FOUND/, + } + ); + }); + + it('should support loading properly encoded Unix path', async () => { + await assertSpawnedProcess( + ['--entry-url', fixtures.fileURL('es-modules/test-esm-double-encoding-native%20.mjs').pathname], + {}, + experimentalFeatureWarning + ); + }); + + it('should support loading absolute URLs', async () => { + await assertSpawnedProcess( + ['--entry-url', fixtures.fileURL('printA.js')], + {}, + { + ...experimentalFeatureWarning, + stdout: /^A\r?\n$/, + } + ); + }); + + it('should support loading relative URLs', async () => { + await assertSpawnedProcess( + ['--entry-url', 'es-modules/print-entrypoint.mjs?key=value#hash'], + { cwd: fixtures.fileURL('./') }, + { + ...experimentalFeatureWarning, + stdout: /print-entrypoint\.mjs\?key=value#hash\r?\n$/, + } + ); + }); + + it('should support loading `data:` URLs', async () => { + await assertSpawnedProcess( + ['--entry-url', 'data:text/javascript,console.log(import.meta.url)'], + {}, + { + ...experimentalFeatureWarning, + stdout: /^data:text\/javascript,console\.log\(import\.meta\.url\)\r?\n$/, + } + ); + }); + + it('should support loading TypeScript URLs', { skip: !process.config.variables.node_use_amaro }, async () => { + const typescriptUrls = [ + 'typescript/cts/test-require-ts-file.cts', + 'typescript/mts/test-import-ts-file.mts', + ]; + + for (const url of typescriptUrls) { + await assertSpawnedProcess( + ['--entry-url', '--experimental-strip-types', fixtures.fileURL(url)], + {}, + { + ...experimentalFeatureWarning, + stdout: /Hello, TypeScript!/, + } + ); + } + }); + +}); diff --git a/test/es-module/test-esm-loader-hooks.mjs b/test/es-module/test-esm-loader-hooks.mjs index a36e726810431e..c697a5493c6d8b 100644 --- a/test/es-module/test-esm-loader-hooks.mjs +++ b/test/es-module/test-esm-loader-hooks.mjs @@ -822,7 +822,7 @@ describe('Loader hooks', { concurrency: !process.env.TEST_PARALLEL }, () => { const { code, signal, stdout, stderr } = await spawnPromisified(execPath, [ '--no-warnings', '--experimental-loader', - `data:text/javascript,const fixtures=${JSON.stringify(fixtures.path('empty.js'))};export ${ + `data:text/javascript,const fixtures=${encodeURI(JSON.stringify(fixtures.path('empty.js')))};export ${ encodeURIComponent(function resolve(s, c, n) { if (s.endsWith('entry-point')) { return { diff --git a/test/es-module/test-import-module-conditional-exports-module.mjs b/test/es-module/test-import-module-conditional-exports-module.mjs new file mode 100644 index 00000000000000..c751996c804529 --- /dev/null +++ b/test/es-module/test-import-module-conditional-exports-module.mjs @@ -0,0 +1,29 @@ +// Flags: --experimental-require-module + +import '../common/index.mjs'; +import assert from 'node:assert'; +import * as staticImport from '../fixtures/es-modules/module-condition/import.mjs'; +import { import as _import } from '../fixtures/es-modules/module-condition/dynamic_import.js'; + +async function dynamicImport(id) { + const result = await _import(id); + return result.resolved; +} + +assert.deepStrictEqual({ ...staticImport }, { + import_module_require: 
'import', + module_and_import: 'module', + module_and_require: 'module', + module_import_require: 'module', + module_only: 'module', + module_require_import: 'module', + require_module_import: 'module', +}); + +assert.strictEqual(await dynamicImport('import-module-require'), 'import'); +assert.strictEqual(await dynamicImport('module-and-import'), 'module'); +assert.strictEqual(await dynamicImport('module-and-require'), 'module'); +assert.strictEqual(await dynamicImport('module-import-require'), 'module'); +assert.strictEqual(await dynamicImport('module-only'), 'module'); +assert.strictEqual(await dynamicImport('module-require-import'), 'module'); +assert.strictEqual(await dynamicImport('require-module-import'), 'module'); diff --git a/test/es-module/test-loaders-workers-spawned.mjs b/test/es-module/test-loaders-workers-spawned.mjs index 7b16bdfcd3e2b0..68c1b863f7cae1 100644 --- a/test/es-module/test-loaders-workers-spawned.mjs +++ b/test/es-module/test-loaders-workers-spawned.mjs @@ -76,7 +76,7 @@ describe('Worker threads do not spawn infinitely', { concurrency: !process.env.T assert.strictEqual(stderr, ''); // The worker code should always run before the --import, but the console.log might arrive late. - assert.match(stdout, /^A\r?\nA\r?\n(B\r?\nC|C\r?\nB)\r?\nD\r?\n$/); + assert.match(stdout, /^A\r?\n(A\r?\nB\r?\nC|A\r?\nC\r?\nB|C\r?\nA\r?\nB)\r?\nD\r?\n$/); assert.strictEqual(code, 0); assert.strictEqual(signal, null); }); diff --git a/test/es-module/test-require-module-conditional-exports-module.js b/test/es-module/test-require-module-conditional-exports-module.js new file mode 100644 index 00000000000000..2a9e83869053d3 --- /dev/null +++ b/test/es-module/test-require-module-conditional-exports-module.js @@ -0,0 +1,15 @@ +// Flags: --experimental-require-module +'use strict'; + +require('../common'); +const assert = require('assert'); + +const loader = require('../fixtures/es-modules/module-condition/require.cjs'); + +assert.strictEqual(loader.require('import-module-require').resolved, 'module'); +assert.strictEqual(loader.require('module-and-import').resolved, 'module'); +assert.strictEqual(loader.require('module-and-require').resolved, 'module'); +assert.strictEqual(loader.require('module-import-require').resolved, 'module'); +assert.strictEqual(loader.require('module-only').resolved, 'module'); +assert.strictEqual(loader.require('module-require-import').resolved, 'module'); +assert.strictEqual(loader.require('require-module-import').resolved, 'require'); diff --git a/test/es-module/test-require-module-feature-detect.js b/test/es-module/test-require-module-feature-detect.js new file mode 100644 index 00000000000000..d6d4bb64f49f5f --- /dev/null +++ b/test/es-module/test-require-module-feature-detect.js @@ -0,0 +1,38 @@ +'use strict'; + +// This tests that process.features.require_module can be used to feature-detect +// require(esm) without triggering a warning. + +require('../common'); +const { spawnSyncAndAssert } = require('../common/child_process'); + +spawnSyncAndAssert(process.execPath, [ + '--no-warnings', + '--experimental-require-module', + '-p', + 'process.features.require_module', +], { + trim: true, + stdout: 'true', + stderr: '', // Should not emit warnings. +}); + +// It is not enabled by default. +spawnSyncAndAssert(process.execPath, [ + '-p', + 'process.features.require_module', +], { + trim: true, + stdout: 'false', + stderr: '', // Should not emit warnings. 
+}); + +spawnSyncAndAssert(process.execPath, [ + '--no-experimental-require-module', + '-p', + 'process.features.require_module', +], { + trim: true, + stdout: 'false', + stderr: '', // Should not emit warnings. +}); diff --git a/test/es-module/test-require-module-preload.js b/test/es-module/test-require-module-preload.js index cd51e201b63df8..f2e572969a050c 100644 --- a/test/es-module/test-require-module-preload.js +++ b/test/es-module/test-require-module-preload.js @@ -1,70 +1,117 @@ 'use strict'; require('../common'); -const { spawnSyncAndExitWithoutError } = require('../common/child_process'); -const fixtures = require('../common/fixtures'); - +const { spawnSyncAndAssert } = require('../common/child_process'); +const { fixturesDir } = require('../common/fixtures'); const stderr = /ExperimentalWarning: Support for loading ES Module in require/; -// Test named exports. -{ - spawnSyncAndExitWithoutError( - process.execPath, - [ '--experimental-require-module', '-r', fixtures.path('../fixtures/es-module-loaders/module-named-exports.mjs') ], - { - stderr, - } - ); -} +function testPreload(preloadFlag) { + // Test named exports. + { + spawnSyncAndAssert( + process.execPath, + [ + '--experimental-require-module', + preloadFlag, + './es-module-loaders/module-named-exports.mjs', + './printA.js', + ], + { + cwd: fixturesDir + }, + { + stdout: 'A', + stderr, + trim: true, + } + ); + } -// Test ESM that import ESM. -{ - spawnSyncAndExitWithoutError( - process.execPath, - [ '--experimental-require-module', '-r', fixtures.path('../fixtures/es-modules/import-esm.mjs') ], - { - stderr, - stdout: 'world', - trim: true, - } - ); -} + // Test ESM that import ESM. + { + spawnSyncAndAssert( + process.execPath, + [ + '--experimental-require-module', + preloadFlag, + './es-modules/import-esm.mjs', + './printA.js', + ], + { + cwd: fixturesDir + }, + { + stderr, + stdout: /^world\s+A$/, + trim: true, + } + ); + } -// Test ESM that import CJS. -{ - spawnSyncAndExitWithoutError( - process.execPath, - [ '--experimental-require-module', '-r', fixtures.path('../fixtures/es-modules/cjs-exports.mjs') ], - { - stdout: 'ok', - stderr, - trim: true, - } - ); -} + // Test ESM that import CJS. + { + spawnSyncAndAssert( + process.execPath, + [ + '--experimental-require-module', + preloadFlag, + './es-modules/cjs-exports.mjs', + './printA.js', + ], + { + cwd: fixturesDir + }, + { + stdout: /^ok\s+A$/, + stderr, + trim: true, + } + ); + } -// Test ESM that require() CJS. -// Can't use the common/index.mjs here because that checks the globals, and -// -r injects a bunch of globals. -{ - spawnSyncAndExitWithoutError( - process.execPath, - [ '--experimental-require-module', '-r', fixtures.path('../fixtures/es-modules/require-cjs.mjs') ], - { - stdout: 'world', - stderr, - trim: true, - } - ); + // Test ESM that require() CJS. + // Can't use the common/index.mjs here because that checks the globals, and + // -r injects a bunch of globals. + { + spawnSyncAndAssert( + process.execPath, + [ + '--experimental-require-module', + preloadFlag, + './es-modules/require-cjs.mjs', + './printA.js', + ], + { + cwd: fixturesDir + }, + { + stdout: /^world\s+A$/, + stderr, + trim: true, + } + ); + } } -// Test "type": "module" and "main" field in package.json. +testPreload('--require'); +testPreload('--import'); + +// Test "type": "module" and "main" field in package.json. This only applies to --require because +// --import does not support extension-less preloads.
{ - spawnSyncAndExitWithoutError( + spawnSyncAndAssert( process.execPath, - [ '--experimental-require-module', '-r', fixtures.path('../fixtures/es-modules/package-type-module') ], + [ + '--experimental-require-module', + '--require', + './es-modules/package-type-module', + './printA.js', + ], + { + cwd: fixturesDir + }, { - stdout: 'package-type-module', + stdout: /^package-type-module\s+A$/, stderr, trim: true, } diff --git a/test/es-module/test-typescript-eval.mjs b/test/es-module/test-typescript-eval.mjs index 120b66a1d26017..e6d841ffa07f7e 100644 --- a/test/es-module/test-typescript-eval.mjs +++ b/test/es-module/test-typescript-eval.mjs @@ -98,7 +98,7 @@ test('expect fail eval TypeScript CommonJS syntax with input-type module', async strictEqual(result.code, 1); }); -test('expect fail eval TypeScript CommonJS syntax with input-type module', async () => { +test('expect fail eval TypeScript ESM syntax with input-type commonjs', async () => { const result = await spawnPromisified(process.execPath, [ '--experimental-strip-types', '--input-type=commonjs', @@ -110,3 +110,13 @@ test('expect fail eval TypeScript CommonJS syntax with input-type module', async match(result.stderr, /Cannot use import statement outside a module/); strictEqual(result.code, 1); }); + +test('check syntax error is thrown when passing invalid syntax', async () => { + const result = await spawnPromisified(process.execPath, [ + '--experimental-strip-types', + '--eval', + 'enum Foo { A, B, C }']); + strictEqual(result.stdout, ''); + match(result.stderr, /ERR_INVALID_TYPESCRIPT_SYNTAX/); + strictEqual(result.code, 1); +}); diff --git a/test/es-module/test-typescript.mjs b/test/es-module/test-typescript.mjs index f8b79261495638..00af572f2fc62f 100644 --- a/test/es-module/test-typescript.mjs +++ b/test/es-module/test-typescript.mjs @@ -1,8 +1,33 @@ -import { skip, spawnPromisified } from '../common/index.mjs'; +import { skip, spawnPromisified, isWindows } from '../common/index.mjs'; import * as fixtures from '../common/fixtures.mjs'; import { match, strictEqual } from 'node:assert'; import { test } from 'node:test'; +test('expect process.features.typescript to be \'strip\' when --experimental-strip-types', async () => { + const result = await spawnPromisified(process.execPath, [ + '--no-warnings', + '--experimental-strip-types', + fixtures.path('typescript/echo-process-features-typescript.cjs'), + ]); + + strictEqual(result.stderr, ''); + strictEqual(result.stdout, process.config.variables.node_use_amaro ? 'strip\n' : 'false\n'); + strictEqual(result.code, 0); +}); + +test('expect process.features.typescript to be \'transform\' when --experimental-transform-types', async () => { + const result = await spawnPromisified(process.execPath, [ + '--no-warnings', + '--experimental-transform-types', + fixtures.path('typescript/echo-process-features-typescript.cjs'), + ]); + + strictEqual(result.stderr, ''); + strictEqual(result.stdout, process.config.variables.node_use_amaro ? 
'transform\n' : 'false\n'); + strictEqual(result.code, 0); +}); + + if (!process.config.variables.node_use_amaro) skip('Requires Amaro'); test('execute a TypeScript file', async () => { @@ -288,7 +313,7 @@ test('execute a TypeScript file with CommonJS syntax requiring .mts', async () = strictEqual(result.code, 1); }); -test('execute a TypeScript file with CommonJS syntax requiring .mts with require-module', async () => { +test('execute a TypeScript file with CommonJS syntax requiring .mts using require-module', async () => { const result = await spawnPromisified(process.execPath, [ '--experimental-strip-types', '--experimental-require-module', @@ -300,7 +325,7 @@ test('execute a TypeScript file with CommonJS syntax requiring .mts with require strictEqual(result.code, 0); }); -test('execute a TypeScript file with CommonJS syntax requiring .mts with require-module', async () => { +test('execute a TypeScript file with CommonJS syntax requiring .cts using commonjs', async () => { const result = await spawnPromisified(process.execPath, [ '--experimental-strip-types', '--no-warnings', @@ -325,3 +350,68 @@ test('execute a TypeScript file with CommonJS syntax requiring .mts with require match(result.stdout, /Hello, TypeScript!/); strictEqual(result.code, 0); }); + +test('execute a JavaScript file importing a cjs TypeScript file', async () => { + const result = await spawnPromisified(process.execPath, [ + '--experimental-strip-types', + '--no-warnings', + fixtures.path('typescript/ts/issue-54457.mjs'), + ]); + strictEqual(result.stderr, ''); + match(result.stdout, /Hello, TypeScript!/); + strictEqual(result.code, 0); +}); + +// TODO(marco-ippolito) Due to a bug in SWC, the TypeScript loader +// does not work on Windows arm64. This test should be re-enabled +// when https://github.com/nodejs/node/issues/54645 is fixed +test('execute a TypeScript test mocking module', { skip: isWindows && process.arch === 'arm64' }, async () => { + const result = await spawnPromisified(process.execPath, [ + '--test', + '--experimental-test-module-mocks', + '--experimental-strip-types', + '--no-warnings', + fixtures.path('typescript/ts/test-mock-module.ts'), + ]); + strictEqual(result.stderr, ''); + match(result.stdout, /Hello, TypeScript-Module!/); + match(result.stdout, /Hello, TypeScript-CommonJS!/); + strictEqual(result.code, 0); +}); + +test('expect process.features.typescript to be false without type-stripping', async () => { + strictEqual(process.features.typescript, false); +}); + +test('execute a TypeScript file with union types', async () => { + const result = await spawnPromisified(process.execPath, [ + '--experimental-strip-types', + '--no-warnings', + fixtures.path('typescript/ts/test-union-types.ts'), + ]); + + strictEqual(result.stderr, ''); + strictEqual(result.stdout, + '{' + + " name: 'Hello, TypeScript!' 
}\n" + + '{ role: \'admin\', permission: \'all\' }\n' + + '{\n foo: \'Testing Partial Type\',\n bar: 42,\n' + + ' zoo: true,\n metadata: undefined\n' + + '}\n'); + strictEqual(result.code, 0); +}); + +test('expect error when executing a TypeScript file with generics', async () => { + const result = await spawnPromisified(process.execPath, [ + '--experimental-strip-types', + fixtures.path('typescript/ts/test-parameter-properties.ts'), + ]); + + // This error should be thrown during transformation + match( + result.stderr, + /TypeScript parameter property is not supported in strip-only mode/ + ); + strictEqual(result.stdout, ''); + strictEqual(result.code, 1); +}); diff --git a/test/es-module/test-wasm-web-api.js b/test/es-module/test-wasm-web-api.js index d4a81794f80eb3..b199393a18c370 100644 --- a/test/es-module/test-wasm-web-api.js +++ b/test/es-module/test-wasm-web-api.js @@ -173,7 +173,7 @@ function testCompileStreamingRejectionUsingFetch(responseCallback, rejection) { "'application/octet-stream'" }); - // HTTP status code indiciating an error. + // HTTP status code indicating an error. await testCompileStreamingRejectionUsingFetch((res) => { res.statusCode = 418; res.setHeader('Content-Type', 'application/wasm'); @@ -184,7 +184,7 @@ function testCompileStreamingRejectionUsingFetch(responseCallback, rejection) { message: /^WebAssembly response has status code 418$/ }); - // HTTP status code indiciating an error, but using a Response whose body is + // HTTP status code indicating an error, but using a Response whose body is // a Buffer instead of calling fetch(). await testCompileStreamingSuccess(() => { return Promise.resolve(new Response(simpleWasmBytes, { diff --git a/test/fixtures/compile-cache-flush.js b/test/fixtures/compile-cache-flush.js new file mode 100644 index 00000000000000..8054f67b0ddaea --- /dev/null +++ b/test/fixtures/compile-cache-flush.js @@ -0,0 +1,21 @@ +'use strict'; + +const { flushCompileCache, getCompileCacheDir } = require('module'); +const { spawnSync } = require('child_process'); +const assert = require('assert'); + +if (process.argv[2] !== 'child') { + // The test should be run with the compile cache already enabled and NODE_DEBUG_NATIVE=COMPILE_CACHE. + assert(getCompileCacheDir()); + assert(process.env.NODE_DEBUG_NATIVE.includes('COMPILE_CACHE')); + + flushCompileCache(); + + const child1 = spawnSync(process.execPath, [__filename, 'child']); + console.log(child1.stderr.toString().trim().split('\n').map(line => `[child1]${line}`).join('\n')); + + flushCompileCache(); + + const child2 = spawnSync(process.execPath, [__filename, 'child']); + console.log(child2.stderr.toString().trim().split('\n').map(line => `[child2]${line}`).join('\n')); +} diff --git "a/test/fixtures/copy/utf/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/index.js" "b/test/fixtures/copy/utf/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/index.js" new file mode 100644 index 00000000000000..12388b0457bdda --- /dev/null +++ "b/test/fixtures/copy/utf/\346\226\260\345\273\272\346\226\207\344\273\266\345\244\271/index.js" @@ -0,0 +1,3 @@ +module.exports = { + purpose: 'testing copy' +}; diff --git a/test/fixtures/deep-exit.js b/test/fixtures/deep-exit.js new file mode 100644 index 00000000000000..357137a279c556 --- /dev/null +++ b/test/fixtures/deep-exit.js @@ -0,0 +1,15 @@ +'use strict'; + +// This is meant to be run with --trace-exit. 
+ +const depth = parseInt(process.env.STACK_DEPTH) || 30; +let counter = 1; +function recurse() { + if (counter++ < depth) { + recurse(); + } else { + process.exit(0); + } +} + +recurse(); diff --git a/test/fixtures/es-modules/module-condition/dynamic_import.js b/test/fixtures/es-modules/module-condition/dynamic_import.js new file mode 100644 index 00000000000000..7c4cd42d7037f4 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/dynamic_import.js @@ -0,0 +1,5 @@ +function load(id) { + return import(id); +} + +export { load as import }; diff --git a/test/fixtures/es-modules/module-condition/import.mjs b/test/fixtures/es-modules/module-condition/import.mjs new file mode 100644 index 00000000000000..ae12fbf1429424 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/import.mjs @@ -0,0 +1,7 @@ +export { resolved as import_module_require } from 'import-module-require'; +export { resolved as module_and_import } from 'module-and-import'; +export { resolved as module_and_require } from 'module-and-require'; +export { resolved as module_import_require } from 'module-import-require'; +export { resolved as module_only } from 'module-only'; +export { resolved as module_require_import } from 'module-require-import'; +export { resolved as require_module_import } from 'require-module-import'; diff --git a/test/fixtures/es-modules/module-condition/node_modules/import-module-require/import.js b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/import.js new file mode 100644 index 00000000000000..58f10e20bf3041 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/import.js @@ -0,0 +1 @@ +export const resolved = 'import'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/import-module-require/module.js b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/module.js new file mode 100644 index 00000000000000..136ec680a7e3a8 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/module.js @@ -0,0 +1 @@ +export const resolved = 'module'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/import-module-require/package.json b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/package.json new file mode 100644 index 00000000000000..46a096c2e7396d --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/package.json @@ -0,0 +1,11 @@ +{ + "type": "module", + "exports": { + "node": { + "import": "./import.js", + "module-sync": "./module.js", + "require": "./require.cjs" + }, + "default": "./module.js" + } +} \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/import-module-require/require.cjs b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/require.cjs new file mode 100644 index 00000000000000..6dd2c2ec97abd6 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/import-module-require/require.cjs @@ -0,0 +1 @@ +exports.resolved = 'require'; diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-and-import/import.js b/test/fixtures/es-modules/module-condition/node_modules/module-and-import/import.js new file mode 100644 index 00000000000000..58f10e20bf3041 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-and-import/import.js @@ -0,0 +1 @@ +export const resolved = 
'import'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-and-import/module.js b/test/fixtures/es-modules/module-condition/node_modules/module-and-import/module.js new file mode 100644 index 00000000000000..136ec680a7e3a8 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-and-import/module.js @@ -0,0 +1 @@ +export const resolved = 'module'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-and-import/package.json b/test/fixtures/es-modules/module-condition/node_modules/module-and-import/package.json new file mode 100644 index 00000000000000..4425147f7dab2c --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-and-import/package.json @@ -0,0 +1,10 @@ +{ + "type": "module", + "exports": { + "node": { + "module-sync": "./module.js", + "import": "./import.js" + }, + "default": "./module.js" + } +} diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-and-require/module.js b/test/fixtures/es-modules/module-condition/node_modules/module-and-require/module.js new file mode 100644 index 00000000000000..136ec680a7e3a8 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-and-require/module.js @@ -0,0 +1 @@ +export const resolved = 'module'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-and-require/package.json b/test/fixtures/es-modules/module-condition/node_modules/module-and-require/package.json new file mode 100644 index 00000000000000..57598fb5014bb0 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-and-require/package.json @@ -0,0 +1,10 @@ +{ + "type": "module", + "exports": { + "node": { + "module-sync": "./module.js", + "require": "./require.cjs" + }, + "default": "./module.js" + } +} \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-and-require/require.cjs b/test/fixtures/es-modules/module-condition/node_modules/module-and-require/require.cjs new file mode 100644 index 00000000000000..6dd2c2ec97abd6 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-and-require/require.cjs @@ -0,0 +1 @@ +exports.resolved = 'require'; diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-import-require/import.js b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/import.js new file mode 100644 index 00000000000000..58f10e20bf3041 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/import.js @@ -0,0 +1 @@ +export const resolved = 'import'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-import-require/module.js b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/module.js new file mode 100644 index 00000000000000..136ec680a7e3a8 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/module.js @@ -0,0 +1 @@ +export const resolved = 'module'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-import-require/package.json b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/package.json new file mode 100644 index 00000000000000..bbc8c0d286e656 --- /dev/null +++ 
b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/package.json @@ -0,0 +1,11 @@ +{ + "type": "module", + "exports": { + "node": { + "module-sync": "./module.js", + "import": "./import.js", + "require": "./require.cjs" + }, + "default": "./module.js" + } +} \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-import-require/require.cjs b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/require.cjs new file mode 100644 index 00000000000000..6dd2c2ec97abd6 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-import-require/require.cjs @@ -0,0 +1 @@ +exports.resolved = 'require'; diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-only/module.js b/test/fixtures/es-modules/module-condition/node_modules/module-only/module.js new file mode 100644 index 00000000000000..136ec680a7e3a8 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-only/module.js @@ -0,0 +1 @@ +export const resolved = 'module'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-only/package.json b/test/fixtures/es-modules/module-condition/node_modules/module-only/package.json new file mode 100644 index 00000000000000..2d29bbdf325e1a --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-only/package.json @@ -0,0 +1,6 @@ +{ + "type": "module", + "exports": { + "module-sync": "./module.js" + } +} diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-require-import/import.js b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/import.js new file mode 100644 index 00000000000000..58f10e20bf3041 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/import.js @@ -0,0 +1 @@ +export const resolved = 'import'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-require-import/module.js b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/module.js new file mode 100644 index 00000000000000..136ec680a7e3a8 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/module.js @@ -0,0 +1 @@ +export const resolved = 'module'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-require-import/package.json b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/package.json new file mode 100644 index 00000000000000..1490f04b27f9ad --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/package.json @@ -0,0 +1,11 @@ +{ + "type": "module", + "exports": { + "node": { + "module-sync": "./module.js", + "require": "./require.cjs", + "import": "./import.js" + }, + "default": "./module.js" + } +} diff --git a/test/fixtures/es-modules/module-condition/node_modules/module-require-import/require.cjs b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/require.cjs new file mode 100644 index 00000000000000..f5bf7ed32992e1 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/module-require-import/require.cjs @@ -0,0 +1 @@ +export const resolved = 'require'; diff --git a/test/fixtures/es-modules/module-condition/node_modules/require-module-import/import.js 
b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/import.js new file mode 100644 index 00000000000000..58f10e20bf3041 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/import.js @@ -0,0 +1 @@ +export const resolved = 'import'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/require-module-import/module.js b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/module.js new file mode 100644 index 00000000000000..136ec680a7e3a8 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/module.js @@ -0,0 +1 @@ +export const resolved = 'module'; \ No newline at end of file diff --git a/test/fixtures/es-modules/module-condition/node_modules/require-module-import/package.json b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/package.json new file mode 100644 index 00000000000000..b62f96c997ecb7 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/package.json @@ -0,0 +1,11 @@ +{ + "type": "module", + "exports": { + "node": { + "require": "./require.cjs", + "module-sync": "./module.js", + "import": "./module.js" + }, + "default": "./module.js" + } +} diff --git a/test/fixtures/es-modules/module-condition/node_modules/require-module-import/require.cjs b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/require.cjs new file mode 100644 index 00000000000000..6dd2c2ec97abd6 --- /dev/null +++ b/test/fixtures/es-modules/module-condition/node_modules/require-module-import/require.cjs @@ -0,0 +1 @@ +exports.resolved = 'require'; diff --git a/test/fixtures/es-modules/module-condition/require.cjs b/test/fixtures/es-modules/module-condition/require.cjs new file mode 100644 index 00000000000000..2457758a98f32b --- /dev/null +++ b/test/fixtures/es-modules/module-condition/require.cjs @@ -0,0 +1 @@ +exports.require = require; diff --git a/test/fixtures/es-modules/print-entrypoint.mjs b/test/fixtures/es-modules/print-entrypoint.mjs new file mode 100644 index 00000000000000..d9536a69b3f87d --- /dev/null +++ b/test/fixtures/es-modules/print-entrypoint.mjs @@ -0,0 +1 @@ +console.log(import.meta.url); diff --git a/test/fixtures/es-modules/tla/unresolved.js b/test/fixtures/es-modules/tla/unresolved.js new file mode 100644 index 00000000000000..231a8cd634825c --- /dev/null +++ b/test/fixtures/es-modules/tla/unresolved.js @@ -0,0 +1 @@ +await new Promise(() => {}); diff --git a/test/fixtures/module-mocking/don't-open.mjs b/test/fixtures/module-mocking/don't-open.mjs new file mode 100644 index 00000000000000..f2b47223e34a83 --- /dev/null +++ b/test/fixtures/module-mocking/don't-open.mjs @@ -0,0 +1 @@ +export let string = 'original esm string'; diff --git a/test/fixtures/run-script/invalid-json/package.json b/test/fixtures/run-script/invalid-json/package.json new file mode 100644 index 00000000000000..22ad2cc50fa347 --- /dev/null +++ b/test/fixtures/run-script/invalid-json/package.json @@ -0,0 +1,3 @@ +{ + "scripts": {}, +} diff --git a/test/fixtures/run-script/invalid-schema/package.json b/test/fixtures/run-script/invalid-schema/package.json new file mode 100644 index 00000000000000..59dfa8a201b587 --- /dev/null +++ b/test/fixtures/run-script/invalid-schema/package.json @@ -0,0 +1,9 @@ +{ + "scripts": { + "array": [], + "boolean": true, + "null": null, + "number": 1.0, + "object": {} + } +} diff --git 
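The module-condition fixtures above pin down how the new "module-sync" exports condition interacts with "import" and "require": matching walks the exports object in source order and takes the first key that is either "default" or an enabled condition, which is why the fixtures only vary key order. A minimal sketch of that matching rule (illustrative only, not Node's actual resolver):

function resolveExports(value, conditions) {
  // Strings are terminal targets; objects are condition maps whose keys
  // are tried in source order, so key ordering across these fixtures matters.
  if (typeof value === 'string') return value;
  for (const [key, target] of Object.entries(value)) {
    if (key === 'default' || conditions.has(key)) {
      const match = resolveExports(target, conditions);
      if (match !== null) return match;
    }
  }
  return null;
}

// import-module-require lists "import" before "module-sync", so import()
// still resolves to ./import.js even with the module-sync condition enabled:
const exportsField = {
  node: { import: './import.js', 'module-sync': './module.js', require: './require.cjs' },
  default: './module.js',
};
console.log(resolveExports(exportsField, new Set(['node', 'import', 'module-sync'])));
// → './import.js'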
a/test/fixtures/run-script/missing-scripts/package.json b/test/fixtures/run-script/missing-scripts/package.json new file mode 100644 index 00000000000000..0967ef424bce67 --- /dev/null +++ b/test/fixtures/run-script/missing-scripts/package.json @@ -0,0 +1 @@ +{} diff --git a/test/fixtures/run-script/package.json b/test/fixtures/run-script/package.json index c103b48840fa2d..138f47f2f97408 100644 --- a/test/fixtures/run-script/package.json +++ b/test/fixtures/run-script/package.json @@ -10,6 +10,8 @@ "path-env": "path-env", "path-env-windows": "path-env.bat", "special-env-variables": "special-env-variables", - "special-env-variables-windows": "special-env-variables.bat" + "special-env-variables-windows": "special-env-variables.bat", + "pwd": "pwd", + "pwd-windows": "cd" } } diff --git a/test/fixtures/snapshot/error-stack.js b/test/fixtures/snapshot/error-stack.js new file mode 100644 index 00000000000000..96afaec22521ee --- /dev/null +++ b/test/fixtures/snapshot/error-stack.js @@ -0,0 +1,24 @@ + +const { + setDeserializeMainFunction, +} = require('v8').startupSnapshot; + +console.log(`During snapshot building, Error.stackTraceLimit =`, Error.stackTraceLimit); +console.log(getError('During snapshot building', 30)); + +setDeserializeMainFunction(() => { + console.log(`After snapshot deserialization, Error.stackTraceLimit =`, Error.stackTraceLimit); + console.log(getError('After snapshot deserialization', 30)); +}); + +function getError(message, depth) { + let counter = 1; + function recurse() { + if (counter++ < depth) { + return recurse(); + } + const error = new Error(message); + return error.stack; + } + return recurse(); +} diff --git a/test/fixtures/snapshot/marked.js b/test/fixtures/snapshot/marked.js index b72d59461b8e1c..d29b18ba1f9519 100644 --- a/test/fixtures/snapshot/marked.js +++ b/test/fixtures/snapshot/marked.js @@ -617,7 +617,7 @@ } else { indent = cap[2].search(/[^ ]/); // Find first non-space char - indent = cap[1].length + (indent > 4 ? 1 : indent); // intented code blocks after 4 spaces; indent is always 1 + indent = cap[1].length + (indent > 4 ? 1 : indent); // indented code blocks after 4 spaces; indent is always 1 itemContents = lines[0].slice(indent - cap[1].length); } diff --git a/test/fixtures/snapshot/mutate-error-stack-trace-limit.js b/test/fixtures/snapshot/mutate-error-stack-trace-limit.js new file mode 100644 index 00000000000000..e9d704ceb32f23 --- /dev/null +++ b/test/fixtures/snapshot/mutate-error-stack-trace-limit.js @@ -0,0 +1,44 @@ + +const { + addSerializeCallback, + setDeserializeMainFunction, +} = require('v8').startupSnapshot; +const assert = require('assert'); + +if (process.env.TEST_IN_SERIALIZER) { + addSerializeCallback(checkMutate); +} else { + checkMutate(); +} + +function checkMutate() { + // Check that mutation to Error.stackTraceLimit is effective in the snapshot + // builder script. + assert.strictEqual(typeof Error.stackTraceLimit, 'number'); + Error.stackTraceLimit = 0; + assert.strictEqual(getError('', 30), 'Error'); +} + +setDeserializeMainFunction(() => { + // Check that the mutation is preserved in the deserialized main function. + assert.strictEqual(Error.stackTraceLimit, 0); + assert.strictEqual(getError('', 30), 'Error'); + + // Check that it can still be mutated. 
+ Error.stackTraceLimit = 10; + const error = getError('', 30); + const matches = [...error.matchAll(/at recurse/g)]; + assert.strictEqual(matches.length, 10); +}); + +function getError(message, depth) { + let counter = 1; + function recurse() { + if (counter++ < depth) { + return recurse(); + } + const error = new Error(message); + return error.stack; + } + return recurse(); +} diff --git a/test/fixtures/test-runner/coverage-loader/hooks.mjs b/test/fixtures/test-runner/coverage-loader/hooks.mjs index c2e4b1dfc94628..57ee712ed97c73 100644 --- a/test/fixtures/test-runner/coverage-loader/hooks.mjs +++ b/test/fixtures/test-runner/coverage-loader/hooks.mjs @@ -1,5 +1,5 @@ const sources = { -// Virtual file. Dosen't exist on disk +// Virtual file. Doesn't exist on disk "virtual.js": ` import { test } from 'node:test'; test('test', async () => {}); diff --git a/test/fixtures/test-runner/coverage-snap/a-very-long-long-long-sub-dir/c.js b/test/fixtures/test-runner/coverage-snap/a-very-long-long-long-sub-dir/c.js new file mode 100644 index 00000000000000..73ac49a524e60b --- /dev/null +++ b/test/fixtures/test-runner/coverage-snap/a-very-long-long-long-sub-dir/c.js @@ -0,0 +1,52 @@ +'use strict'; +// Here we can't import common module as the coverage will be different based on the system + +// Empty functions that don't do anything +function doNothing1() { + // Not implemented +} + +function doNothing2() { + // No logic here +} + +function unusedFunction1() { + // Intentionally left empty +} + +function unusedFunction2() { + // Another empty function +} + +// Unused variables +const unusedVariable1 = 'This is never used'; +const unusedVariable2 = 42; +let unusedVariable3; + +// Empty class with no methods +class UnusedClass { + constructor() { + // Constructor does nothing + } +} + +// Empty object literal +const emptyObject = {}; + +// Empty array +const emptyArray = []; + +// Function with parameters but no body +function doNothingWithParams(param1, param2) { + // No implementation +} + +// Function that returns nothing +function returnsNothing() { + // No return statement +} + +// Another unused function +function unusedFunction3() { + // More empty code +} diff --git a/test/fixtures/test-runner/coverage/index.test.js b/test/fixtures/test-runner/coverage/index.test.js index 81ce78597f0477..16f9adf9adea50 100644 --- a/test/fixtures/test-runner/coverage/index.test.js +++ b/test/fixtures/test-runner/coverage/index.test.js @@ -1,7 +1,7 @@ 'use strict'; const test = require('node:test'); -test('no soucre map', () => {}); +test('no source map', () => {}); if (false) { console.log('this does not execute'); } diff --git a/test/fixtures/test-runner/matching-patterns/typescript-test.cts b/test/fixtures/test-runner/matching-patterns/typescript-test.cts new file mode 100644 index 00000000000000..1ba6866ffcdb56 --- /dev/null +++ b/test/fixtures/test-runner/matching-patterns/typescript-test.cts @@ -0,0 +1,4 @@ +const test = require('node:test'); + +// 'as string' ensures that type stripping actually occurs +test('this should pass' as string); diff --git a/test/fixtures/test-runner/matching-patterns/typescript-test.mts b/test/fixtures/test-runner/matching-patterns/typescript-test.mts new file mode 100644 index 00000000000000..52ea74c251594a --- /dev/null +++ b/test/fixtures/test-runner/matching-patterns/typescript-test.mts @@ -0,0 +1,4 @@ +import { test } from 'node:test'; + +// 'as string' ensures that type stripping actually occurs +test('this should pass' as string); diff --git 
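The error-stack.js and mutate-error-stack-trace-limit.js fixtures above are built on the v8.startupSnapshot API. A self-contained sketch of the API surface they use (the build/run flags are the documented ones; the output wording is illustrative):

'use strict';
const {
  isBuildingSnapshot,
  addSerializeCallback,
  setDeserializeMainFunction,
} = require('node:v8').startupSnapshot;

// Build phase: node --build-snapshot --snapshot-blob snapshot.blob entry.js
if (isBuildingSnapshot()) {
  addSerializeCallback(() => {
    // Runs just before the heap is serialized into the blob.
    console.log('serializing, Error.stackTraceLimit =', Error.stackTraceLimit);
  });
  setDeserializeMainFunction(() => {
    // Becomes the entry point of: node --snapshot-blob snapshot.blob
    console.log('deserialized, Error.stackTraceLimit =', Error.stackTraceLimit);
  });
}

The fixtures assert that a mutation such as Error.stackTraceLimit = 0 made before serialization survives into the deserialized main function and can still be changed afterwards.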
a/test/fixtures/test-runner/matching-patterns/typescript-test.ts b/test/fixtures/test-runner/matching-patterns/typescript-test.ts new file mode 100644 index 00000000000000..1ba6866ffcdb56 --- /dev/null +++ b/test/fixtures/test-runner/matching-patterns/typescript-test.ts @@ -0,0 +1,4 @@ +const test = require('node:test'); + +// 'as string' ensures that type stripping actually occurs +test('this should pass' as string); diff --git a/test/fixtures/test-runner/output/coverage-width-100-uncovered-lines.snapshot b/test/fixtures/test-runner/output/coverage-width-100-uncovered-lines.snapshot index 560b4e6d13e15b..c410c42fb7eeb9 100644 --- a/test/fixtures/test-runner/output/coverage-width-100-uncovered-lines.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-100-uncovered-lines.snapshot @@ -15,13 +15,18 @@ ok 1 - Coverage Print Fixed Width 100 # duration_ms * # start of coverage report # -------------------------------------------------------------------------------------------------- -# file | line % | branch % | funcs % | uncovered lines +# file | line % | branch % | funcs % | uncovered lines # -------------------------------------------------------------------------------------------------- -# …ap/a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 -# …ap/b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 -# …ines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 55-57 59-6… -# …nes.mjs | 100.00 | 100.00 | 100.00 | +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 … +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# many-uncovered-lines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 … +# output | | | | +# coverage-width-100-uncovered-lines.mjs | 100.00 | 100.00 | 100.00 | # -------------------------------------------------------------------------------------------------- -# all fil… | 52.80 | 60.00 | 1.61 | +# all files | 52.80 | 60.00 | 1.61 | # -------------------------------------------------------------------------------------------------- # end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-100.snapshot b/test/fixtures/test-runner/output/coverage-width-100.snapshot index a452e0389967db..dfb48005c48eaf 100644 --- a/test/fixtures/test-runner/output/coverage-width-100.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-100.snapshot @@ -15,12 +15,17 @@ ok 1 - Coverage Print Fixed Width 100 # duration_ms * # start of coverage report # -------------------------------------------------------------------------------------------------- -# file | line % | branch % | funcs % | uncovered lines +# file | line % | branch % | funcs % | uncovered lines # -------------------------------------------------------------------------------------------------- -# test/fixtures/test-runner/coverage-snap/a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 … -# test/fixtures/test-runner/coverage-snap/b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 -# …tures/test-runner/output/coverage-width-100.mjs | 100.00 | 100.00 | 100.00 | +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-4… +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# output | | | | +# coverage-width-100.mjs | 100.00 | 100.00 | 100.00 | # -------------------------------------------------------------------------------------------------- -# all files | 60.81 | 100.00 | 0.00 | +# all files | 60.81 | 
100.00 | 0.00 | # -------------------------------------------------------------------------------------------------- # end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-150-uncovered-lines.snapshot b/test/fixtures/test-runner/output/coverage-width-150-uncovered-lines.snapshot index 01f94f2d40a9e6..423dac3291bf74 100644 --- a/test/fixtures/test-runner/output/coverage-width-150-uncovered-lines.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-150-uncovered-lines.snapshot @@ -15,13 +15,18 @@ ok 1 - Coverage Print Fixed Width 150 # duration_ms * # start of coverage report # ---------------------------------------------------------------------------------------------------------------------------------------------------- -# file | line % | branch % | funcs % | uncovered lines +# file | line % | branch % | funcs % | uncovered lines # ---------------------------------------------------------------------------------------------------------------------------------------------------- -# …ap/a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 -# …ap/b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 -# …ines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 55-57 59-61 63-65 67-69 91-93 96-98 100-102 104-106 111-112 … -# …nes.mjs | 100.00 | 100.00 | 100.00 | +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# many-uncovered-lines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 55-57 59-61 63-65 67-69 91… +# output | | | | +# coverage-width-150-uncovered-lines.mjs | 100.00 | 100.00 | 100.00 | # ---------------------------------------------------------------------------------------------------------------------------------------------------- -# all fil… | 52.80 | 60.00 | 1.61 | +# all files | 52.80 | 60.00 | 1.61 | # ---------------------------------------------------------------------------------------------------------------------------------------------------- # end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-150.snapshot b/test/fixtures/test-runner/output/coverage-width-150.snapshot index 37cc1e799fc550..c4aa8109955a12 100644 --- a/test/fixtures/test-runner/output/coverage-width-150.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-150.snapshot @@ -14,13 +14,18 @@ ok 1 - Coverage Print Fixed Width 150 # todo 0 # duration_ms * # start of coverage report -# ------------------------------------------------------------------------------------------------------------------------------------- -# file | line % | branch % | funcs % | uncovered lines -# ------------------------------------------------------------------------------------------------------------------------------------- -# test/fixtures/test-runner/coverage-snap/a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 -# test/fixtures/test-runner/coverage-snap/b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 -# test/fixtures/test-runner/output/coverage-width-150.mjs | 100.00 | 100.00 | 100.00 | -# ------------------------------------------------------------------------------------------------------------------------------------- -# all files | 60.81 | 100.00 | 0.00 | -# ------------------------------------------------------------------------------------------------------------------------------------- +# 
-------------------------------------------------------------------------------------------------------- +# file | line % | branch % | funcs % | uncovered lines +# -------------------------------------------------------------------------------------------------------- +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# output | | | | +# coverage-width-150.mjs | 100.00 | 100.00 | 100.00 | +# -------------------------------------------------------------------------------------------------------- +# all files | 60.81 | 100.00 | 0.00 | +# -------------------------------------------------------------------------------------------------------- # end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-40.mjs b/test/fixtures/test-runner/output/coverage-width-40.mjs new file mode 100644 index 00000000000000..2b5330b3b5fff1 --- /dev/null +++ b/test/fixtures/test-runner/output/coverage-width-40.mjs @@ -0,0 +1,12 @@ +// Flags: --experimental-test-coverage +// here we can't import common module as the coverage will be different based on the system +// Unused imports are here in order to populate the coverage report +import * as a from '../coverage-snap/b.js'; +import * as b from '../coverage-snap/a.js'; +import * as c from '../coverage-snap/a-very-long-long-long-sub-dir/c.js'; + +import { test } from 'node:test'; + +process.stdout.columns = 40; + +test(`Coverage Print Fixed Width ${process.stdout.columns}`); diff --git a/test/fixtures/test-runner/output/coverage-width-40.snapshot b/test/fixtures/test-runner/output/coverage-width-40.snapshot new file mode 100644 index 00000000000000..09d236b8bea413 --- /dev/null +++ b/test/fixtures/test-runner/output/coverage-width-40.snapshot @@ -0,0 +1,33 @@ +TAP version 13 +# Subtest: Coverage Print Fixed Width 40 +ok 1 - Coverage Print Fixed Width 40 + --- + duration_ms: * + ... 
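The coverage snapshot rewrites in this stretch all encode one behavioral change: the reporter now prints each path as a directory tree with per-segment rows instead of a single, often truncated, full path per file. A hypothetical sketch of the folding step (names are illustrative, not the reporter's internals):

function buildTree(files) {
  const root = { children: {} };
  for (const file of files) {
    let node = root;
    for (const part of file.path.split('/')) {
      node.children[part] ??= { children: {} };
      node = node.children[part];
    }
    node.coverage = file.coverage; // only leaf files carry percentages
  }
  return root;
}

const tree = buildTree([
  { path: 'test/fixtures/test-runner/coverage-snap/a.js', coverage: 55.77 },
  { path: 'test/fixtures/test-runner/coverage-snap/b.js', coverage: 45.45 },
]);
// Directory rows ('test', 'fixtures', ...) render with empty percent
// columns, which is exactly what the updated snapshots assert.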
+1..1 +# tests 1 +# suites 0 +# pass 1 +# fail 0 +# cancelled 0 +# skipped 0 +# todo 0 +# duration_ms * +# start of coverage report +# -------------------------------------- +# file | line % | branch % | funcs % | … +# -------------------------------------- +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# …g-long-sub-dir | | | | +# c.js | 55.77 | 100.00 | 0.00 | … +# a.js | 55.77 | 100.00 | 0.00 | … +# b.js | 45.45 | 100.00 | 0.00 | … +# output | | | | +# …e-width-40.mjs | 100.00 | 100.00 | 100.00 | +# -------------------------------------- +# all files | 59.06 | 100.00 | 0.00 | +# -------------------------------------- +# end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-80-color.mjs b/test/fixtures/test-runner/output/coverage-width-80-color.mjs new file mode 100644 index 00000000000000..c4712659dedd41 --- /dev/null +++ b/test/fixtures/test-runner/output/coverage-width-80-color.mjs @@ -0,0 +1,12 @@ +// Flags: --experimental-test-coverage +// here we can't import common module as the coverage will be different based on the system +// Unused imports are here in order to populate the coverage report +import * as a from '../coverage-snap/b.js'; +import * as b from '../coverage-snap/a.js'; + +import { test } from 'node:test'; + +process.env.FORCE_COLOR = '3'; +process.stdout.columns = 80; + +test(`Coverage Print Fixed Width ${process.stdout.columns}`); diff --git a/test/fixtures/test-runner/output/coverage-width-80-color.snapshot b/test/fixtures/test-runner/output/coverage-width-80-color.snapshot new file mode 100644 index 00000000000000..eb94b331a18001 --- /dev/null +++ b/test/fixtures/test-runner/output/coverage-width-80-color.snapshot @@ -0,0 +1,26 @@ +[32m✔ Coverage Print Fixed Width 80 [90m(*ms)[39m[39m +[34mℹ tests 1[39m +[34mℹ suites 0[39m +[34mℹ pass 1[39m +[34mℹ fail 0[39m +[34mℹ cancelled 0[39m +[34mℹ skipped 0[39m +[34mℹ todo 0[39m +[34mℹ duration_ms *[39m +[34mℹ start of coverage report +ℹ ------------------------------------------------------------------------------ +ℹ file | [31mline %[34m | [31mbranch %[34m | [31mfuncs %[34m | uncovered … +ℹ ------------------------------------------------------------------------------ +ℹ test | [31m [34m | [31m [34m | [31m [34m | +ℹ fixtures | [31m [34m | [31m [34m | [31m [34m | +ℹ test-runner | [31m [34m | [31m [34m | [31m [34m | +ℹ coverage-snap | [31m [34m | [31m [34m | [31m [34m | +ℹ [33ma.js [34m | [33m 55.77[34m | [32m 100.00[34m | [31m 0.00[34m | 5-7 9-11 1… +ℹ [31mb.js [34m | [31m 45.45[34m | [32m 100.00[34m | [31m 0.00[34m | 5-7 9-11 +ℹ output | [31m [34m | [31m [34m | [31m [34m | +ℹ [32mcoverage-width-80-color.mjs [34m | [32m100.00[34m | [32m 100.00[34m | [32m 100.00[34m | +ℹ ------------------------------------------------------------------------------ +ℹ all files | [33m 61.33[34m | [32m 100.00[34m | [31m 0.00[34m | +ℹ ------------------------------------------------------------------------------ +ℹ end of coverage report +[39m \ No newline at end of file diff --git a/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines-color.mjs b/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines-color.mjs new file mode 100644 index 00000000000000..430338660ce8f1 --- /dev/null +++ b/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines-color.mjs @@ -0,0 +1,13 @@ +// Flags: --experimental-test-coverage +// here we can't import common module as the coverage will be different based on the system +// Unused imports are here in order to 
populate the coverage report +import * as a from '../coverage-snap/b.js'; +import * as b from '../coverage-snap/a.js'; +import * as c from '../coverage-snap/many-uncovered-lines.js'; + +import { test } from 'node:test'; + +process.env.FORCE_COLOR = '3'; +process.stdout.columns = 100; + +test(`Coverage Print Fixed Width ${process.stdout.columns}`); diff --git a/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines-color.snapshot b/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines-color.snapshot new file mode 100644 index 00000000000000..b9e56fca6586ac --- /dev/null +++ b/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines-color.snapshot @@ -0,0 +1,27 @@ +[32m✔ Coverage Print Fixed Width 100 [90m(*ms)[39m[39m +[34mℹ tests 1[39m +[34mℹ suites 0[39m +[34mℹ pass 1[39m +[34mℹ fail 0[39m +[34mℹ cancelled 0[39m +[34mℹ skipped 0[39m +[34mℹ todo 0[39m +[34mℹ duration_ms *[39m +[34mℹ start of coverage report +ℹ -------------------------------------------------------------------------------------------------- +ℹ file | [31mline %[34m | [31mbranch %[34m | [31mfuncs %[34m | uncovered lines +ℹ -------------------------------------------------------------------------------------------------- +ℹ test | [31m [34m | [31m [34m | [31m [34m | +ℹ fixtures | [31m [34m | [31m [34m | [31m [34m | +ℹ test-runner | [31m [34m | [31m [34m | [31m [34m | +ℹ coverage-snap | [31m [34m | [31m [34m | [31m [34m | +ℹ [33ma.js [34m | [33m 55.77[34m | [32m 100.00[34m | [31m 0.00[34m | 5-7 9-11 13-15… +ℹ [31mb.js [34m | [31m 45.45[34m | [32m 100.00[34m | [31m 0.00[34m | 5-7 9-11 +ℹ [31mmany-uncovered-lines.js [34m | [33m 50.99[34m | [31m 42.86[34m | [31m 1.92[34m | 5-7 9-11 13-15… +ℹ output | [31m [34m | [31m [34m | [31m [34m | +ℹ [32mcoverage-width-80-uncovered-lines-color.mjs [34m | [32m100.00[34m | [32m 100.00[34m | [32m 100.00[34m | +ℹ -------------------------------------------------------------------------------------------------- +ℹ all files | [33m 52.91[34m | [33m 60.00[34m | [31m 1.61[34m | +ℹ -------------------------------------------------------------------------------------------------- +ℹ end of coverage report +[39m \ No newline at end of file diff --git a/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines.snapshot b/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines.snapshot index 560b4e6d13e15b..6564d7942d8cd9 100644 --- a/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-80-uncovered-lines.snapshot @@ -15,13 +15,18 @@ ok 1 - Coverage Print Fixed Width 100 # duration_ms * # start of coverage report # -------------------------------------------------------------------------------------------------- -# file | line % | branch % | funcs % | uncovered lines +# file | line % | branch % | funcs % | uncovered lines # -------------------------------------------------------------------------------------------------- -# …ap/a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 -# …ap/b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 -# …ines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 55-57 59-6… -# …nes.mjs | 100.00 | 100.00 | 100.00 | +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 2… +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# many-uncovered-lines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 2… +# output | | | | +# 
coverage-width-80-uncovered-lines.mjs | 100.00 | 100.00 | 100.00 | # -------------------------------------------------------------------------------------------------- -# all fil… | 52.80 | 60.00 | 1.61 | +# all files | 52.80 | 60.00 | 1.61 | # -------------------------------------------------------------------------------------------------- # end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-80.snapshot b/test/fixtures/test-runner/output/coverage-width-80.snapshot index d4a485f45944bf..de071277e1f98d 100644 --- a/test/fixtures/test-runner/output/coverage-width-80.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-80.snapshot @@ -15,12 +15,17 @@ ok 1 - Coverage Print Fixed Width 80 # duration_ms * # start of coverage report # ------------------------------------------------------------------------------ -# file | line % | branch % | funcs % | … +# file | line % | branch % | funcs % | uncovered lines # ------------------------------------------------------------------------------ -# test/fixtures/test-runner/coverage-snap/a.js | 55.77 | 100.00 | 0.00 | … -# test/fixtures/test-runner/coverage-snap/b.js | 45.45 | 100.00 | 0.00 | … -# …es/test-runner/output/coverage-width-80.mjs | 100.00 | 100.00 | 100.00 | +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-… +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# output | | | | +# coverage-width-80.mjs | 100.00 | 100.00 | 100.00 | # ------------------------------------------------------------------------------ -# all files | 60.81 | 100.00 | 0.00 | +# all files | 60.81 | 100.00 | 0.00 | # ------------------------------------------------------------------------------ # end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-infinity-uncovered-lines.snapshot b/test/fixtures/test-runner/output/coverage-width-infinity-uncovered-lines.snapshot index c973983e9c2c19..7440b7772a1925 100644 --- a/test/fixtures/test-runner/output/coverage-width-infinity-uncovered-lines.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-infinity-uncovered-lines.snapshot @@ -14,14 +14,19 @@ ok 1 - Coverage Print Fixed Width Infinity # todo 0 # duration_ms * # start of coverage report -# ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ -# file | line % | branch % | funcs % | uncovered lines -# ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ -# test/fixtures/test-runner/coverage-snap/a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 -# test/fixtures/test-runner/coverage-snap/b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 -# 
test/fixtures/test-runner/coverage-snap/many-uncovered-lines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 55-57 59-61 63-65 67-69 91-93 96-98 100-102 104-106 111-112 118-119 122-123 127-129 132-136 144-146 150 166-167 173 180 188-189 196-200 207-213 216-218 221-223 226-228 232 236-238 241-243 246-248 251-257 260-262 265-268 271-273 276-280 283-285 288-290 293-295 298-301 304-306 309-312 315-318 321-324 327-329 332-340 343-348 351-353 -# test/fixtures/test-runner/output/coverage-width-infinity-uncovered-lines.mjs | 100.00 | 100.00 | 100.00 | -# ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ -# all files | 52.80 | 60.00 | 1.61 | -# ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +# file | line % | branch % | funcs % | uncovered lines +# ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# many-uncovered-lines.js | 50.99 | 42.86 | 1.92 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 55-57 59-61 63-65 67-69 91-93 96-98 100-102 104-106 111-112 118-119 122-123 127-129 132-136 144-146 150 166-167 173 180 188-189 196-200 207-213 216-218 221-223 226-228 232 236-238 241-243 246-248 251-257 260-262 265-268 271-273 276-280 283-285 288-290 293-295 298-301 304-306 309-312 315-318 321-324 327-329 332-340 343-348 351-353 +# output | | | | +# coverage-width-infinity-uncovered-lines.mjs | 100.00 | 100.00 | 100.00 | +# 
------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- +# all files | 52.80 | 60.00 | 1.61 | +# ------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------- # end of coverage report diff --git a/test/fixtures/test-runner/output/coverage-width-infinity.snapshot b/test/fixtures/test-runner/output/coverage-width-infinity.snapshot index e2d0370cfe41fe..2b9916a5b08217 100644 --- a/test/fixtures/test-runner/output/coverage-width-infinity.snapshot +++ b/test/fixtures/test-runner/output/coverage-width-infinity.snapshot @@ -14,13 +14,18 @@ ok 1 - Coverage Print Fixed Width Infinity # todo 0 # duration_ms * # start of coverage report -# ------------------------------------------------------------------------------------------------------------------------------------------ -# file | line % | branch % | funcs % | uncovered lines -# ------------------------------------------------------------------------------------------------------------------------------------------ -# test/fixtures/test-runner/coverage-snap/a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 -# test/fixtures/test-runner/coverage-snap/b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 -# test/fixtures/test-runner/output/coverage-width-infinity.mjs | 100.00 | 100.00 | 100.00 | -# ------------------------------------------------------------------------------------------------------------------------------------------ -# all files | 60.81 | 100.00 | 0.00 | -# ------------------------------------------------------------------------------------------------------------------------------------------ +# ------------------------------------------------------------------------------------------------------------- +# file | line % | branch % | funcs % | uncovered lines +# ------------------------------------------------------------------------------------------------------------- +# test | | | | +# fixtures | | | | +# test-runner | | | | +# coverage-snap | | | | +# a.js | 55.77 | 100.00 | 0.00 | 5-7 9-11 13-15 17-19 29-30 40-42 45-47 50-52 +# b.js | 45.45 | 100.00 | 0.00 | 5-7 9-11 +# output | | | | +# coverage-width-infinity.mjs | 100.00 | 100.00 | 100.00 | +# ------------------------------------------------------------------------------------------------------------- +# all files | 60.81 | 100.00 | 0.00 | +# ------------------------------------------------------------------------------------------------------------- # end of coverage report diff --git a/test/fixtures/test-runner/output/default_output.snapshot b/test/fixtures/test-runner/output/default_output.snapshot index 96450b05bc7ec8..73bfc1da5e92e9 100644 --- a/test/fixtures/test-runner/output/default_output.snapshot +++ b/test/fixtures/test-runner/output/default_output.snapshot @@ -27,7 
+27,7 @@ [31m✖ should pass but parent fail [90m(*ms)[39m[39m [32m'test did not finish before its parent and was cancelled'[39m -[31m▶ [39mparent [90m(*ms)[39m +[31m✖ parent [90m(*ms)[39m[39m [34mℹ tests 6[39m [34mℹ suites 0[39m [34mℹ pass 1[39m diff --git a/test/fixtures/test-runner/output/dot_output_custom_columns.snapshot b/test/fixtures/test-runner/output/dot_output_custom_columns.snapshot index b315dd77465a37..2c9c5aecda20fe 100644 --- a/test/fixtures/test-runner/output/dot_output_custom_columns.snapshot +++ b/test/fixtures/test-runner/output/dot_output_custom_columns.snapshot @@ -1,3 +1,3 @@ -[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c -[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c -[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c[32m.c +[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m +[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m +[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m[32m.[0m diff --git a/test/fixtures/test-runner/output/suite-skip-hooks.snapshot b/test/fixtures/test-runner/output/suite-skip-hooks.snapshot index b7fc826426eaf9..91949ea6217b66 100644 --- a/test/fixtures/test-runner/output/suite-skip-hooks.snapshot +++ b/test/fixtures/test-runner/output/suite-skip-hooks.snapshot @@ -12,7 +12,7 @@ AFTER 2 ✔ should run 1 (*ms) ﹣ should not run (*ms) # SKIP ✔ should run 2 (*ms) -▶ suite runs with mixture of skipped tests (*ms) +✔ suite runs with mixture of skipped tests (*ms) ℹ tests 4 ℹ suites 2 ℹ pass 2 diff --git a/test/fixtures/test-runner/print-arguments.js b/test/fixtures/test-runner/print-arguments.js new file mode 100644 index 00000000000000..6140b3b8d26060 --- /dev/null +++ b/test/fixtures/test-runner/print-arguments.js @@ -0,0 +1,5 @@ +const { test } = require('node:test'); + +test('process.argv is setup', (t) => { + t.assert.deepStrictEqual(process.argv.slice(2), ['--a-custom-argument']); +}); diff --git a/test/fixtures/test-runner/source-maps/invalid-json/index.js b/test/fixtures/test-runner/source-maps/invalid-json/index.js new file mode 100644 index 00000000000000..f8a711af8c0098 --- /dev/null +++ b/test/fixtures/test-runner/source-maps/invalid-json/index.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/test/fixtures/test-runner/source-maps/invalid-json/index.js.map b/test/fixtures/test-runner/source-maps/invalid-json/index.js.map new file mode 100644 index 00000000000000..e466dcbd8e8f2b --- /dev/null +++ 
b/test/fixtures/test-runner/source-maps/invalid-json/index.js.map @@ -0,0 +1 @@ +invalid \ No newline at end of file diff --git a/test/fixtures/test-runner/source-map-line-lengths/index.js b/test/fixtures/test-runner/source-maps/line-lengths/index.js similarity index 100% rename from test/fixtures/test-runner/source-map-line-lengths/index.js rename to test/fixtures/test-runner/source-maps/line-lengths/index.js diff --git a/test/fixtures/test-runner/source-map-line-lengths/index.js.map b/test/fixtures/test-runner/source-maps/line-lengths/index.js.map similarity index 100% rename from test/fixtures/test-runner/source-map-line-lengths/index.js.map rename to test/fixtures/test-runner/source-maps/line-lengths/index.js.map diff --git a/test/fixtures/test-runner/source-map-line-lengths/index.ts b/test/fixtures/test-runner/source-maps/line-lengths/index.ts similarity index 100% rename from test/fixtures/test-runner/source-map-line-lengths/index.ts rename to test/fixtures/test-runner/source-maps/line-lengths/index.ts diff --git a/test/fixtures/test-runner/source-maps/missing-map.js b/test/fixtures/test-runner/source-maps/missing-map.js new file mode 100644 index 00000000000000..af68b844e0af61 --- /dev/null +++ b/test/fixtures/test-runner/source-maps/missing-map.js @@ -0,0 +1 @@ +//# sourceMappingURL=missing.js.map \ No newline at end of file diff --git a/test/fixtures/test-runner/source-maps/missing-sources/index.js b/test/fixtures/test-runner/source-maps/missing-sources/index.js new file mode 100644 index 00000000000000..f8a711af8c0098 --- /dev/null +++ b/test/fixtures/test-runner/source-maps/missing-sources/index.js @@ -0,0 +1,2 @@ +export {}; +//# sourceMappingURL=index.js.map \ No newline at end of file diff --git a/test/fixtures/test-runner/source-maps/missing-sources/index.js.map b/test/fixtures/test-runner/source-maps/missing-sources/index.js.map new file mode 100644 index 00000000000000..12cacc226ac5c1 --- /dev/null +++ b/test/fixtures/test-runner/source-maps/missing-sources/index.js.map @@ -0,0 +1 @@ +{"version":3,"file":"index.js.map","sourceRoot":"","sources":["nonexistent.js"],"names":[],"mappings":"AAAA,OAAO,CAAC,GAAG,CAAC,IAAI,CAAC,CAAC"} \ No newline at end of file diff --git a/test/fixtures/typescript/echo-process-features-typescript.cjs b/test/fixtures/typescript/echo-process-features-typescript.cjs new file mode 100644 index 00000000000000..7f8b14e9e0440f --- /dev/null +++ b/test/fixtures/typescript/echo-process-features-typescript.cjs @@ -0,0 +1,2 @@ +'use strict'; +console.log(process.features.typescript); diff --git a/test/fixtures/typescript/ts/commonjs-logger.ts b/test/fixtures/typescript/ts/commonjs-logger.ts new file mode 100644 index 00000000000000..91b171231e5522 --- /dev/null +++ b/test/fixtures/typescript/ts/commonjs-logger.ts @@ -0,0 +1,5 @@ +const logger = (): void => { }; + +module.exports = { + logger +} diff --git a/test/fixtures/typescript/ts/issue-54457.mjs b/test/fixtures/typescript/ts/issue-54457.mjs new file mode 100644 index 00000000000000..7394101b61ce2b --- /dev/null +++ b/test/fixtures/typescript/ts/issue-54457.mjs @@ -0,0 +1,4 @@ +// https://github.com/nodejs/node/issues/54457 +const { text } = await import('./test-require.ts'); + +console.log(text); diff --git a/test/fixtures/typescript/ts/module-logger.ts b/test/fixtures/typescript/ts/module-logger.ts new file mode 100644 index 00000000000000..50aecfdf350f33 --- /dev/null +++ b/test/fixtures/typescript/ts/module-logger.ts @@ -0,0 +1 @@ +export const logger = (): void => { }; diff --git 
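The echo-process-features-typescript.cjs fixture above probes process.features.typescript. A hedged sketch of reading it (per the current docs the value is 'strip' or 'transform' when TypeScript execution is enabled and false otherwise; treat the exact values as an assumption about the docs, not something this diff establishes):

'use strict';
// false when type stripping is disabled; 'strip' under
// --experimental-strip-types, 'transform' under --experimental-transform-types.
const mode = process.features.typescript;
if (mode) {
  console.log(`TypeScript execution enabled in '${mode}' mode`);
} else {
  console.log('TypeScript execution disabled; .ts files will not load');
}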
a/test/fixtures/typescript/ts/test-mock-module.ts b/test/fixtures/typescript/ts/test-mock-module.ts new file mode 100644 index 00000000000000..adc5f5ce57e215 --- /dev/null +++ b/test/fixtures/typescript/ts/test-mock-module.ts @@ -0,0 +1,23 @@ +import { before, mock, test } from 'node:test'; +import { strictEqual } from 'node:assert'; + +const logger = mock.fn((s) => console.log(`Hello, ${s}!`)); + +before(async () => { + mock.module('./module-logger.ts', { + namedExports: { logger } + }); + mock.module('./commonjs-logger.ts', { + namedExports: { logger } + }); +}); + +test('logger', async () => { + const { logger: mockedModule } = await import('./module-logger.ts'); + mockedModule('TypeScript-Module'); + strictEqual(logger.mock.callCount(), 1); + + const { logger: mockedCommonjs } = await import('./commonjs-logger.ts'); + mockedCommonjs('TypeScript-CommonJS'); + strictEqual(logger.mock.callCount(), 2); +}); diff --git a/test/fixtures/typescript/ts/test-parameter-properties.ts b/test/fixtures/typescript/ts/test-parameter-properties.ts new file mode 100644 index 00000000000000..5cf79f6f113dd1 --- /dev/null +++ b/test/fixtures/typescript/ts/test-parameter-properties.ts @@ -0,0 +1,15 @@ +interface Bar { + name: string; + age: number; +} + +class Test<T> { + constructor(private value: T) {} + + public getValue(): T { + return this.value; + } +} + +const foo = new Test<Bar>({ age: 42, name: 'John Doe' }); +console.log(foo.getValue()); diff --git a/test/fixtures/typescript/ts/test-require.ts b/test/fixtures/typescript/ts/test-require.ts new file mode 100644 index 00000000000000..a96db1a6542afb --- /dev/null +++ b/test/fixtures/typescript/ts/test-require.ts @@ -0,0 +1,7 @@ +const util = require('node:util'); + +const text = 'Hello, TypeScript!'; + +module.exports = { + text +}; diff --git a/test/fixtures/typescript/ts/test-union-types.ts b/test/fixtures/typescript/ts/test-union-types.ts new file mode 100644 index 00000000000000..baa8332d76c2b5 --- /dev/null +++ b/test/fixtures/typescript/ts/test-union-types.ts @@ -0,0 +1,53 @@ +// Use Some Union Types Together +const getTypescript = async () => { + return { + name: 'Hello, TypeScript!', + }; +}; + +type MyNameResult = Awaited<ReturnType<typeof getTypescript>>; +const myNameResult: MyNameResult = { + name: 'Hello, TypeScript!', +}; + +console.log(myNameResult); + +type RoleAttributes = + | { + role: 'admin'; + permission: 'all'; + } + | { + role: 'user'; + } + | { + role: 'manager'; + }; + +// Union Type: Extract +type AdminRole = Extract<RoleAttributes, { role: 'admin' }>; +const adminRole: AdminRole = { + role: 'admin', + permission: 'all', +}; + +console.log(adminRole); + +type MyType = { + foo: string; + bar: number; + zoo: boolean; + metadata?: unknown; +}; + +// Union Type: Partial +type PartialType = Partial<MyType>; + +const PartialTypeWithValues: PartialType = { + foo: 'Testing Partial Type', + bar: 42, + zoo: true, + metadata: undefined, +}; + +console.log(PartialTypeWithValues); diff --git a/test/fixtures/tz-version.txt b/test/fixtures/tz-version.txt index 04fe6744432fa4..699e50d4d38edf 100644 --- a/test/fixtures/tz-version.txt +++ b/test/fixtures/tz-version.txt @@ -1 +1 @@ -2024a +2024b diff --git a/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js b/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js index d6adf21ec33795..39c73c41b42207 100644 --- a/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js +++ b/test/fixtures/wpt/FileAPI/support/send-file-form-helper.js @@ -180,7 +180,7 @@ const formPostFileUploadTest = ({ // Used to verify that the browser agrees with the test about // field value
replacement and encoding independently of file system - // idiosyncracies. + // idiosyncrasies. form.append(Object.assign(document.createElement('input'), { type: 'hidden', name: 'filename', diff --git a/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js b/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js index 53c8cca7e09b8e..dd62a0e98e92c8 100644 --- a/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js +++ b/test/fixtures/wpt/FileAPI/support/send-file-formdata-helper.js @@ -34,7 +34,7 @@ const formDataPostFileUploadTest = ({ // Used to verify that the browser agrees with the test about // field value replacement and encoding independently of file system - // idiosyncracies. + // idiosyncrasies. formData.append("filename", fileBaseName); // Same, but with name and value reversed to ensure field names diff --git a/test/fixtures/wpt/README.md b/test/fixtures/wpt/README.md index 836e238bc4bcef..2c8a8b9b03afa9 100644 --- a/test/fixtures/wpt/README.md +++ b/test/fixtures/wpt/README.md @@ -13,9 +13,9 @@ Last update: - common: https://github.com/web-platform-tests/wpt/tree/dbd648158d/common - compression: https://github.com/web-platform-tests/wpt/tree/5aa50dd415/compression - console: https://github.com/web-platform-tests/wpt/tree/767ae35464/console -- dom/abort: https://github.com/web-platform-tests/wpt/tree/d1f1ecbd52/dom/abort -- dom/events: https://github.com/web-platform-tests/wpt/tree/ab8999891c/dom/events -- encoding: https://github.com/web-platform-tests/wpt/tree/5aa50dd415/encoding +- dom/abort: https://github.com/web-platform-tests/wpt/tree/0143fe244b/dom/abort +- dom/events: https://github.com/web-platform-tests/wpt/tree/0a811c5161/dom/events +- encoding: https://github.com/web-platform-tests/wpt/tree/1ac8deee08/encoding - fetch/data-urls/resources: https://github.com/web-platform-tests/wpt/tree/7c79d998ff/fetch/data-urls/resources - FileAPI: https://github.com/web-platform-tests/wpt/tree/cceaf3628d/FileAPI - hr-time: https://github.com/web-platform-tests/wpt/tree/34cafd797e/hr-time @@ -32,7 +32,7 @@ Last update: - user-timing: https://github.com/web-platform-tests/wpt/tree/5ae85bf826/user-timing - wasm/jsapi: https://github.com/web-platform-tests/wpt/tree/cde25e7e3c/wasm/jsapi - wasm/webapi: https://github.com/web-platform-tests/wpt/tree/fd1b23eeaa/wasm/webapi -- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/272064ebf9/WebCryptoAPI +- WebCryptoAPI: https://github.com/web-platform-tests/wpt/tree/203d2ac459/WebCryptoAPI - webidl/ecmascript-binding/es-exceptions: https://github.com/web-platform-tests/wpt/tree/a370aad338/webidl/ecmascript-binding/es-exceptions - webmessaging/broadcastchannel: https://github.com/web-platform-tests/wpt/tree/e97fac4791/webmessaging/broadcastchannel - webstorage: https://github.com/web-platform-tests/wpt/tree/9dafa89214/webstorage diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js index da809278a87aa5..8ab9db7bf71318 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.js @@ -1,12 +1,19 @@ +function define_tests_25519() { + return define_tests("X25519"); +} + +function define_tests_448() { + return define_tests("X448"); +} -function define_tests() { +function define_tests(algorithmName) { // May want to test prefixed implementations. 
var subtle = self.crypto.subtle; // Verify the derive functions perform checks against the all-zero value results, // ensuring small-order points are rejected. // https://www.rfc-editor.org/rfc/rfc7748#section-6.1 - Object.keys(kSmallOrderPoint).forEach(function(algorithmName) { + { kSmallOrderPoint[algorithmName].forEach(function(test) { promise_test(async() => { let derived; @@ -28,15 +35,16 @@ function define_tests() { assert_equals(derived, undefined, "Operation succeeded, but should not have."); }, algorithmName + " key derivation checks for all-zero value result with a key of order " + test.order); }); - }); + } return importKeys(pkcs8, spki, sizes) .then(function(results) { publicKeys = results.publicKeys; privateKeys = results.privateKeys; noDeriveBitsKeys = results.noDeriveBitsKeys; + ecdhKeys = results.ecdhKeys; - Object.keys(sizes).forEach(function(algorithmName) { + { // Basic success case promise_test(function(test) { return subtle.deriveBits({name: algorithmName, public: publicKeys[algorithmName]}, privateKeys[algorithmName], 8 * sizes[algorithmName]) @@ -101,11 +109,7 @@ function define_tests() { // - wrong algorithm promise_test(function(test) { - publicKey = publicKeys["X25519"]; - if (algorithmName === "X25519") { - publicKey = publicKeys["X448"]; - } - return subtle.deriveBits({name: algorithmName, public: publicKey}, privateKeys[algorithmName], 8 * sizes[algorithmName]) + return subtle.deriveBits({name: algorithmName, public: ecdhKeys[algorithmName]}, privateKeys[algorithmName], 8 * sizes[algorithmName]) .then(function(derivation) { assert_unreached("deriveBits succeeded but should have failed with InvalidAccessError"); }, function(err) { @@ -165,16 +169,17 @@ function define_tests() { assert_equals(err.name, "OperationError", "Should throw correct error, not " + err.name + ": " + err.message); }); }, algorithmName + " asking for too many bits"); - }); + } }); function importKeys(pkcs8, spki, sizes) { var privateKeys = {}; var publicKeys = {}; var noDeriveBitsKeys = {}; + var ecdhPublicKeys = {}; var promises = []; - Object.keys(pkcs8).forEach(function(algorithmName) { + { var operation = subtle.importKey("pkcs8", pkcs8[algorithmName], {name: algorithmName}, false, ["deriveBits", "deriveKey"]) @@ -184,8 +189,8 @@ function define_tests() { privateKeys[algorithmName] = null; }); promises.push(operation); - }); - Object.keys(pkcs8).forEach(function(algorithmName) { + } + { var operation = subtle.importKey("pkcs8", pkcs8[algorithmName], {name: algorithmName}, false, ["deriveKey"]) @@ -195,8 +200,8 @@ function define_tests() { noDeriveBitsKeys[algorithmName] = null; }); promises.push(operation); - }); - Object.keys(spki).forEach(function(algorithmName) { + } + { var operation = subtle.importKey("spki", spki[algorithmName], {name: algorithmName}, false, []) @@ -206,10 +211,17 @@ function define_tests() { publicKeys[algorithmName] = null; }); promises.push(operation); - }); - + } + { + var operation = subtle.importKey("spki", ecSPKI, + {name: "ECDH", namedCurve: "P-256"}, + false, []) + .then(function(key) { + ecdhPublicKeys[algorithmName] = key; + }); + } return Promise.all(promises) - .then(function(results) {return {privateKeys: privateKeys, publicKeys: publicKeys, noDeriveBitsKeys: noDeriveBitsKeys}}); + .then(function(results) {return {privateKeys: privateKeys, publicKeys: publicKeys, noDeriveBitsKeys: noDeriveBitsKeys, ecdhKeys: ecdhPublicKeys}}); } // Compares two ArrayBuffer or ArrayBufferView objects. 
If bitCount is diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve25519.https.any.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve25519.https.any.js new file mode 100644 index 00000000000000..866192e0193bc1 --- /dev/null +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve25519.https.any.js @@ -0,0 +1,10 @@ +// META: title=WebCryptoAPI: deriveKey() Using ECDH with CFRG Elliptic Curves +// META: script=cfrg_curves_bits_fixtures.js +// META: script=cfrg_curves_bits.js + +// Define subtests from a `promise_test` to ensure the harness does not +// complete before the subtests are available. `explicit_done` cannot be used +// for this purpose because the global `done` function is automatically invoked +// by the WPT infrastructure in dedicated worker tests defined using the +// "multi-global" pattern. +promise_test(define_tests_25519, 'setup - define tests'); diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.https.any.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve448.https.any.js similarity index 75% rename from test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve448.https.any.js index c1837591ee85d4..32485c68107e5c 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits.https.any.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_curve448.https.any.js @@ -1,4 +1,4 @@ -// META: title=WebCryptoAPI: deriveBits() Using ECDH with CFRG Elliptic Curves +// META: title=WebCryptoAPI: deriveKey() Using ECDH with CFRG Elliptic Curves // META: script=cfrg_curves_bits_fixtures.js // META: script=cfrg_curves_bits.js @@ -7,4 +7,4 @@ // for this purpose because the global `done` function is automatically invoked // by the WPT infrastructure in dedicated worker tests defined using the // "multi-global" pattern. 
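A minimal sketch, assuming the standard testharness.js globals, of the setup pattern the comment above describes: registering subtests from inside a `promise_test` keeps the harness alive until asynchronous setup has finished, which `explicit_done` cannot guarantee in worker scopes.

    function define_tests(algorithmName) {
      // Asynchronous setup (key imports, fixture loading) would happen here.
      return Promise.resolve().then(() => {
        // Subtests registered while the setup promise is pending still run.
        promise_test(async () => {
          assert_equals(typeof algorithmName, "string");
        }, algorithmName + ": example subtest");
      });
    }

    // One such line per variant file, as in the hunks above and below.
    promise_test(() => define_tests("X448"), "setup - define tests");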
-promise_test(define_tests, 'setup - define tests'); +promise_test(define_tests_448, 'setup - define tests'); diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_fixtures.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_fixtures.js index ffdeb51eab9700..c376c75bfe6cf8 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_fixtures.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_bits_fixtures.js @@ -35,3 +35,6 @@ var kSmallOrderPoint = { { order: "p+1 (=1, order 1)", vector : new Uint8Array([48, 66, 48, 5, 6, 3, 43, 101, 111, 3, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255, 255]) }, ] }; + +// "P-256": +var ecSPKI = new Uint8Array([48, 89, 48, 19, 6, 7, 42, 134, 72, 206, 61, 2, 1, 6, 8, 42, 134, 72, 206, 61, 3, 1, 7, 3, 66, 0, 4, 154, 116, 32, 120, 126, 95, 77, 105, 211, 232, 34, 114, 115, 1, 109, 56, 224, 71, 129, 133, 223, 127, 238, 156, 142, 103, 60, 202, 211, 79, 126, 128, 254, 49, 141, 182, 221, 107, 119, 218, 99, 32, 165, 246, 151, 89, 9, 68, 23, 177, 52, 239, 138, 139, 116, 193, 101, 4, 57, 198, 115, 0, 90, 61]); diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys.js index 81244ba05a8766..62f9e00aa33846 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys.js @@ -1,5 +1,12 @@ +function define_tests_25519() { + return define_tests("X25519"); +} + +function define_tests_448() { + return define_tests("X448"); +} -function define_tests() { +function define_tests(algorithmName) { // May want to test prefixed implementations. var subtle = self.crypto.subtle; @@ -8,7 +15,7 @@ function define_tests() { // https://www.rfc-editor.org/rfc/rfc7748#section-6.1 // TODO: The spec states that the check must be done on use, but there is discussion about doing it on import. // https://github.com/WICG/webcrypto-secure-curves/pull/13 - Object.keys(kSmallOrderPoint).forEach(function(algorithmName) { + { kSmallOrderPoint[algorithmName].forEach(function(test) { promise_test(async() => { let derived; @@ -32,10 +39,10 @@ function define_tests() { assert_equals(derived, undefined, "Operation succeeded, but should not have."); }, algorithmName + " deriveBits checks for all-zero value result with a key of order " + test.order); }); - }); + } // Ensure the keys generated by each algorithm are valid for key derivation. 
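The refactor above also swaps the old cross-curve "wrong algorithm" key for an ECDH P-256 key imported from the new `ecSPKI` fixture. A hedged sketch of what that subtest asserts, where `x25519PrivateKey` is an illustrative stand-in for one of the imported private keys:

    const ecdhPublicKey = await crypto.subtle.importKey(
        "spki", ecSPKI, { name: "ECDH", namedCurve: "P-256" }, false, []);
    try {
      await crypto.subtle.deriveBits(
          { name: "X25519", public: ecdhPublicKey }, x25519PrivateKey, 256);
    } catch (err) {
      // The mismatched public-key algorithm is rejected before derivation.
      console.log(err.name); // "InvalidAccessError"
    }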
- Object.keys(sizes).forEach(function(algorithmName) { + { promise_test(async() => { let derived; try { @@ -46,15 +53,16 @@ function define_tests() { } assert_false (derived === undefined, "Key derivation failed."); }, "Key derivation using a " + algorithmName + " generated keys."); - }); + } return importKeys(pkcs8, spki, sizes) .then(function(results) { publicKeys = results.publicKeys; privateKeys = results.privateKeys; noDeriveKeyKeys = results.noDeriveKeyKeys; + ecdhKeys = results.ecdhKeys; - Object.keys(sizes).forEach(function(algorithmName) { + { // Basic success case promise_test(function(test) { return subtle.deriveKey({name: algorithmName, public: publicKeys[algorithmName]}, privateKeys[algorithmName], {name: "HMAC", hash: "SHA-256", length: 256}, true, ["sign", "verify"]) @@ -102,11 +110,7 @@ function define_tests() { // - wrong algorithm promise_test(function(test) { - publicKey = publicKeys["X25519"]; - if (algorithmName === "X25519") { - publicKey = publicKeys["X448"]; - } - return subtle.deriveKey({name: algorithmName, public: publicKey}, privateKeys[algorithmName], {name: "HMAC", hash: "SHA-256", length: 256}, true, ["sign", "verify"]) + return subtle.deriveKey({name: algorithmName, public: ecdhKeys[algorithmName]}, privateKeys[algorithmName], {name: "HMAC", hash: "SHA-256", length: 256}, true, ["sign", "verify"]) .then(function(key) {return crypto.subtle.exportKey("raw", key);}) .then(function(exportedKey) { assert_unreached("deriveKey succeeded but should have failed with InvalidAccessError"); @@ -161,16 +165,17 @@ function define_tests() { }); }); }, algorithmName + " public property value is a secret key"); - }); + } }); function importKeys(pkcs8, spki, sizes) { var privateKeys = {}; var publicKeys = {}; var noDeriveKeyKeys = {}; + var ecdhPublicKeys = {}; var promises = []; - Object.keys(pkcs8).forEach(function(algorithmName) { + { var operation = subtle.importKey("pkcs8", pkcs8[algorithmName], {name: algorithmName}, false, ["deriveBits", "deriveKey"]) @@ -180,8 +185,8 @@ function define_tests() { privateKeys[algorithmName] = null; }); promises.push(operation); - }); - Object.keys(pkcs8).forEach(function(algorithmName) { + } + { var operation = subtle.importKey("pkcs8", pkcs8[algorithmName], {name: algorithmName}, false, ["deriveBits"]) @@ -191,8 +196,8 @@ function define_tests() { noDeriveKeyKeys[algorithmName] = null; }); promises.push(operation); - }); - Object.keys(spki).forEach(function(algorithmName) { + } + { var operation = subtle.importKey("spki", spki[algorithmName], {name: algorithmName}, false, []) @@ -202,10 +207,18 @@ function define_tests() { publicKeys[algorithmName] = null; }); promises.push(operation); - }); + } + { + var operation = subtle.importKey("spki", ecSPKI, + {name: "ECDH", namedCurve: "P-256"}, + false, []) + .then(function(key) { + ecdhPublicKeys[algorithmName] = key; + }); + } return Promise.all(promises) - .then(function(results) {return {privateKeys: privateKeys, publicKeys: publicKeys, noDeriveKeyKeys: noDeriveKeyKeys}}); + .then(function(results) {return {privateKeys: privateKeys, publicKeys: publicKeys, noDeriveKeyKeys: noDeriveKeyKeys, ecdhKeys: ecdhPublicKeys}}); } // Compares two ArrayBuffer or ArrayBufferView objects. 
If bitCount is diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve25519.https.any.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve25519.https.any.js new file mode 100644 index 00000000000000..91390ba5c2a17a --- /dev/null +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve25519.https.any.js @@ -0,0 +1,10 @@ +// META: title=WebCryptoAPI: deriveKey() Using ECDH with CFRG Elliptic Curves +// META: script=cfrg_curves_bits_fixtures.js +// META: script=cfrg_curves_keys.js + +// Define subtests from a `promise_test` to ensure the harness does not +// complete before the subtests are available. `explicit_done` cannot be used +// for this purpose because the global `done` function is automatically invoked +// by the WPT infrastructure in dedicated worker tests defined using the +// "multi-global" pattern. +promise_test(define_tests_25519, 'setup - define tests'); diff --git a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys.https.any.js b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve448.https.any.js similarity index 89% rename from test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys.https.any.js rename to test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve448.https.any.js index 96658a56e81da9..b34e366376a70f 100644 --- a/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys.https.any.js +++ b/test/fixtures/wpt/WebCryptoAPI/derive_bits_keys/cfrg_curves_keys_curve448.https.any.js @@ -7,4 +7,4 @@ // for this purpose because the global `done` function is automatically invoked // by the WPT infrastructure in dedicated worker tests defined using the // "multi-global" pattern. -promise_test(define_tests, 'setup - define tests'); +promise_test(define_tests_448, 'setup - define tests'); diff --git a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey.https.any.js b/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey.https.any.js deleted file mode 100644 index a56bd31cbe14b1..00000000000000 --- a/test/fixtures/wpt/WebCryptoAPI/import_export/okp_importKey.https.any.js +++ /dev/null @@ -1,280 +0,0 @@ -// META: title=WebCryptoAPI: importKey() for OKP keys -// META: timeout=long -// META: script=../util/helpers.js - -// Test importKey and exportKey for OKP algorithms. Only "happy paths" are -// currently tested - those where the operation should succeed. 
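The header comment above describes the deleted file's "happy path" coverage, which now lives in the per-curve variants. A hedged sketch of that round trip, using the Ed25519 raw public-key bytes from the key data below (the helper name is illustrative):

    async function roundTripRawEd25519(rawBytes) {
      // Import as an extractable public key, then export and compare bytes.
      const key = await crypto.subtle.importKey(
          "raw", rawBytes, { name: "Ed25519" }, true, ["verify"]);
      const exported = new Uint8Array(await crypto.subtle.exportKey("raw", key));
      return exported.length === rawBytes.length &&
          exported.every((b, i) => b === rawBytes[i]);
    }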
- - var subtle = crypto.subtle; - - var keyData = { - "Ed25519": { - spki: new Uint8Array([48, 42, 48, 5, 6, 3, 43, 101, 112, 3, 33, 0, 216, 225, 137, 99, 216, 9, 212, 135, 217, 84, 154, 204, 174, 198, 116, 46, 126, 235, 162, 77, 138, 13, 59, 20, 183, 227, 202, 234, 6, 137, 61, 204]), - raw: new Uint8Array([216, 225, 137, 99, 216, 9, 212, 135, 217, 84, 154, 204, 174, 198, 116, 46, 126, 235, 162, 77, 138, 13, 59, 20, 183, 227, 202, 234, 6, 137, 61, 204]), - pkcs8: new Uint8Array([48, 46, 2, 1, 0, 48, 5, 6, 3, 43, 101, 112, 4, 34, 4, 32, 243, 200, 244, 196, 141, 248, 120, 20, 110, 140, 211, 191, 109, 244, 229, 14, 56, 155, 167, 7, 78, 21, 194, 53, 45, 205, 93, 48, 141, 76, 168, 31]), - jwk: { - crv: "Ed25519", - d: "88j0xI34eBRujNO_bfTlDjibpwdOFcI1Lc1dMI1MqB8", - x: "2OGJY9gJ1IfZVJrMrsZ0Ln7rok2KDTsUt-PK6gaJPcw", - kty: "OKP" - } - }, - - "Ed448": { - spki: new Uint8Array([48, 67, 48, 5, 6, 3, 43, 101, 113, 3, 58, 0, 171, 75, 184, 133, 253, 125, 44, 90, 242, 78, 131, 113, 12, 255, 160, 199, 74, 87, 226, 116, 128, 29, 178, 5, 123, 11, 220, 94, 160, 50, 182, 254, 107, 199, 139, 128, 69, 54, 90, 235, 38, 232, 110, 31, 20, 253, 52, 157, 7, 196, 132, 149, 245, 164, 106, 90, 128]), - raw: new Uint8Array([171, 75, 184, 133, 253, 125, 44, 90, 242, 78, 131, 113, 12, 255, 160, 199, 74, 87, 226, 116, 128, 29, 178, 5, 123, 11, 220, 94, 160, 50, 182, 254, 107, 199, 139, 128, 69, 54, 90, 235, 38, 232, 110, 31, 20, 253, 52, 157, 7, 196, 132, 149, 245, 164, 106, 90, 128]), - pkcs8: new Uint8Array([48, 71, 2, 1, 0, 48, 5, 6, 3, 43, 101, 113, 4, 59, 4, 57, 14, 255, 3, 69, 140, 40, 224, 23, 156, 82, 29, 227, 18, 201, 105, 183, 131, 67, 72, 236, 171, 153, 26, 96, 227, 178, 233, 167, 158, 76, 217, 228, 128, 239, 41, 23, 18, 210, 200, 61, 4, 114, 114, 213, 201, 244, 40, 102, 79, 105, 109, 38, 112, 69, 143, 29, 46]), - jwk: { - crv: "Ed448", - d: "Dv8DRYwo4BecUh3jEslpt4NDSOyrmRpg47Lpp55M2eSA7ykXEtLIPQRyctXJ9ChmT2ltJnBFjx0u", - x: "q0u4hf19LFryToNxDP-gx0pX4nSAHbIFewvcXqAytv5rx4uARTZa6ybobh8U_TSdB8SElfWkalqA", - kty: "OKP" - } - }, - - "X25519": { - spki: new Uint8Array([48, 42, 48, 5, 6, 3, 43, 101, 110, 3, 33, 0, 28, 242, 177, 230, 2, 46, 197, 55, 55, 30, 215, 245, 62, 84, 250, 17, 84, 216, 62, 152, 235, 100, 234, 81, 250, 229, 179, 48, 124, 254, 151, 6]), - raw: new Uint8Array([28, 242, 177, 230, 2, 46, 197, 55, 55, 30, 215, 245, 62, 84, 250, 17, 84, 216, 62, 152, 235, 100, 234, 81, 250, 229, 179, 48, 124, 254, 151, 6]), - pkcs8: new Uint8Array([48, 46, 2, 1, 0, 48, 5, 6, 3, 43, 101, 110, 4, 34, 4, 32, 200, 131, 142, 118, 208, 87, 223, 183, 216, 201, 90, 105, 225, 56, 22, 10, 221, 99, 115, 253, 113, 164, 210, 118, 187, 86, 227, 168, 27, 100, 255, 97]), - jwk: { - crv: "X25519", - d: "yIOOdtBX37fYyVpp4TgWCt1jc_1xpNJ2u1bjqBtk_2E", - x: "HPKx5gIuxTc3Htf1PlT6EVTYPpjrZOpR-uWzMHz-lwY", - kty: "OKP" - } - }, - - "X448": { - spki: new Uint8Array([48, 66, 48, 5, 6, 3, 43, 101, 111, 3, 57, 0, 182, 4, 161, 209, 165, 205, 29, 148, 38, 213, 97, 239, 99, 10, 158, 177, 108, 190, 105, 213, 185, 202, 97, 94, 220, 83, 99, 62, 251, 82, 234, 49, 230, 230, 160, 161, 219, 172, 198, 231, 108, 188, 230, 72, 45, 126, 75, 163, 213, 93, 158, 128, 39, 101, 206, 111]), - raw: new Uint8Array([182, 4, 161, 209, 165, 205, 29, 148, 38, 213, 97, 239, 99, 10, 158, 177, 108, 190, 105, 213, 185, 202, 97, 94, 220, 83, 99, 62, 251, 82, 234, 49, 230, 230, 160, 161, 219, 172, 198, 231, 108, 188, 230, 72, 45, 126, 75, 163, 213, 93, 158, 128, 39, 101, 206, 111]), - pkcs8: new Uint8Array([48, 70, 2, 1, 0, 48, 5, 6, 3, 43, 101, 111, 4, 58, 4, 56, 88, 199, 
210, 154, 62, 181, 25, 178, 157, 0, 207, 177, 145, 187, 100, 252, 109, 138, 66, 216, 241, 113, 118, 39, 43, 137, 242, 39, 45, 24, 25, 41, 92, 101, 37, 192, 130, 150, 113, 176, 82, 239, 7, 39, 83, 15, 24, 142, 49, 208, 204, 83, 191, 38, 146, 158]), - jwk: { - crv: "X448", - d: "WMfSmj61GbKdAM-xkbtk_G2KQtjxcXYnK4nyJy0YGSlcZSXAgpZxsFLvBydTDxiOMdDMU78mkp4", - x: "tgSh0aXNHZQm1WHvYwqesWy-adW5ymFe3FNjPvtS6jHm5qCh26zG52y85kgtfkuj1V2egCdlzm8", - kty: "OKP" - } - }, - - }; - - // combinations to test - var testVectors = [ - {name: "Ed25519", privateUsages: ["sign"], publicUsages: ["verify"]}, - {name: "Ed448", privateUsages: ["sign"], publicUsages: ["verify"]}, - {name: "X25519", privateUsages: ["deriveKey", "deriveBits"], publicUsages: []}, - {name: "X448", privateUsages: ["deriveKey", "deriveBits"], publicUsages: []}, - ]; - - // TESTS ARE HERE: - // Test every test vector, along with all available key data - testVectors.forEach(function(vector) { - [true, false].forEach(function(extractable) { - - // Test public keys first - allValidUsages(vector.publicUsages, true).forEach(function(usages) { - ['spki', 'jwk', 'raw'].forEach(function(format) { - var algorithm = {name: vector.name}; - var data = keyData[vector.name]; - if (format === "jwk") { // Not all fields used for public keys - data = {jwk: {kty: keyData[vector.name].jwk.kty, crv: keyData[vector.name].jwk.crv, x: keyData[vector.name].jwk.x}}; - } - - testFormat(format, algorithm, data, vector.name, usages, extractable); - - // Test for https://github.com/WICG/webcrypto-secure-curves/pull/24 - if (format === "jwk" && extractable) { - testJwkAlgBehaviours(algorithm, data.jwk, vector.name, usages); - } - }); - - }); - - // Next, test private keys - allValidUsages(vector.privateUsages).forEach(function(usages) { - ['pkcs8', 'jwk'].forEach(function(format) { - var algorithm = {name: vector.name}; - var data = keyData[vector.name]; - - testFormat(format, algorithm, data, vector.name, usages, extractable); - - // Test for https://github.com/WICG/webcrypto-secure-curves/pull/24 - if (format === "jwk" && extractable) { - testJwkAlgBehaviours(algorithm, data.jwk, vector.name, usages); - } - }); - }); - }); - }); - - - // Test importKey with a given key format and other parameters. If - // extrable is true, export the key and verify that it matches the input. - function testFormat(format, algorithm, keyData, keySize, usages, extractable) { - promise_test(function(test) { - return subtle.importKey(format, keyData[format], algorithm, extractable, usages). - then(function(key) { - assert_equals(key.constructor, CryptoKey, "Imported a CryptoKey object"); - assert_goodCryptoKey(key, algorithm, extractable, usages, (format === 'pkcs8' || (format === 'jwk' && keyData[format].d)) ? 'private' : 'public'); - if (!extractable) { - return; - } - - return subtle.exportKey(format, key). 
-      then(function(result) {
-        if (format !== "jwk") {
-          assert_true(equalBuffers(keyData[format], result), "Round trip works");
-        } else {
-          assert_true(equalJwk(keyData[format], result), "Round trip works");
-        }
-      }, function(err) {
-        assert_unreached("Threw an unexpected error: " + err.toString());
-      });
-    }, function(err) {
-      assert_unreached("Threw an unexpected error: " + err.toString());
-    });
-  }, "Good parameters: " + keySize.toString() + " bits " + parameterString(format, keyData[format], algorithm, extractable, usages));
-  }
-
-  // Test importKey/exportKey "alg" behaviours, alg is ignored upon import and alg is missing for Ed25519 and Ed448 JWK export
-  // https://github.com/WICG/webcrypto-secure-curves/pull/24
-  function testJwkAlgBehaviours(algorithm, keyData, crv, usages) {
-    promise_test(function(test) {
-      return subtle.importKey('jwk', { ...keyData, alg: 'this is ignored' }, algorithm, true, usages).
-      then(function(key) {
-        assert_equals(key.constructor, CryptoKey, "Imported a CryptoKey object");
-
-        return subtle.exportKey('jwk', key).
-        then(function(result) {
-          assert_equals(Object.keys(result).length, keyData.d ? 6 : 5, "Correct number of JWK members");
-          assert_equals(result.alg, undefined, 'No JWK "alg" member is present');
-          assert_true(equalJwk(keyData, result), "Round trip works");
-        }, function(err) {
-          assert_unreached("Threw an unexpected error: " + err.toString());
-        });
-      }, function(err) {
-        assert_unreached("Threw an unexpected error: " + err.toString());
-      });
-    }, "Good parameters with ignored JWK alg: " + crv.toString() + " " + parameterString('jwk', keyData, algorithm, true, usages));
-  }
-
-
-  // Helper methods follow:
-
-  // Are two array buffers the same?
-  function equalBuffers(a, b) {
-    if (a.byteLength !== b.byteLength) {
-      return false;
-    }
-
-    var aBytes = new Uint8Array(a);
-    var bBytes = new Uint8Array(b);
-
-    for (var i=0; i<a.byteLength; i++) {
-      if (aBytes[i] !== bBytes[i]) {
-        return false;
-      }
-    }
-
-    return true;
-  }
+
+
+AbortSignal::Any when source signal was garbage collected
+
+
+
+

    Test passes if the browser does not crash.
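The crashtest above only needs to survive; the behavioral additions to abort-signal-any-tests.js below pin down the semantics. A small standalone sketch, assuming a DOM environment with `AbortSignal.any()`: the dependent signal adopts the first abort reason and is already flagged aborted when its "abort" listeners run.

    const controller = new AbortController();
    const dependent = AbortSignal.any([controller.signal]);

    dependent.addEventListener("abort", () => {
      // The dependent signal is settled before this listener runs.
      console.log(dependent.aborted); // true
      console.log(dependent.reason);  // "reason 1"
    });

    controller.abort("reason 1"); // reentrant aborts do not change the reason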

    + + + diff --git a/test/fixtures/wpt/dom/abort/crashtests/any-on-abort.html b/test/fixtures/wpt/dom/abort/crashtests/any-on-abort.html new file mode 100644 index 00000000000000..07a0f0bd3c2ba6 --- /dev/null +++ b/test/fixtures/wpt/dom/abort/crashtests/any-on-abort.html @@ -0,0 +1,11 @@ + + + diff --git a/test/fixtures/wpt/dom/abort/resources/abort-signal-any-tests.js b/test/fixtures/wpt/dom/abort/resources/abort-signal-any-tests.js index 66e4141eaccb08..8f897c934e87ea 100644 --- a/test/fixtures/wpt/dom/abort/resources/abort-signal-any-tests.js +++ b/test/fixtures/wpt/dom/abort/resources/abort-signal-any-tests.js @@ -182,4 +182,59 @@ function abortSignalAnyTests(signalInterface, controllerInterface) { controller.abort(); assert_equals(result, "01234"); }, `Abort events for ${desc} signals fire in the right order ${suffix}`); + + test(t => { + const controller = new controllerInterface(); + const signal1 = signalInterface.any([controller.signal]); + const signal2 = signalInterface.any([signal1]); + let eventFired = false; + + controller.signal.addEventListener('abort', () => { + const signal3 = signalInterface.any([signal2]); + assert_true(controller.signal.aborted); + assert_true(signal1.aborted); + assert_true(signal2.aborted); + assert_true(signal3.aborted); + eventFired = true; + }); + + controller.abort(); + assert_true(eventFired, "event fired"); + }, `Dependent signals for ${desc} are marked aborted before abort events fire ${suffix}`); + + test(t => { + const controller1 = new controllerInterface(); + const controller2 = new controllerInterface(); + const signal = signalInterface.any([controller1.signal, controller2.signal]); + let count = 0; + + controller1.signal.addEventListener('abort', () => { + controller2.abort("reason 2"); + }); + + signal.addEventListener('abort', () => { + count++; + }); + + controller1.abort("reason 1"); + assert_equals(count, 1); + assert_true(signal.aborted); + assert_equals(signal.reason, "reason 1"); + }, `Dependent signals for ${desc} are aborted correctly for reentrant aborts ${suffix}`); + + test(t => { + const source = signalInterface.abort(); + const dependent = signalInterface.any([source]); + assert_true(source.reason instanceof DOMException); + assert_equals(source.reason, dependent.reason); + }, `Dependent signals for ${desc} should use the same DOMException instance from the already aborted source signal ${suffix}`); + + test(t => { + const controller = new controllerInterface(); + const source = controller.signal; + const dependent = signalInterface.any([source]); + controller.abort(); + assert_true(source.reason instanceof DOMException); + assert_equals(source.reason, dependent.reason); + }, `Dependent signals for ${desc} should use the same DOMException instance from the source signal being aborted later ${suffix}`); } diff --git a/test/fixtures/wpt/dom/events/Event-dispatch-click.html b/test/fixtures/wpt/dom/events/Event-dispatch-click.html index 010305775df7e9..ab4a24a5ad5096 100644 --- a/test/fixtures/wpt/dom/events/Event-dispatch-click.html +++ b/test/fixtures/wpt/dom/events/Event-dispatch-click.html @@ -87,6 +87,22 @@ child.dispatchEvent(new MouseEvent("click", {bubbles:true})) }, "pick the first with activation behavior ") +async_test(function(t) { + var input = document.createElement("input") + input.type = "radio" + dump.appendChild(input) + input.onclick = t.step_func(function() { + assert_false(input.checked, "input pre-click must not be triggered") + }) + var child = input.appendChild(document.createElement("input")) + 
child.type = "radio" + child.onclick = t.step_func(function() { + assert_true(child.checked, "child pre-click must be triggered") + }) + child.dispatchEvent(new MouseEvent("click", {bubbles:true})) + t.done() +}, "pick the first with activation behavior ") + async_test(function(t) { var input = document.createElement("input") input.type = "checkbox" @@ -173,6 +189,46 @@ t.done() }, "disabled checkbox still has activation behavior, part 2") +async_test(function(t) { + var state = "start" + + var form = document.createElement("form") + form.onsubmit = t.step_func(() => { + if(state == "start" || state == "radio") { + state = "failure" + } else if(state == "form") { + state = "done" + } + return false + }) + dump.appendChild(form) + var button = form.appendChild(document.createElement("button")) + button.type = "submit" + var radio = button.appendChild(document.createElement("input")) + radio.type = "radio" + radio.onclick = t.step_func(() => { + if(state == "start") { + assert_unreached() + } else if(state == "radio") { + assert_true(radio.checked) + } + }) + radio.disabled = true + radio.click() + assert_equals(state, "start") + + state = "radio" + radio.disabled = false + radio.click() + assert_equals(state, "radio") + + state = "form" + button.click() + assert_equals(state, "done") + + t.done() +}, "disabled radio still has activation behavior") + async_test(function(t) { var input = document.createElement("input") input.type = "checkbox" diff --git a/test/fixtures/wpt/dom/events/Event-dispatch-on-disabled-elements.html b/test/fixtures/wpt/dom/events/Event-dispatch-on-disabled-elements.html index 361006a7240496..e7d6b455bbcaab 100644 --- a/test/fixtures/wpt/dom/events/Event-dispatch-on-disabled-elements.html +++ b/test/fixtures/wpt/dom/events/Event-dispatch-on-disabled-elements.html @@ -19,7 +19,7 @@ + +
    + +
    + + + + + diff --git a/test/fixtures/wpt/dom/events/Event-dispatch-throwing-multiple-globals.html b/test/fixtures/wpt/dom/events/Event-dispatch-throwing-multiple-globals.html new file mode 100644 index 00000000000000..a56fdf6af86581 --- /dev/null +++ b/test/fixtures/wpt/dom/events/Event-dispatch-throwing-multiple-globals.html @@ -0,0 +1,69 @@ + + + + + + diff --git a/test/fixtures/wpt/dom/events/EventTarget-constructible.any.js b/test/fixtures/wpt/dom/events/EventTarget-constructible.any.js index b0e7614e625b3d..4125d23f0c9650 100644 --- a/test/fixtures/wpt/dom/events/EventTarget-constructible.any.js +++ b/test/fixtures/wpt/dom/events/EventTarget-constructible.any.js @@ -23,6 +23,23 @@ test(() => { assert_equals(callCount, 2); }, "A constructed EventTarget can be used as expected"); +test(() => { + const target = new EventTarget(); + const event = new Event("foo"); + + function listener(e) { + assert_equals(e, event); + assert_equals(e.target, target); + assert_equals(e.currentTarget, target); + assert_array_equals(e.composedPath(), [target]); + } + target.addEventListener("foo", listener, { once: true }); + target.dispatchEvent(event); + assert_equals(event.target, target); + assert_equals(event.currentTarget, null); + assert_array_equals(event.composedPath(), []); +}, "A constructed EventTarget implements dispatch correctly"); + test(() => { class NicerEventTarget extends EventTarget { on(...args) { diff --git a/test/fixtures/wpt/dom/events/event-global.html b/test/fixtures/wpt/dom/events/event-global.html index 3e8d25ecb5dd9d..f70606fb65496a 100644 --- a/test/fixtures/wpt/dom/events/event-global.html +++ b/test/fixtures/wpt/dom/events/event-global.html @@ -114,4 +114,14 @@ target.dispatchEvent(new Event("click")); }, "window.event is set to the current event, which is the event passed to dispatch"); + +async_test(t => { + let target = new XMLHttpRequest(); + + target.onload = t.step_func_done(e => { + assert_equals(e, window.event); + }); + + target.dispatchEvent(new Event("load")); +}, "window.event is set to the current event, which is the event passed to dispatch (2)"); diff --git a/test/fixtures/wpt/dom/events/pointer-event-document-move.html b/test/fixtures/wpt/dom/events/pointer-event-document-move.html new file mode 100644 index 00000000000000..91e7c36860572a --- /dev/null +++ b/test/fixtures/wpt/dom/events/pointer-event-document-move.html @@ -0,0 +1,29 @@ + + + + + + + + + + + + diff --git a/test/fixtures/wpt/dom/events/preventDefault-during-activation-behavior.html b/test/fixtures/wpt/dom/events/preventDefault-during-activation-behavior.html new file mode 100644 index 00000000000000..92874031347165 --- /dev/null +++ b/test/fixtures/wpt/dom/events/preventDefault-during-activation-behavior.html @@ -0,0 +1,53 @@ + +preventDefault during activation behavior + + + + + + + + + +
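A standalone sketch of the dispatch bookkeeping the EventTarget-constructible addition above asserts: `currentTarget` and `composedPath()` are only populated while the event is mid-dispatch.

    const target = new EventTarget();
    const event = new Event("foo");

    target.addEventListener("foo", (e) => {
      console.log(e.currentTarget === target);  // true during dispatch
      console.log(e.composedPath().length);     // 1, i.e. [target]
    }, { once: true });

    target.dispatchEvent(event);
    console.log(event.currentTarget);           // null after dispatch
    console.log(event.composedPath().length);   // 0 after dispatch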
    + + + + diff --git a/test/fixtures/wpt/dom/events/remove-all-listeners.html b/test/fixtures/wpt/dom/events/remove-all-listeners.html new file mode 100644 index 00000000000000..3a2a751a146bca --- /dev/null +++ b/test/fixtures/wpt/dom/events/remove-all-listeners.html @@ -0,0 +1,95 @@ + +Various edge cases where listeners are removed during iteration + + +
    + diff --git a/test/fixtures/wpt/dom/events/scrolling/WEB_FEATURES.yml b/test/fixtures/wpt/dom/events/scrolling/WEB_FEATURES.yml new file mode 100644 index 00000000000000..e4fba841b62ac9 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/WEB_FEATURES.yml @@ -0,0 +1,4 @@ +features: +- name: scrollend + files: + - scrollend-* diff --git a/test/fixtures/wpt/dom/events/scrolling/overscroll-deltas.html b/test/fixtures/wpt/dom/events/scrolling/overscroll-deltas.html index 6f0b77f22eda2a..e13e9f1cce5949 100644 --- a/test/fixtures/wpt/dom/events/scrolling/overscroll-deltas.html +++ b/test/fixtures/wpt/dom/events/scrolling/overscroll-deltas.html @@ -6,80 +6,152 @@ - -
    -
    + +
    +
    +
    +
    +
    diff --git a/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-scrolled-element.html b/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-scrolled-element.html index cfc782a809a7e7..be4176df59d6a1 100644 --- a/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-scrolled-element.html +++ b/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-scrolled-element.html @@ -10,11 +10,14 @@ width: 200px; height: 200px; overflow: scroll; + position: absolute; + left: 150px; + top: 150px; } #innerDiv { - width: 400px; - height: 400px; + width: 250px; + height: 250px; } @@ -45,7 +48,7 @@ await waitForCompositorCommit(); // Do a horizontal scroll and wait for overscroll event. - await touchScrollInTarget(300, scrolling_div , 'right'); + await touchScrollInTarget(100, scrolling_div , 'right'); await waitFor(() => { return overscrolled_x_delta > 0; }, 'Scroller did not receive overscroll event after horizontal scroll.'); assert_equals(scrolling_div.scrollWidth - scrolling_div.scrollLeft, @@ -55,7 +58,7 @@ overscrolled_y_delta = 0; // Do a vertical scroll and wait for overscroll event. - await touchScrollInTarget(300, scrolling_div, 'down'); + await touchScrollInTarget(100, scrolling_div, 'down'); await waitFor(() => { return overscrolled_y_delta > 0; }, 'Scroller did not receive overscroll event after vertical scroll.'); assert_equals(scrolling_div.scrollHeight - scrolling_div.scrollTop, diff --git a/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html b/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html index ef5ae3daef8158..348dadcb4c50b7 100644 --- a/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html +++ b/test/fixtures/wpt/dom/events/scrolling/overscroll-event-fired-to-window.html @@ -31,7 +31,7 @@ function onOverscroll(event) { assert_false(event.cancelable); - // overscroll events targetting document are bubbled to the window. + // overscroll events targeting document are bubbled to the window. assert_true(event.bubbles); window_received_overscroll = true; } diff --git a/test/fixtures/wpt/dom/events/scrolling/scroll_support.js b/test/fixtures/wpt/dom/events/scrolling/scroll_support.js index 169393e4c3e419..3d7709772baafb 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scroll_support.js +++ b/test/fixtures/wpt/dom/events/scrolling/scroll_support.js @@ -1,15 +1,83 @@ -async function waitForScrollendEvent(test, target, timeoutMs = 500) { +async function waitForEvent(eventName, test, target, timeoutMs = 500) { return new Promise((resolve, reject) => { const timeoutCallback = test.step_timeout(() => { - reject(`No Scrollend event received for target ${target}`); + reject(`No ${eventName} event received for target ${target}`); }, timeoutMs); - target.addEventListener('scrollend', (evt) => { + target.addEventListener(eventName, (evt) => { clearTimeout(timeoutCallback); resolve(evt); }, { once: true }); }); } +async function waitForScrollendEvent(test, target, timeoutMs = 500) { + return waitForEvent("scrollend", test, target, timeoutMs); +} + +async function waitForScrollendEventNoTimeout(target) { + return new Promise((resolve) => { + target.addEventListener("scrollend", resolve); + }); +} + +async function waitForPointercancelEvent(test, target, timeoutMs = 500) { + return waitForEvent("pointercancel", test, target, timeoutMs); +} + +// Resets the scroll position to (0,0). 
If a scroll is required, then the
+// promise is not resolved until the scrollend event is received.
+async function waitForScrollReset(test, scroller, x = 0, y = 0) {
+  return new Promise(resolve => {
+    if (scroller.scrollLeft == x && scroller.scrollTop == y) {
+      resolve();
+    } else {
+      const eventTarget =
+        scroller == document.scrollingElement ? document : scroller;
+      scroller.scrollTo(x, y);
+      waitForScrollendEventNoTimeout(eventTarget).then(resolve);
+    }
+  });
+}
+
+async function createScrollendPromiseForTarget(test,
+                                               target_div,
+                                               timeoutMs = 500) {
+  return waitForScrollendEvent(test, target_div, timeoutMs).then(evt => {
+    assert_false(evt.cancelable, 'Event is not cancelable');
+    assert_false(evt.bubbles, 'Event targeting element does not bubble');
+  });
+}
+
+function verifyNoScrollendOnDocument(test) {
+  const callback =
+      test.unreached_func("window got unexpected scrollend event.");
+  window.addEventListener('scrollend', callback);
+  test.add_cleanup(() => {
+    window.removeEventListener('scrollend', callback);
+  });
+}
+
+async function verifyScrollStopped(test, target_div) {
+  const unscaled_pause_time_in_ms = 100;
+  const x = target_div.scrollLeft;
+  const y = target_div.scrollTop;
+  return new Promise(resolve => {
+    test.step_timeout(() => {
+      assert_equals(target_div.scrollLeft, x);
+      assert_equals(target_div.scrollTop, y);
+      resolve();
+    }, unscaled_pause_time_in_ms);
+  });
+}
+
+async function resetTargetScrollState(test, target_div) {
+  if (target_div.scrollTop != 0 || target_div.scrollLeft != 0) {
+    target_div.scrollTop = 0;
+    target_div.scrollLeft = 0;
+    return waitForScrollendEvent(test, target_div);
+  }
+}
+
 const MAX_FRAME = 700;
 const MAX_UNCHANGED_FRAMES = 20;
@@ -20,7 +88,7 @@ const MAX_UNCHANGED_FRAMES = 20;
 function waitFor(condition, error_message = 'Reaches the maximum frames.') {
   return new Promise((resolve, reject) => {
     function tick(frames) {
-      // We requestAnimationFrame either for MAX_FRAM frames or until condition
+      // We requestAnimationFrame either for MAX_FRAME frames or until condition
       // is met.
       if (frames >= MAX_FRAME)
         reject(error_message);
@@ -45,6 +113,29 @@ function waitForCompositorCommit() {
   });
 }
+
+// Please don't remove this. This is necessary for chromium-based browsers. It
+// can be a no-op on user-agents that do not have a separate compositor thread.
+// TODO(crbug.com/1509054): This shouldn't be necessary if the test harness
+// deferred running the tests until after paint holding.
+async function waitForCompositorReady() {
+  const animation =
+      document.body.animate({ opacity: [ 0, 1 ] }, {duration: 1 });
+  return animation.finished;
+}
+
+function waitForNextFrame() {
+  const startTime = performance.now();
+  return new Promise(resolve => {
+    window.requestAnimationFrame((frameTime) => {
+      if (frameTime < startTime) {
+        window.requestAnimationFrame(resolve);
+      } else {
+        resolve();
+      }
+    });
+  });
+}
+
 // TODO(crbug.com/1400399): Deprecate as frame rates may vary greatly in
 // different test environments.
 function waitForAnimationEnd(getValue) {
@@ -70,6 +161,10 @@ function waitForAnimationEnd(getValue) {
 }

 // Scrolls in target according to move_path with pauses in between
+// The move_path should contain coordinates that are within target boundaries.
+// Keep in mind that (0, 0) is the center of the target element and is also
+// the pointerDown position.
+// pointerUp() is fired after the sequence of moves.
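A hedged sketch of the `test_driver.Actions` sequence the comment above documents, before the full helper that follows: coordinates are relative to the element's center, and the pauses keep the gesture from turning into an inertial fling.

    async function dragUpWithoutFling(target) {
      return new test_driver.Actions()
          .addPointer("pointer1", "touch")
          .pointerMove(0, 0, { origin: target })   // element center
          .pointerDown()
          .pointerMove(0, -40, { origin: target })
          .pause(100)                              // prevent inertial scroll
          .pointerMove(0, -80, { origin: target })
          .pause(100)
          .pointerUp()
          .send();
    }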
function touchScrollInTargetSequentiallyWithPause(target, move_path, pause_time_in_ms = 100) { const test_driver_actions = new test_driver.Actions() .addPointer("pointer1", "touch") @@ -88,7 +183,7 @@ function touchScrollInTargetSequentiallyWithPause(target, move_path, pause_time_ y += step_y; test_driver_actions.pointerMove(x, y, {origin: target}); } - test_driver_actions.pause(pause_time_in_ms); + test_driver_actions.pause(pause_time_in_ms); // To prevent inertial scroll } return test_driver_actions.pointerUp().send(); @@ -125,7 +220,7 @@ function touchScrollInTarget(pixels_to_scroll, target, direction, pause_time_in_ // Trigger fling by doing pointerUp right after pointerMoves. function touchFlingInTarget(pixels_to_scroll, target, direction) { - touchScrollInTarget(pixels_to_scroll, target, direction, 0 /* pause_time */); + return touchScrollInTarget(pixels_to_scroll, target, direction, 0 /* pause_time */); } function mouseActionsInTarget(target, origin, delta, pause_time_in_ms = 100) { @@ -161,3 +256,23 @@ function conditionHolds(condition, error_message = 'Condition is not true anymor tick(0); }); } + +function scrollElementDown(element, scroll_amount) { + let x = 0; + let y = 0; + let delta_x = 0; + let delta_y = scroll_amount; + let actions = new test_driver.Actions() + .scroll(x, y, delta_x, delta_y, {origin: element}); + return actions.send(); +} + +function scrollElementLeft(element, scroll_amount) { + let x = 0; + let y = 0; + let delta_x = scroll_amount; + let delta_y = 0; + let actions = new test_driver.Actions() + .scroll(x, y, delta_x, delta_y, {origin: element}); + return actions.send(); +} diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-sequence-of-scrolls.tentative.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-sequence-of-scrolls.tentative.html index 77bf029ced58c5..dab6dcc9bd8d67 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-sequence-of-scrolls.tentative.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-after-sequence-of-scrolls.tentative.html @@ -14,7 +14,7 @@ } #innerDiv { - width: 500px; + width: 4000px; height: 4000px; } @@ -28,36 +28,64 @@ diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-mandatory-snap-point-after-load.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-mandatory-snap-point-after-load.html new file mode 100644 index 00000000000000..f3791134204497 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-mandatory-snap-point-after-load.html @@ -0,0 +1,87 @@ + + + + + + + + + + + + scrollend + mandatory scroll snap test + + + + + + + + + + + \ No newline at end of file diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html index c6569e0bebbd9f..449aea05351244 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-programmatic-scroll.html @@ -1,11 +1,19 @@ - + + + + + + + + + + + +
    +
    +
    +
    + + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html index 8782b1dfee6237..40aa77f4764b6c 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-for-scrollIntoView.html @@ -108,10 +108,12 @@ document.body.appendChild(out_div); await waitForCompositorCommit(); - element_scrollend_arrived = false; - document_scrollend_arrived = false; inner_div.scrollIntoView({ inline: "end", block: "end", behavior: "auto" }); - await waitFor(() => { return element_scrollend_arrived || document_scrollend_arrived; }, "Nested scrollIntoView did not receive scrollend event."); + const scrollend_events = [ + waitForScrollendEventNoTimeout(out_div), + waitForScrollendEventNoTimeout(target_div) + ]; + await Promise.all(scrollend_events); assert_equals(root_element.scrollLeft, 0, "Nested scrollIntoView root_element scrollLeft"); assert_equals(root_element.scrollTop, 0, "Nested scrollIntoView root_element scrollTop"); assert_equals(out_div.scrollLeft, 100, "Nested scrollIntoView out_div scrollLeft"); diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html index 30904553883435..797c2eb53da0e3 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-document.html @@ -7,64 +7,100 @@ -
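The nested-scrollIntoView change above waits on both scrollers rather than racing on whichever fires first. A sketch of the same pattern with the scroll_support.js helper; registering the promises before triggering the scroll avoids missing an early event:

    const scrollend_events = [
      waitForScrollendEventNoTimeout(out_div),
      waitForScrollendEventNoTimeout(target_div),
    ];
    inner_div.scrollIntoView({ inline: "end", block: "end", behavior: "auto" });
    await Promise.all(scrollend_events);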
    -
    +
    +
    -
    +
    +
    diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html index acad168e56c995..edda88e7cb2064 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-element-with-overscroll-behavior.html @@ -7,96 +7,167 @@ -
    -
    -
    -
    -
    -
    -
    -
    +
    +
    +
    +
    -
    +
    +
    + +
    +
    +
    +
    +
    +
    diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-scrolled-element.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-scrolled-element.html deleted file mode 100644 index 734339694220cc..00000000000000 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-scrolled-element.html +++ /dev/null @@ -1,68 +0,0 @@ - - - - - - - - - - - -
    -
    -
    -
    - - - diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html index ef72f56d2ba9d6..d2fd6f4d3158a5 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fired-to-window.html @@ -7,49 +7,63 @@ -
    -
    +
    +
    -
    +
    diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-on-visual-viewport.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-on-visual-viewport.html new file mode 100644 index 00000000000000..99a281480ff11f --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-on-visual-viewport.html @@ -0,0 +1,68 @@ + + + + + + + + + + + + + + +
    + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-to-iframe-inner-frame.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-to-iframe-inner-frame.html new file mode 100644 index 00000000000000..115e583c067ebd --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-to-iframe-inner-frame.html @@ -0,0 +1,30 @@ + + + + +
    +
    +
    +
    + + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-to-iframe-window.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-to-iframe-window.html new file mode 100644 index 00000000000000..9cd3b421fe9036 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-fires-to-iframe-window.html @@ -0,0 +1,77 @@ + + + + + + + + + + + + + + + + + + + + + + \ No newline at end of file diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html index 5146c5f719a1e4..a06843a35e7ec9 100644 --- a/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-event-for-user-scroll.html @@ -8,6 +8,7 @@ + + + +
    + + +
    +
    + +
    + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-fires-to-text-input.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-fires-to-text-input.html new file mode 100644 index 00000000000000..edc75d9121776e --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-fires-to-text-input.html @@ -0,0 +1,32 @@ + + + + + + + + + + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-user-scroll-common.js b/test/fixtures/wpt/dom/events/scrolling/scrollend-user-scroll-common.js new file mode 100644 index 00000000000000..1b637233882843 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-user-scroll-common.js @@ -0,0 +1,150 @@ + +async function test_scrollend_on_touch_drag(t, target_div) { + // Skip the test on a Mac as they do not support touch screens. + const isMac = navigator.platform.toUpperCase().indexOf('MAC') >= 0; + if (isMac) + return; + + await resetTargetScrollState(t, target_div); + await waitForCompositorReady(); + + const targetScrollendPromise = waitForScrollendEventNoTimeout(target_div); + verifyNoScrollendOnDocument(t); + + let scrollend_count = 0; + const scrollend_listener = () => { + scrollend_count += 1; + }; + target_div.addEventListener("scrollend", scrollend_listener); + t.add_cleanup(() => { + target_div.removeEventListener('scrollend', scrollend_listener); + }); + + // Perform a touch drag on target div and wait for target_div to get + // a scrollend event. + await new test_driver.Actions() + .addPointer('TestPointer', 'touch') + .pointerMove(0, 0, { origin: target_div }) // 0, 0 is center of element. + .pointerDown() + .addTick() + .pointerMove(0, -40, { origin: target_div }) // Drag up to move down. + .addTick() + .pause(200) // Prevent inertial scroll. + .pointerMove(0, -60, { origin: target_div }) + .addTick() + .pause(200) // Prevent inertial scroll. + .pointerUp() + .send(); + + await targetScrollendPromise; + + assert_true(target_div.scrollTop > 0); + await verifyScrollStopped(t, target_div); + assert_equals(scrollend_count, 1); +} + +async function test_scrollend_on_scrollbar_gutter_click(t, target_div) { + // Skip test on platforms that do not have a visible scrollbar (e.g. + // overlay scrollbar). + const scrollbar_width = target_div.offsetWidth - target_div.clientWidth; + if (scrollbar_width == 0) + return; + + await resetTargetScrollState(t, target_div); + await waitForCompositorReady(); + + const targetScrollendPromise = waitForScrollendEventNoTimeout(target_div); + verifyNoScrollendOnDocument(t); + + const bounds = target_div.getBoundingClientRect(); + // Some versions of webdriver have been known to frown at non-int arguments + // to pointerMove. + const x = Math.round(bounds.right - scrollbar_width / 2); + const y = Math.round(bounds.bottom - 20); + await new test_driver.Actions() + .addPointer('TestPointer', 'mouse') + .pointerMove(x, y, { origin: 'viewport' }) + .pointerDown() + .addTick() + .pointerUp() + .send(); + + await targetScrollendPromise; + assert_true(target_div.scrollTop > 0); + await verifyScrollStopped(t, target_div); +} + +// Same issue as previous test. +async function test_scrollend_on_scrollbar_thumb_drag(t, target_div) { + // Skip test on platforms that do not have a visible scrollbar (e.g. + // overlay scrollbar). 
+ const scrollbar_width = target_div.offsetWidth - target_div.clientWidth; + if (scrollbar_width == 0) + return; + + await resetTargetScrollState(t, target_div); + await waitForCompositorReady(); + + const targetScrollendPromise = waitForScrollendEventNoTimeout(target_div); + verifyNoScrollendOnDocument(t); + + const bounds = target_div.getBoundingClientRect(); + // Some versions of webdriver have been known to frown at non-int arguments + // to pointerMove. + const x = Math.round(bounds.right - scrollbar_width / 2); + const y = Math.round(bounds.top + 30); + const dy = 30; + await new test_driver.Actions() + .addPointer('TestPointer', 'mouse') + .pointerMove(x, y, { origin: 'viewport' }) + .pointerDown() + .pointerMove(x, y + dy, { origin: 'viewport' }) + .addTick() + .pointerUp() + .send(); + + await targetScrollendPromise; + assert_true(target_div.scrollTop > 0); + await verifyScrollStopped(t, target_div); +} + +async function test_scrollend_on_mousewheel_scroll(t, target_div, frame) { + await resetTargetScrollState(t, target_div); + await waitForCompositorReady(); + + const targetScrollendPromise = waitForScrollendEventNoTimeout(target_div); + verifyNoScrollendOnDocument(t); + + let scroll_origin = target_div; + if (frame) { + // chromedriver doesn't support passing { origin: element } + // for an element within a subframe. Use the frame element itself. + scroll_origin = frame; + } + const x = 0; + const y = 0; + const dx = 0; + const dy = 40; + await new test_driver.Actions() + .scroll(x, y, dx, dy, { origin: scroll_origin }) + .send(); + + await targetScrollendPromise; + assert_true(target_div.scrollTop > 0); + await verifyScrollStopped(t, target_div); +} + +async function test_scrollend_on_keyboard_scroll(t, target_div) { + await resetTargetScrollState(t, target_div); + await waitForCompositorReady(); + + verifyNoScrollendOnDocument(t); + const targetScrollendPromise = waitForScrollendEventNoTimeout(target_div); + + target_div.focus(); + window.test_driver.send_keys(target_div, '\ue015'); + + await targetScrollendPromise; + assert_true(target_div.scrollTop > 0); + await verifyScrollStopped(t, target_div); +} diff --git a/test/fixtures/wpt/dom/events/scrolling/scrollend-with-snap-on-fractional-offset.html b/test/fixtures/wpt/dom/events/scrolling/scrollend-with-snap-on-fractional-offset.html new file mode 100644 index 00000000000000..d1f50304add2ac --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/scrollend-with-snap-on-fractional-offset.html @@ -0,0 +1,85 @@ + + + + + + + +
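The scrollbar helpers above share one geometric trick, sketched here under the assumption of a classic (non-overlay) scrollbar: the gutter width is the difference between the border box and the client area, and coordinates are rounded because some webdriver versions reject non-integer pointerMove arguments.

    function gutterClickPoint(scroller) {
      const scrollbarWidth = scroller.offsetWidth - scroller.clientWidth;
      const bounds = scroller.getBoundingClientRect();
      return {
        x: Math.round(bounds.right - scrollbarWidth / 2), // middle of the track
        y: Math.round(bounds.bottom - 20),                // just above the track's end
      };
    }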
    +
    1
    +
    2
    +
    3
    +
    + + + \ No newline at end of file diff --git a/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-basic.html b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-basic.html new file mode 100644 index 00000000000000..1b9df69ec77f88 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-basic.html @@ -0,0 +1,140 @@ + + + + + + + + + + + + + + + + + + +
    +
    +
    +
    +
    +
    +
    +
    +
    +
    + + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-multiple-action-chains.html b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-multiple-action-chains.html new file mode 100644 index 00000000000000..3b46b2f99a9c99 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-multiple-action-chains.html @@ -0,0 +1,101 @@ + + + + + + + + + + + + + + +
    +
    + + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-display-change.html b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-display-change.html new file mode 100644 index 00000000000000..6ca3c78b74cefe --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-display-change.html @@ -0,0 +1,100 @@ + + + + + + + + + + + + + + + + + +
    +
    +
    + + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-elements.html b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-elements.html new file mode 100644 index 00000000000000..0109b7b6b18b92 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-elements.html @@ -0,0 +1,74 @@ + + + + + + + + + + + + + + + +
    X
    +
    + + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-move.html b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-move.html new file mode 100644 index 00000000000000..a739d1cd5479ad --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-move.html @@ -0,0 +1,85 @@ + + + + + + + + + + + + + + +
    +
    +
    + + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-removal.html b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-removal.html new file mode 100644 index 00000000000000..f81efd22e22b92 --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-removal.html @@ -0,0 +1,92 @@ + + + + + + + + + + + + + + +
    +
    +
    + + + + diff --git a/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-resize.html b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-resize.html new file mode 100644 index 00000000000000..eb7431fca475db --- /dev/null +++ b/test/fixtures/wpt/dom/events/scrolling/wheel-event-transactions-target-resize.html @@ -0,0 +1,97 @@ + + + + + + + + + + + + + + + + + +
    +
    +
    + + + + diff --git a/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-decoder.any.js b/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-decoder.any.js index fca68358623bd9..aa226e185be7ca 100644 --- a/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-decoder.any.js +++ b/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-decoder.any.js @@ -66,24 +66,24 @@ decode([0xFE, 0x6D], "\u9FB8", "GB18030-2022 15"); decode([0xFE, 0x7E], "\u9FB9", "GB18030-2022 16"); decode([0xFE, 0x90], "\u9FBA", "GB18030-2022 17"); decode([0xFE, 0xA0], "\u9FBB", "GB18030-2022 18"); -decode([0x82, 0x35, 0x90, 0x37], "\uE81E", "GB18030-2022 19"); -decode([0x82, 0x35, 0x90, 0x38], "\uE826", "GB18030-2022 20"); -decode([0x82, 0x35, 0x90, 0x39], "\uE82B", "GB18030-2022 21"); -decode([0x82, 0x35, 0x91, 0x30], "\uE82C", "GB18030-2022 22"); -decode([0x82, 0x35, 0x91, 0x31], "\uE832", "GB18030-2022 23"); -decode([0x82, 0x35, 0x91, 0x32], "\uE843", "GB18030-2022 24"); -decode([0x82, 0x35, 0x91, 0x33], "\uE854", "GB18030-2022 25"); -decode([0x82, 0x35, 0x91, 0x34], "\uE864", "GB18030-2022 26"); -decode([0x84, 0x31, 0x82, 0x36], "\uE78D", "GB18030-2022 27"); -decode([0x84, 0x31, 0x82, 0x37], "\uE78F", "GB18030-2022 28"); -decode([0x84, 0x31, 0x82, 0x38], "\uE78E", "GB18030-2022 29"); -decode([0x84, 0x31, 0x82, 0x39], "\uE790", "GB18030-2022 30"); -decode([0x84, 0x31, 0x83, 0x30], "\uE791", "GB18030-2022 31"); -decode([0x84, 0x31, 0x83, 0x31], "\uE792", "GB18030-2022 32"); -decode([0x84, 0x31, 0x83, 0x32], "\uE793", "GB18030-2022 33"); -decode([0x84, 0x31, 0x83, 0x33], "\uE794", "GB18030-2022 34"); -decode([0x84, 0x31, 0x83, 0x34], "\uE795", "GB18030-2022 35"); -decode([0x84, 0x31, 0x83, 0x35], "\uE796", "GB18030-2022 36"); +decode([0x82, 0x35, 0x90, 0x37], "\u9FB4", "GB18030-2022 19"); +decode([0x82, 0x35, 0x90, 0x38], "\u9FB5", "GB18030-2022 20"); +decode([0x82, 0x35, 0x90, 0x39], "\u9FB6", "GB18030-2022 21"); +decode([0x82, 0x35, 0x91, 0x30], "\u9FB7", "GB18030-2022 22"); +decode([0x82, 0x35, 0x91, 0x31], "\u9FB8", "GB18030-2022 23"); +decode([0x82, 0x35, 0x91, 0x32], "\u9FB9", "GB18030-2022 24"); +decode([0x82, 0x35, 0x91, 0x33], "\u9FBA", "GB18030-2022 25"); +decode([0x82, 0x35, 0x91, 0x34], "\u9FBB", "GB18030-2022 26"); +decode([0x84, 0x31, 0x82, 0x36], "\uFE10", "GB18030-2022 27"); +decode([0x84, 0x31, 0x82, 0x37], "\uFE11", "GB18030-2022 28"); +decode([0x84, 0x31, 0x82, 0x38], "\uFE12", "GB18030-2022 29"); +decode([0x84, 0x31, 0x82, 0x39], "\uFE13", "GB18030-2022 30"); +decode([0x84, 0x31, 0x83, 0x30], "\uFE14", "GB18030-2022 31"); +decode([0x84, 0x31, 0x83, 0x31], "\uFE15", "GB18030-2022 32"); +decode([0x84, 0x31, 0x83, 0x32], "\uFE16", "GB18030-2022 33"); +decode([0x84, 0x31, 0x83, 0x33], "\uFE17", "GB18030-2022 34"); +decode([0x84, 0x31, 0x83, 0x34], "\uFE18", "GB18030-2022 35"); +decode([0x84, 0x31, 0x83, 0x35], "\uFE19", "GB18030-2022 36"); let i = 0; for (const range of ranges) { diff --git a/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-encoder.html b/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-encoder.html index 531d26084e27dc..dc0dc43468f248 100644 --- a/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-encoder.html +++ b/test/fixtures/wpt/encoding/legacy-mb-schinese/gb18030/gb18030-encoder.html @@ -43,24 +43,24 @@ encode("\u9FB9", "%FE~", "GB18030-2022 16"); encode("\u9FBA", "%FE%90", "GB18030-2022 17"); encode("\u9FBB", "%FE%A0", "GB18030-2022 18"); - encode("\uE78D", "%841%826", "GB18030-2022 19"); - 
encode("\uE78E", "%841%828", "GB18030-2022 20"); - encode("\uE78F", "%841%827", "GB18030-2022 21"); - encode("\uE790", "%841%829", "GB18030-2022 22"); - encode("\uE791", "%841%830", "GB18030-2022 23"); - encode("\uE792", "%841%831", "GB18030-2022 24"); - encode("\uE793", "%841%832", "GB18030-2022 25"); - encode("\uE794", "%841%833", "GB18030-2022 26"); - encode("\uE795", "%841%834", "GB18030-2022 27"); - encode("\uE796", "%841%835", "GB18030-2022 28"); - encode("\uE81E", "%825%907", "GB18030-2022 29"); - encode("\uE826", "%825%908", "GB18030-2022 30"); - encode("\uE82B", "%825%909", "GB18030-2022 31"); - encode("\uE82C", "%825%910", "GB18030-2022 32"); - encode("\uE832", "%825%911", "GB18030-2022 33"); - encode("\uE843", "%825%912", "GB18030-2022 34"); - encode("\uE854", "%825%913", "GB18030-2022 35"); - encode("\uE864", "%825%914", "GB18030-2022 36"); + encode("\uE78D", "%A6%D9", "GB18030-2022 19"); + encode("\uE78E", "%A6%DA", "GB18030-2022 20"); + encode("\uE78F", "%A6%DB", "GB18030-2022 21"); + encode("\uE790", "%A6%DC", "GB18030-2022 22"); + encode("\uE791", "%A6%DD", "GB18030-2022 23"); + encode("\uE792", "%A6%DE", "GB18030-2022 24"); + encode("\uE793", "%A6%DF", "GB18030-2022 25"); + encode("\uE794", "%A6%EC", "GB18030-2022 26"); + encode("\uE795", "%A6%ED", "GB18030-2022 27"); + encode("\uE796", "%A6%F3", "GB18030-2022 28"); + encode("\uE81E", "%FEY", "GB18030-2022 29"); + encode("\uE826", "%FEa", "GB18030-2022 30"); + encode("\uE82B", "%FEf", "GB18030-2022 31"); + encode("\uE82C", "%FEg", "GB18030-2022 32"); + encode("\uE832", "%FEm", "GB18030-2022 33"); + encode("\uE843", "%FE~", "GB18030-2022 34"); + encode("\uE854", "%FE%90", "GB18030-2022 35"); + encode("\uE864", "%FE%A0", "GB18030-2022 36"); const upperCaseNibble = x => { return Math.floor(x).toString(16).toUpperCase(); diff --git a/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-decoder.any.js b/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-decoder.any.js index c0221480da156d..b7f1ca9c51e88f 100644 --- a/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-decoder.any.js +++ b/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-decoder.any.js @@ -1,3 +1,5 @@ +// Additional tests can be found in ../gb18030/gb18030-decoder.any.js + const gbkPointers = [ 6432, 7533, 7536, 7672, 7673, 7674, 7675, 7676, 7677, 7678, 7679, 7680, 7681, 7682, 7683, 7684, 23766, 23770, 23771, 23772, 23773, 23774, 23776, 23777, 23778, 23779, 23780, 23781, 23782, 23784, 23785, 23786, diff --git a/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-encoder.html b/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-encoder.html index e43cb73fea72e5..11557242e3dc65 100644 --- a/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-encoder.html +++ b/test/fixtures/wpt/encoding/legacy-mb-schinese/gbk/gbk-encoder.html @@ -23,4 +23,43 @@ encode("\u00A5", "%26%23165%3B", "legacy WebKit case 1"); encode("\u22EF", "%26%238943%3B", "legacy WebKit case 2"); encode("\u301C", "%26%2312316%3B", "legacy WebKit case 3"); + encode("\u{10FFFF}", "%26%231114111%3B", "U+10FFFF"); + + // GB18030-2022 + encode("\uFE10", "%A6%D9", "GB18030-2022 1"); + encode("\uFE12", "%A6%DA", "GB18030-2022 2"); + encode("\uFE11", "%A6%DB", "GB18030-2022 3"); + encode("\uFE13", "%A6%DC", "GB18030-2022 4"); + encode("\uFE14", "%A6%DD", "GB18030-2022 5"); + encode("\uFE15", "%A6%DE", "GB18030-2022 6"); + encode("\uFE16", "%A6%DF", "GB18030-2022 7"); + encode("\uFE17", "%A6%EC", "GB18030-2022 8"); + encode("\uFE18", "%A6%ED", "GB18030-2022 9"); + encode("\uFE19", "%A6%F3", 
"GB18030-2022 10"); + encode("\u9FB4", "%FEY", "GB18030-2022 11"); + encode("\u9FB5", "%FEa", "GB18030-2022 12"); + encode("\u9FB6", "%FEf", "GB18030-2022 13"); + encode("\u9FB7", "%FEg", "GB18030-2022 14"); + encode("\u9FB8", "%FEm", "GB18030-2022 15"); + encode("\u9FB9", "%FE~", "GB18030-2022 16"); + encode("\u9FBA", "%FE%90", "GB18030-2022 17"); + encode("\u9FBB", "%FE%A0", "GB18030-2022 18"); + encode("\uE78D", "%A6%D9", "GB18030-2022 19"); + encode("\uE78E", "%A6%DA", "GB18030-2022 20"); + encode("\uE78F", "%A6%DB", "GB18030-2022 21"); + encode("\uE790", "%A6%DC", "GB18030-2022 22"); + encode("\uE791", "%A6%DD", "GB18030-2022 23"); + encode("\uE792", "%A6%DE", "GB18030-2022 24"); + encode("\uE793", "%A6%DF", "GB18030-2022 25"); + encode("\uE794", "%A6%EC", "GB18030-2022 26"); + encode("\uE795", "%A6%ED", "GB18030-2022 27"); + encode("\uE796", "%A6%F3", "GB18030-2022 28"); + encode("\uE81E", "%FEY", "GB18030-2022 29"); + encode("\uE826", "%FEa", "GB18030-2022 30"); + encode("\uE82B", "%FEf", "GB18030-2022 31"); + encode("\uE82C", "%FEg", "GB18030-2022 32"); + encode("\uE832", "%FEm", "GB18030-2022 33"); + encode("\uE843", "%FE~", "GB18030-2022 34"); + encode("\uE854", "%FE%90", "GB18030-2022 35"); + encode("\uE864", "%FE%A0", "GB18030-2022 36"); diff --git a/test/fixtures/wpt/encoding/resources/utf-32-big-endian-nobom.xml b/test/fixtures/wpt/encoding/resources/utf-32-big-endian-nobom.xml index f51213c965ce74..f704501ccae0d6 100644 Binary files a/test/fixtures/wpt/encoding/resources/utf-32-big-endian-nobom.xml and b/test/fixtures/wpt/encoding/resources/utf-32-big-endian-nobom.xml differ diff --git a/test/fixtures/wpt/encoding/resources/utf-32-little-endian-nobom.xml b/test/fixtures/wpt/encoding/resources/utf-32-little-endian-nobom.xml index 64e6051a2c6b07..465f44df5f782a 100644 Binary files a/test/fixtures/wpt/encoding/resources/utf-32-little-endian-nobom.xml and b/test/fixtures/wpt/encoding/resources/utf-32-little-endian-nobom.xml differ diff --git a/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html b/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html index 2d8c4e2e83944d..bf71615f7b15c5 100644 --- a/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html +++ b/test/fixtures/wpt/resource-timing/cached-image-gets-single-entry.html @@ -14,7 +14,7 @@

    Description

    Test that a reused resource only appears in the buffer once.
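Returning to the GB18030-2022 fixture updates earlier in this section: the remapping is directly observable through the Encoding API. A sketch using a byte sequence and expected code point taken from the updated decoder test:

    const bytes = new Uint8Array([0x84, 0x31, 0x82, 0x36]);
    const text = new TextDecoder("gb18030").decode(bytes);
    // GB18030-2022 maps this sequence to U+FE10 instead of the private-use
    // code point U+E78D that the 2005 mapping produced.
    console.log(text === "\uFE10"); // true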