diff --git a/.circleci/config.yml b/.circleci/config.yml deleted file mode 100644 index 227603cbbd..0000000000 --- a/.circleci/config.yml +++ /dev/null @@ -1,30 +0,0 @@ -version: 2 -jobs: - e2e: - docker: - - image: cimg/go:1.22 # If you update this, update it in the Makefile too - environment: - # This version of TF will be downloaded before Atlantis is started. - # We do this instead of setting --default-tf-version because setting - # that flag starts the download asynchronously so we'd have a race - # condition. - # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp - TERRAFORM_VERSION: 1.7.4 - steps: - - checkout - - run: make build-service - # We don't run e2e tests on fork PRs because they don't have access to the secret env vars. - - run: if [ -z "${CIRCLE_PR_REPONAME}" ]; then ./scripts/e2e.sh; fi - -workflows: - version: 2 - branch: - jobs: - - e2e: - context: - - atlantis-e2e-tests - filters: - branches: - # Ignore fork PRs since they don't have access to - # the atlantis-e2e-tests context (and also doc PRs). - ignore: /(pull\/\d+)|(docs\/.*)/ diff --git a/.dockerignore b/.dockerignore index d9647e1977..523596ac26 100644 --- a/.dockerignore +++ b/.dockerignore @@ -1,5 +1,6 @@ * !cmd/ +!scripts/download-release.sh !server/ !testdrive/ !main.go diff --git a/.editorconfig b/.editorconfig new file mode 100644 index 0000000000..3f90f6406f --- /dev/null +++ b/.editorconfig @@ -0,0 +1,12 @@ +root = true + +[*] +charset = utf-8 +end_of_line = lf +trim_trailing_whitespace = true +insert_final_newline = true + +[*.md] +indent_style = space +indent_size = 3 +trim_trailing_whitespace = false diff --git a/.gitattributes b/.gitattributes index 60fad8a123..d56abbf304 100644 --- a/.gitattributes +++ b/.gitattributes @@ -1,11 +1,2 @@ # Set the default behavior, in case people don't have core.autocrlf set. -* text=auto - -# Explicitly declare text files you want to always be normalized and converted -# to native line endings on checkout. 
-*.go text -*.json text -*.yml text -*.yaml text -*.sh text -*.tf text +* text=auto eol=lf diff --git a/.github/labeler.yml b/.github/labeler.yml index 7d6cf75daf..6dd6741d81 100644 --- a/.github/labeler.yml +++ b/.github/labeler.yml @@ -14,7 +14,8 @@ docs: github-actions: - changed-files: - - any-glob-to-any-file: '.github/**' + - any-glob-to-any-file: + - '.github/workflows/*.yml' go: - changed-files: @@ -40,6 +41,10 @@ provider/gitlab: website: - changed-files: - - any-glob-to-any-file: 'runatlantis.io/.vuepress/**/*' + - any-glob-to-any-file: 'runatlantis.io/.vitepress/**/*' - any-glob-to-any-file: 'package.json' - - any-glob-to-any-file: 'pnpm-lock.yaml' + - any-glob-to-any-file: 'package-lock.json' + +blog: +- changed-files: + - any-glob-to-any-file: 'runatlantis.io/blog/**' diff --git a/.github/release.yml b/.github/release.yml index df2619416f..0029e2d496 100644 --- a/.github/release.yml +++ b/.github/release.yml @@ -5,6 +5,7 @@ changelog: - github-actions authors: - octocat + - renovate[bot] categories: - title: Breaking Changes 🛠 labels: @@ -15,9 +16,9 @@ changelog: - Semver-Minor - enhancement - feature - - title: Bug fixes + - title: Bug fixes 🐛 labels: - bug - - title: Other Changes + - title: Other Changes 🔄 labels: - "*" diff --git a/.github/renovate.json5 b/.github/renovate.json5 index e520313b2a..4b403d4fce 100644 --- a/.github/renovate.json5 +++ b/.github/renovate.json5 @@ -1,108 +1,117 @@ { extends: [ - "config:base", - "schedule:daily", + 'config:best-practices', + ':separateMultipleMajorReleases', + 'schedule:daily', ], - commitMessageSuffix: " in {{packageFile}}", + commitMessageSuffix: ' in {{packageFile}}', dependencyDashboardAutoclose: true, automerge: true, - baseBranches: ["main", "/^release\-.*/"], + baseBranches: [ + 'main', + '/^release-.*/', + ], platformAutomerge: true, - labels: ["dependencies"], + labels: [ + 'dependencies', + ], postUpdateOptions: [ - "gomodTidy", - "gomodUpdateImportPaths", - "pnpmDedupe", + 'gomodTidy', + 'gomodUpdateImportPaths', + 'npmDedupe', ], - // needed so e2e tests do not stomp over each other prHourlyLimit: 1, - lockFileMaintenance: { - enabled: true, - }, + minimumReleaseAge: '5 days', + osvVulnerabilityAlerts: true, vulnerabilityAlerts: { enabled: true, labels: [ - "security", + 'security', ], }, packageRules: [ - // For vuepress { - "matchPackageNames": ["vuepress", "@vuepress/client", "@vuepress/markdown", "@vuepress/utils"], - "groupName": "vuepress", - "allowedVersions": "!/pre.*$/", + "matchFileNames": ["package.json"], + "enabled": false }, - // e2e test depends on testing/Dockefile testing-image which has conftest specific version. - // to upgrade conftest versions, we need following PRs. - // 1. update testing/Dockerfile conftest version - // 2. update testing-env tag - // 3. 
update e2e conftest version
-    // This will allow conftest version updates in testing/Dockefile
     {
-      matchPaths: ["testing/**"],
-      matchPackagePatterns: ["conftest"],
-      additionalBranchPrefix: "{{baseDir}}-",
-      groupName: "conftest-testing",
-      /*
-      prBodyNotes: [
-        ":warning: Upgrade testing-env conftest and then upgrade other conftest versions for e2e :warning:",
+      matchFileNames: [
+        'testing/**',
+      ],
+      matchPackagePatterns: [
+        'conftest',
       ],
-      */
+      additionalBranchPrefix: '{{packageFileDir}}-',
+      groupName: 'conftest-testing',
     },
     {
-      ignorePaths: ["testing/**"],
-      matchPackagePatterns: ["github-actions"],
-      groupName: "github-",
+      ignorePaths: [
+        'testing/**',
+      ],
+      matchPackagePatterns: [
+        'github-actions',
+      ],
+      groupName: 'github-',
     },
-    /*
-    // This tag is currently latest so we can skip this check for now unless we need to pin it again.
     {
-      // we need to upgrade testing-env on ci quickly
-      matchPackageNames: ["ghcr.io/runatlantis/testing-env"],
-      groupName: "testing-env-ci-test",
-      schedule: ["every 1 hour after 00:00 and before 23:59 every day"],
+      matchDatasources: [
+        'docker',
+      ],
+      matchPackageNames: [
+        'node',
+        'cimg/node',
+      ],
+      versioning: 'node',
     },
-    */
     {
-      // use LTS node version for node docker image
-      matchDatasources: ["docker"],
-      matchPackageNames: ["node", "cimg/node"],
-      versioning: "node",
+      matchPackageNames: [
+        'go',
+        'golang',
+      ],
+      versioning: 'go',
+      groupName: 'go'
     },
   ],
-  // https://docs.renovatebot.com/modules/manager/regex/
-  regexManagers: [
+  customManagers: [
     {
-      fileMatch: ["(^|/)Dockerfile$", "(^|/)Dockerfile\\.[^/]*$"],
+      customType: 'regex',
+      fileMatch: [
+        '(^|/)Dockerfile$',
+        '(^|/)Dockerfile\\.[^/]*$',
+      ],
       matchStrings: [
         // example:
-        // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
+        // # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
         // ENV DEFAULT_TERRAFORM_VERSION=x.x.x
-        "renovate: datasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?\\sENV .*?_VERSION=(?<currentValue>.*)\\s",
+        // # renovate: datasource=github-releases depName=open-policy-agent/conftest
+        // ARG DEFAULT_CONFTEST_VERSION=x.x.x
+        "renovate: datasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?\\s(ARG|ENV) .*?_VERSION=(?<currentValue>.*)\\s",
       ],
-      versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}",
+      versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}',
       extractVersionTemplate: '^v(?<version>\\d+\\.\\d+\\.\\d+)',
     },
     {
-      fileMatch: [".*go$"],
+      customType: 'regex',
+      fileMatch: [
+        '.*go$',
+      ],
       matchStrings: [
-        // example:
-        // const ConftestVersion = "x.x.x" // renovate: datasource=github-releases depName=open-policy-agent/conftest
-        "\\sconst .*Version = \"(?<currentValue>.*)\"\\s// renovate: datasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?\\s",
+        '\\sconst .*Version = "(?<currentValue>.*)"\\s// renovate: datasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?\\s',
       ],
-      versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}",
+      versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}',
       extractVersionTemplate: '^v(?<version>\\d+\\.\\d+\\.\\d+)',
     },
     {
-      fileMatch: [".circleci/config.yml$"],
+      customType: 'regex',
+      fileMatch: [
+        '^\\.github/workflows/[^/]+\\.ya?ml$',
+        'Makefile$',
+      ],
       matchStrings: [
-        // example:
-        // # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
-        // TRRAFORM_VERSION: x.x.x
-        "renovate: datasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?\\s.*?_VERSION: (?<currentValue>.*)\\s",
+        'renovate: datasource=(?<datasource>.*?) depName=(?<depName>.*?)( versioning=(?<versioning>.*?))?\\s.*?_VERSION: (?<currentValue>.*)\\s',
       ],
-      versioningTemplate: "{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}",
+      versioningTemplate: '{{#if versioning}}{{{versioning}}}{{else}}semver{{/if}}',
       extractVersionTemplate: '^v(?<version>\\d+\\.\\d+\\.\\d+)',
     },
-  ]
+  ],
 }
diff --git a/.github/workflows/atlantis-image.yml b/.github/workflows/atlantis-image.yml
index 157a5c1bb7..d755c595fa 100644
--- a/.github/workflows/atlantis-image.yml
+++ b/.github/workflows/atlantis-image.yml
@@ -6,12 +6,16 @@ on:
     - 'main'
     - 'release-**'
     tags:
-      - v*.*.* # stable release like, v0.19.2
-      - v*.*.*-pre.* # pre release like, v0.19.0-pre.calendardate
+      - v*.*.*
   pull_request:
     branches:
     - 'main'
     - 'release-**'
+    types:
+    - opened
+    - reopened
+    - synchronize
+    - ready_for_review
   workflow_dispatch:

 concurrency:
@@ -23,10 +27,10 @@ jobs:
     outputs:
       should-run-build: ${{ steps.changes.outputs.src == 'true' || startsWith(github.ref, 'refs/tags/') }}
     if: github.event.pull_request.draft == false
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v4
-      - uses: dorny/paths-filter@v3
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         id: changes
         with:
           filters: |
@@ -44,7 +48,7 @@ jobs:
     strategy:
       matrix:
         image_type: [alpine, debian]
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     env:
       # Set docker repo to either the fork or the main repo where the branch exists
       DOCKER_REPO: ghcr.io/${{ github.repository }}
       PUSH: ${{ github.event_name != 'pull_request' && (github.ref == 'refs/heads/main' || startsWith(github.ref, 'refs/tags/')) }}

     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

       # Lint the Dockerfile first before setting anything up
       - name: Lint Dockerfile
-        uses: hadolint/hadolint-action@v3.1.0
+        uses: hadolint/hadolint-action@54c9adbab1582c2ef04b2016b760714a4bfde3cf # v3.1.0
         with:
           dockerfile: "Dockerfile"

       - name: Set up QEMU
-        uses: docker/setup-qemu-action@v3
+        uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3
         with:
           image: tonistiigi/binfmt:latest
           platforms: arm64,arm

       - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
+        uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3
         # https://github.com/docker/build-push-action/issues/761#issuecomment-1575006515
         with:
           driver-opts: |
-            image=moby/buildkit:v0.12.5
+            image=moby/buildkit:v0.14.0

       # release version is the name of the tag i.e. v0.10.0
       # release version also has the image type appended i.e.
v0.10.0-alpine @@ -81,7 +85,7 @@ jobs: # if it's v0.10.0 and debian, it will do v0.10.0-debian, latest-debian - name: Docker meta id: meta - uses: docker/metadata-action@v5 + uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5 env: SUFFIX: ${{ format('-{0}', matrix.image_type) }} with: @@ -113,7 +117,7 @@ jobs: # Suffix is not used here since there's no way to disable it above - name: Login to Packages Container registry - uses: docker/login-action@v3 + uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -126,7 +130,7 @@ jobs: - name: "Build ${{ env.PUSH == 'true' && 'and push' || '' }} ${{ env.DOCKER_REPO }} image" if: contains(fromJson('["push", "pull_request"]'), github.event_name) - uses: docker/build-push-action@v5 + uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5 with: cache-from: type=gha cache-to: type=gha,mode=max @@ -143,6 +147,51 @@ jobs: labels: ${{ steps.meta.outputs.labels }} outputs: type=image,name=target,annotation-index.org.opencontainers.image.description=${{ fromJSON(steps.meta.outputs.json).labels['org.opencontainers.image.description'] }} + test: + needs: [changes] + if: needs.changes.outputs.should-run-build == 'true' + name: Test Image With Goss + runs-on: ubuntu-24.04 + strategy: + matrix: + image_type: [alpine, debian] + env: + # Set docker repo to either the fork or the main repo where the branch exists + DOCKER_REPO: ghcr.io/${{ github.repository }} + + steps: + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + + - name: Set up Docker Buildx + uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3 + # https://github.com/docker/build-push-action/issues/761#issuecomment-1575006515 + with: + driver-opts: | + image=moby/buildkit:v0.14.0 + + - name: "Build and load into Docker" + if: contains(fromJson('["push", "pull_request"]'), github.event_name) + uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5 + with: + cache-from: type=gha + cache-to: type=gha,mode=max + context: . 
+          build-args: |
+            ATLANTIS_BASE_TAG_TYPE=${{ matrix.image_type }}
+          push: false
+          load: true
+          tags: "${{ env.DOCKER_REPO }}:goss-test"
+          target: ${{ matrix.image_type }}
+
+      - name: "Setup Goss"
+        uses: e1himself/goss-installation-action@fbb6fb55d3e59c96045b2500eeb8ce0995d99ac1 # v1.2.1
+        with:
+          version: "v0.4.7"
+
+      - name: Execute Goss tests
+        run: |
+          dgoss run --rm ${{ env.DOCKER_REPO }}:goss-test bash -c 'while true; do sleep 1; done;'
+
   skip-build:
     needs: [changes]
     if: needs.changes.outputs.should-run-build == 'false'
     name: Build Image
     strategy:
       matrix:
         image_type: [alpine, debian]
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
     - run: 'echo "No build required"'
diff --git a/.github/workflows/codeql.yml b/.github/workflows/codeql.yml
index 8be3df50f9..c27ec3f9ee 100644
--- a/.github/workflows/codeql.yml
+++ b/.github/workflows/codeql.yml
@@ -35,10 +35,10 @@ jobs:
     outputs:
       should-run-analyze: ${{ steps.changes.outputs.src == 'true' }}
     if: github.event.pull_request.draft == false
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v4
-      - uses: dorny/paths-filter@v3
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         id: changes
         with:
           filters: |
@@ -50,7 +50,7 @@ jobs:
     needs: [changes]
     name: Analyze
     if: github.event.pull_request.draft == false && needs.changes.outputs.should-run-analyze == 'true'
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     permissions:
       actions: read
       contents: read
@@ -67,11 +67,11 @@ jobs:
     steps:
     - name: Checkout repository
-      uses: actions/checkout@v4
+      uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4

     # Initializes the CodeQL tools for scanning.
     - name: Initialize CodeQL
-      uses: github/codeql-action/init@v3
+      uses: github/codeql-action/init@23acc5c183826b7a8a97bce3cecc52db901f8251 # v3
       with:
         languages: ${{ matrix.language }}
         # If you wish to specify custom queries, you can do so here or in a config file.
@@ -85,7 +85,7 @@ jobs:
     # Autobuild attempts to build any compiled languages (C/C++, C#, Go, or Java).
     # If this step fails, then you should remove it and run the build manually (see below)
     - name: Autobuild
-      uses: github/codeql-action/autobuild@v3
+      uses: github/codeql-action/autobuild@23acc5c183826b7a8a97bce3cecc52db901f8251 # v3

     # ℹ️ Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun @@ -98,7 +98,7 @@ jobs: # ./location_of_script_within_repo/buildscript.sh - name: Perform CodeQL Analysis - uses: github/codeql-action/analyze@v3 + uses: github/codeql-action/analyze@23acc5c183826b7a8a97bce3cecc52db901f8251 # v3 with: category: "/language:${{matrix.language}}" @@ -109,6 +109,6 @@ jobs: strategy: matrix: language: [ 'go', 'javascript' ] - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - run: 'echo "No build required"' + - run: 'echo "No build required"' diff --git a/.github/workflows/labeler.yml b/.github/workflows/labeler.yml index aed089def0..a828fe3eb7 100644 --- a/.github/workflows/labeler.yml +++ b/.github/workflows/labeler.yml @@ -14,6 +14,6 @@ jobs: contents: read pull-requests: write if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/labeler@v5 + - uses: actions/labeler@8558fd74291d67161a8a78ce36a881fa63b766a9 # v5 diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml index 02ab6f7365..9c1653da61 100644 --- a/.github/workflows/lint.yml +++ b/.github/workflows/lint.yml @@ -15,15 +15,23 @@ concurrency: group: ${{ github.workflow }}-${{ github.head_ref || github.run_id }} cancel-in-progress: true +permissions: + # Required: allow read access to the content for analysis. + contents: read + # Optional: allow read access to pull request. Use with `only-new-issues` option. + pull-requests: read + # Optional: Allow write access to checks to allow the action to annotate code in the PR. + checks: write + jobs: changes: outputs: should-run-linting: ${{ steps.changes.outputs.go == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -37,24 +45,25 @@ jobs: needs: [changes] if: github.event.pull_request.draft == false && needs.changes.outputs.should-run-linting == 'true' name: Linting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 # need to setup go toolchain explicitly - - uses: actions/setup-go@v5 + - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5 with: go-version-file: go.mod - name: golangci-lint - uses: reviewdog/action-golangci-lint@v2 + uses: golangci/golangci-lint-action@a4f60bb28d35aeee14e6880718e0c85ff1882e64 # v6 with: - tool_name: golangci-lint + # renovate: datasource=github-releases depName=golangci/golangci-lint + version: v1.59.1 skip-lint: needs: [changes] if: needs.changes.outputs.should-run-linting == 'false' name: Linting - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - run: 'echo "No build required"' diff --git a/.github/workflows/pr-lint.yml b/.github/workflows/pr-lint.yml index e3a5b647ac..47909ecb8b 100644 --- a/.github/workflows/pr-lint.yml +++ b/.github/workflows/pr-lint.yml @@ -13,8 +13,8 @@ permissions: jobs: main: name: Validate PR title - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: amannn/action-semantic-pull-request@v5 + - uses: amannn/action-semantic-pull-request@e32d7e603df1aa1ba07e981f2a23455dee596825 # v5 env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git 
a/.github/workflows/pr-size-labeler.yml b/.github/workflows/pr-size-labeler.yml new file mode 100644 index 0000000000..8576c5c141 --- /dev/null +++ b/.github/workflows/pr-size-labeler.yml @@ -0,0 +1,28 @@ +name: pr-size + +on: [pull_request] + +jobs: + labeler: + runs-on: ubuntu-latest + name: Label the PR size + steps: + - uses: codelytv/pr-size-labeler@54ef36785e9f4cb5ecf1949cfc9b00dbb621d761 # v1 + with: + GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} + xs_label: 'size/xs' + xs_max_size: '10' + s_label: 'size/s' + s_max_size: '200' + m_label: 'size/m' + m_max_size: '1000' + l_label: 'size/l' + l_max_size: '10000' + xl_label: 'size/xl' + fail_if_xl: 'false' + message_if_xl: > + This PR exceeds the recommended size of 1000 lines. + Please make sure you are NOT addressing multiple issues with one PR. + Note this PR might be rejected due to its size. + github_api_url: 'https://api.github.com' + files_to_ignore: '' diff --git a/.github/workflows/release.yml b/.github/workflows/release.yml index 479e404f69..b238df4884 100644 --- a/.github/workflows/release.yml +++ b/.github/workflows/release.yml @@ -3,30 +3,30 @@ name: release on: push: tags: - - v*.*.* # stable release like, v0.19.2 - - v*.*.*-pre.* # pre release like, v0.19.0-pre.calendardate + - v*.*.* workflow_dispatch: jobs: goreleaser: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 with: submodules: true - - uses: actions/setup-go@v5 + - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5 with: go-version-file: go.mod - name: Run GoReleaser for stable release - uses: goreleaser/goreleaser-action@v5 + uses: goreleaser/goreleaser-action@286f3b13b1b49da4ac219696163fb8c1c93e1200 # v6 if: (!contains(github.ref, 'pre')) with: - version: v1.16.2 # You can pass flags to goreleaser via GORELEASER_ARGS # --clean will save you deleting the dist dir args: release --clean + distribution: goreleaser # or 'goreleaser-pro' + version: "~> v2" # or 'latest', 'nightly', semver env: GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} @@ -41,32 +41,3 @@ jobs: -q .body > tmp-CHANGELOG.md env: GITHUB_TOKEN: ${{secrets.GITHUB_TOKEN}} - - - name: Run GoReleaser for pre-release - uses: goreleaser/goreleaser-action@v5 - if: contains(github.ref, 'pre') - with: - version: v1.16.2 - # You can pass flags to goreleaser via GORELEASER_ARGS - # --clean will save you deleting the dist dir - args: release --clean --release-notes=tmp-CHANGELOG.md - env: - GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }} - GORELEASER_CURRENT_TAG: ${{ steps.changelog.outputs.RELEASE_TAG }} - - homebrew: - name: "Bump Homebrew formula" - runs-on: ubuntu-22.04 - if: false - # if: (!contains(github.ref, 'pre')) - steps: - - uses: mislav/bump-homebrew-formula-action@v2 - with: - # A PR will be sent to github.com/Homebrew/homebrew-core to update this formula: - formula-name: atlantis - commit-message: | - {{formulaName}} {{version}} - - Created by https://github.com/mislav/bump-homebrew-formula-action - env: - COMMITTER_TOKEN: ${{ secrets.GITHUB_TOKEN }} diff --git a/.github/workflows/renovate-config.yml b/.github/workflows/renovate-config.yml index bb5258df99..294db488bf 100644 --- a/.github/workflows/renovate-config.yml +++ b/.github/workflows/renovate-config.yml @@ -14,8 +14,8 @@ on: jobs: validate: - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: actions/setup-node@v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + 
- uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4
       - run: npx --package renovate -c 'renovate-config-validator'
diff --git a/.github/workflows/stale.yml b/.github/workflows/stale.yml
index c989d76963..85e89159e4 100644
--- a/.github/workflows/stale.yml
+++ b/.github/workflows/stale.yml
@@ -4,9 +4,9 @@ on:
     - cron: '30 1 * * *'
 jobs:
   stale:
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/stale@v9
+      - uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9
         with:
           stale-pr-message: 'This issue is stale because it has been open for 1 month with no activity. Remove stale label or comment or this will be closed in 1 month.'
           stale-issue-message: 'This issue is stale because it has been open for 1 month with no activity. Remove stale label or comment or this will be closed in 1 month.'
diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index fab30b3b31..685408fd39 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -24,39 +24,41 @@ jobs:
     outputs:
       should-run-tests: ${{ steps.changes.outputs.go == 'true' }}
     if: github.event.pull_request.draft == false
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
-      - uses: actions/checkout@v4
-      - uses: dorny/paths-filter@v3
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3
         id: changes
         with:
           filters: |
             go:
             - '**.go'
+            - '**.txt' # golden file test output
             - 'go.*'
             - '.github/workflows/test.yml'

   test:
     needs: [changes]
     if: needs.changes.outputs.should-run-tests == 'true'
     name: Tests
-    runs-on: ubuntu-22.04
-    container: ghcr.io/runatlantis/testing-env:latest
+    runs-on: ubuntu-24.04
+    container: ghcr.io/runatlantis/testing-env:latest@sha256:82a8cfe34cbf879006bfb3f4252bbeb031291212be0c689c1cc230fc395d0fbf
     steps:
-      - uses: actions/checkout@v4
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
       # need to setup go toolchain explicitly
-      - uses: actions/setup-go@v5
+      - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5
         with:
           go-version-file: go.mod
     - run: make test-all
     - run: make check-fmt
+
     ###########################################################
     # Notifying #contributors about test failure on main branch
     ###########################################################
     - name: Slack failure notification
       if: ${{ github.ref == 'refs/heads/main' && failure() }}
-      uses: slackapi/slack-github-action@6c661ce58804a1a20f6dc5fbee7f0381b469e001 # v1.25.0
+      uses: slackapi/slack-github-action@70cd7be8e40a46e8b0eced40b0de447bdb42f68e # v1.26.0
       with:
         payload: |
           {
@@ -95,6 +97,44 @@ jobs:
     needs: [changes]
     if: needs.changes.outputs.should-run-tests == 'false'
     name: Tests
-    runs-on: ubuntu-22.04
+    runs-on: ubuntu-24.04
     steps:
     - run: 'echo "No build required"'
+
+  e2e-github:
+    runs-on: ubuntu-latest
+    # don't run e2e tests on forked PRs
+    if: github.event.pull_request.head.repo.fork == false
+    env:
+      TERRAFORM_VERSION: 1.8.3
+      ATLANTISBOT_GITHUB_USERNAME: ${{ secrets.ATLANTISBOT_GITHUB_USERNAME }}
+      ATLANTISBOT_GITHUB_TOKEN: ${{ secrets.ATLANTISBOT_GITHUB_TOKEN }}
+      NGROK_AUTH_TOKEN: ${{ secrets.ATLANTISBOT_NGROK_AUTH_TOKEN }}
+    steps:
+      - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4
+      - uses: actions/setup-go@cdcb36043654635271a94b9a6d1392de5bb323a7 # v5
+        with:
+          go-version-file: go.mod
+
+      # This version of TF will be downloaded before Atlantis is started.
+ # We do this instead of setting --default-tf-version because setting + # that flag starts the download asynchronously so we'd have a race + # condition. + - uses: hashicorp/setup-terraform@651471c36a6092792c552e8b1bef71e592b462d8 # v3 + with: + terraform_version: ${{ env.TERRAFORM_VERSION }} + + - name: Setup ngrok + run: | + wget -q -nc https://bin.equinox.io/c/bNyj1mQVY4c/ngrok-v3-stable-linux-amd64.tgz + tar -xzf ngrok-v3-stable-linux-amd64.tgz && \ + chmod +x ngrok + ./ngrok --help + - name: Setup gitconfig + run: | + git config --global user.email "maintainers@runatlantis.io" + git config --global user.name "atlantisbot" + + - run: | + make build-service + ./scripts/e2e.sh diff --git a/.github/workflows/testing-env-image.yml b/.github/workflows/testing-env-image.yml index 0cf8d5ecf2..454b16fa5a 100644 --- a/.github/workflows/testing-env-image.yml +++ b/.github/workflows/testing-env-image.yml @@ -20,10 +20,10 @@ jobs: outputs: should-run-build: ${{ steps.changes.outputs.src == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | @@ -35,21 +35,21 @@ jobs: needs: [changes] if: needs.changes.outputs.should-run-build == 'true' name: Build Testing Env Image - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - name: Set up QEMU - uses: docker/setup-qemu-action@v3 + uses: docker/setup-qemu-action@68827325e0b33c7199eb31dd4e31fbe9023e06e3 # v3 with: image: tonistiigi/binfmt:latest platforms: arm64,arm - name: Set up Docker Buildx - uses: docker/setup-buildx-action@v3 + uses: docker/setup-buildx-action@d70bba72b1f3fd22344832f00baa16ece964efeb # v3 - name: Login to Packages Container registry - uses: docker/login-action@v3 + uses: docker/login-action@0d4c9c5ea7693da7b068278f7b52bda2a190a446 # v3 with: registry: ghcr.io username: ${{ github.actor }} @@ -57,7 +57,7 @@ jobs: - run: echo "TODAY=$(date +"%Y.%m.%d")" >> $GITHUB_ENV - name: Build and push testing-env:${{env.TODAY}} image - uses: docker/build-push-action@v5 + uses: docker/build-push-action@ca052bb54ab0790a636c9b5f226502c73d547a25 # v5 with: cache-from: type=gha cache-to: type=gha,mode=max @@ -72,6 +72,6 @@ jobs: needs: [changes] if: needs.changes.outputs.should-run-build == 'false' name: Build Testing Env Image - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - run: 'echo "No build required"' \ No newline at end of file + - run: 'echo "No build required"' diff --git a/.github/workflows/website.yml b/.github/workflows/website.yml index 8d58751deb..147b6c4aef 100644 --- a/.github/workflows/website.yml +++ b/.github/workflows/website.yml @@ -24,16 +24,16 @@ jobs: outputs: should-run-link-check: ${{ steps.changes.outputs.src == 'true' }} if: github.event.pull_request.draft == false - runs-on: ubuntu-22.04 + runs-on: ubuntu-24.04 steps: - - uses: actions/checkout@v4 - - uses: dorny/paths-filter@v3 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 + - uses: dorny/paths-filter@de90cc6fb38fc0963ad72b210f1f284cd68cea36 # v3 id: changes with: filters: | src: - '**.js' - - 'pnpm-lock.yaml' + - 'package-lock.json' - 'package.json' - '.github/workflows/website.yml' @@ -46,36 +46,53 @@ jobs: name: Website Link Check 
runs-on: ubuntu-latest steps: - - uses: actions/checkout@v4 + - uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4 - - uses: wyvox/action-setup-pnpm@v3 + - name: markdown-lint + uses: DavidAnson/markdownlint-cli2-action@b4c9feab76d8025d1e83c653fa3990936df0e6c8 # v16 with: - node-version: 20 + config: .markdownlint.yaml + globs: 'runatlantis.io/**/*.md' + + - name: setup npm + uses: actions/setup-node@60edb5dd545a775178f52524783378180af0d1f8 # v4 + with: + node-version: '20' + cache: 'npm' - name: run http-server env: # renovate: datasource=github-releases depName=raviqqe/muffet - MUFFET_VERSION: 2.9.3 + MUFFET_VERSION: 2.10.1 run: | # install raviqqe/muffet to check for broken links. curl -Ls https://github.com/raviqqe/muffet/releases/download/v${MUFFET_VERSION}/muffet_linux_amd64.tar.gz | tar -xz # build site - pnpm install - pnpm website:build + npm install + npm run website:build # start http-server for integration testing - npx http-server runatlantis.io/.vuepress/dist & + npx http-server runatlantis.io/.vitepress/dist & + + - name: Run Playwright E2E tests + run: | + npx playwright install --with-deps + npm run e2e - name: wait until server listened run: curl --retry-delay 1 --retry 30 --retry-all-error http://localhost:8080 # medium.com => was being rate limited: HTTP 429 + # twitter.com => too many redirections - run: | ./muffet \ -e 'https://medium.com/runatlantis' \ + -e 'https://dev.to/*' \ + -e 'https://twitter.com/*' \ -e 'https://github\.com/runatlantis/atlantis/edit/main/.*' \ -e 'https://github.com/runatlantis/helm-charts#customization' \ + -e 'https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84' \ -e 'https://confluence.atlassian.com/*' \ --header 'Accept-Encoding:deflate, gzip' \ --buffer-size 8192 \ diff --git a/.gitignore b/.gitignore index a3040a1ee5..fde423fb0f 100644 --- a/.gitignore +++ b/.gitignore @@ -8,7 +8,6 @@ output .cover .terraform/ node_modules/ -**/.vuepress/* helm/test-values.yaml *.swp golangci-lint @@ -16,7 +15,6 @@ atlantis .devcontainer atlantis.env *.act -package-lock.json Dockerfile.local # gitreleaser @@ -27,3 +25,14 @@ tmp-CHANGELOG.md # IDE files *.code-workspace + +# draw.io backup files +*.bkp + +# VitePress build output & cache directory +**/.vitepress/cache +**/.vitepress/dist +**/.vitepress/config.ts.timestamp-* + +# playwright +test-results/ diff --git a/.golangci.yml b/.golangci.yml index b4c6d83c26..0afa70118c 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -19,14 +19,14 @@ linters: - gofmt - gosec - gosimple + - govet - ineffassign - misspell - revive - staticcheck + - testifylint - typecheck - unconvert - unused - - vet - - vetshadow run: timeout: 10m diff --git a/.goreleaser.yml b/.goreleaser.yml index b937a3da07..52d450ba21 100644 --- a/.goreleaser.yml +++ b/.goreleaser.yml @@ -1,15 +1,27 @@ +version: 2 + env: - CGO_ENABLED=0 + builds: - - targets: - - darwin_amd64 - - darwin_arm64 - - linux_386 - - linux_amd64 - - linux_arm - - linux_arm64 - - windows_386 - - windows_amd64 + - id: atlantis + + targets: + - darwin_amd64 + - darwin_arm64 + - linux_386 + - linux_amd64 + - linux_arm + - linux_arm64 + - windows_386 + - windows_amd64 + + flags: + - -trimpath + + ldflags: + - -s -w + - -X main.version={{.Version}} -X main.commit={{.Commit}} -X main.date={{.Date}} archives: - id: zip @@ -19,10 +31,10 @@ archives: - none* checksum: - name_template: 'checksums.txt' + name_template: "checksums.txt" changelog: - skip: true + disable: true release: # If set to true, will not auto-publish the 
release.
diff --git a/.markdownlint.yaml b/.markdownlint.yaml
new file mode 100644
index 0000000000..9f4f9cacdc
--- /dev/null
+++ b/.markdownlint.yaml
@@ -0,0 +1,38 @@
+# MD013/line-length
+#
+# We're not particular about line length, generally preferring longer
+# lines, since tools like Grammarly and other writing assistance tools
+# work best with "normal" lines not broken up arbitrarily.
+#
+# https://github.com/DavidAnson/markdownlint/blob/main/doc/md013.md
+MD013: false
+
+# MD033/no-inline-html
+#
+# We're fine with inline HTML; there are lots of valid VitePress features
+# that depend on this.
+#
+# https://github.com/DavidAnson/markdownlint/blob/main/doc/md033.md
+MD033: false
+
+# MD024/no-duplicate-heading
+#
+# VitePress does not follow GitHub heading styling, so duplicate headlines
+# are fine as long as they are not siblings (aka same indentation hierarchy)
+#
+# https://github.com/DavidAnson/markdownlint/blob/main/doc/md024.md
+MD024:
+  siblings_only: true
+
+# MD051/link-fragments
+#
+# VitePress generates these differently than markdownlint expects, so disabling
+# for now; something to improve on later (cc @jippi)
+#
+# https://github.com/DavidAnson/markdownlint/blob/main/doc/md051.md
+MD051: false
+
+# for blog posts
+MD025: false
+MD045: false
+MD001: false
diff --git a/.node-version b/.node-version
index 2dbbe00e67..48b14e6b2b 100644
--- a/.node-version
+++ b/.node-version
@@ -1 +1 @@
-20.11.1
+20.14.0
diff --git a/.tool-versions b/.tool-versions
index 69031ad023..74dac79e69 100644
--- a/.tool-versions
+++ b/.tool-versions
@@ -1 +1 @@
-pnpm 8.15.4
+npm v10.7.0
diff --git a/CODE_OF_CONDUCT.md b/CODE_OF_CONDUCT.md
index 819b7f5a8c..5a1cdca8b5 100644
--- a/CODE_OF_CONDUCT.md
+++ b/CODE_OF_CONDUCT.md
@@ -55,8 +55,9 @@ further defined and clarified by project maintainers.
 ## Enforcement

 Instances of abusive, harassing, or otherwise unacceptable behavior may be
-reported by contacting the project team by messaging `@lkysow` on the [Atlantis Slack channel](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA). All
-complaints will be reviewed and investigated and will result in a response that
+reported by contacting the project team by messaging `@lkysow` on the
+[Atlantis Slack community](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw).
+All complaints will be reviewed and investigated and will result in a response that
 is deemed necessary and appropriate to the circumstances. The project team is
 obligated to maintain confidentiality with regard to the reporter of an incident.
 Further details of specific enforcement policies may be posted separately.
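Aside: the `.goreleaser.yml` changes above move to the GoReleaser v2 config format (`version: 2`, `changelog.disable`) and inject build metadata via `ldflags`. A minimal local smoke test of such a config, assuming a GoReleaser v2 binary is installed (not part of this patch), might be:

```sh
# Validate the v2 configuration file.
goreleaser check

# Build a snapshot release into dist/ without tagging or publishing;
# --clean empties dist/ first (the flag mentioned in the release workflow).
goreleaser release --snapshot --clean
```

Both commands are standard GoReleaser CLI; nothing here is defined by this patch itself.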
diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
index 3bd4290095..c64cde6e5e 100644
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -1,9 +1,24 @@
-# Topics
-* [Reporting Issues](#reporting-issues)
-* [Reporting Security Issues](#reporting-security-issues)
-* [Updating The Website](#updating-the-website)
-* [Developing](#developing)
-* [Releasing](#creating-a-new-release)
+# Contributing
+
+# Table of Contents
+- [Reporting Issues](#reporting-issues)
+- [Reporting Security Issues](#reporting-security-issues)
+- [Updating The Website](#updating-the-website)
+- [Developing](#developing)
+  - [Running Atlantis Locally](#running-atlantis-locally)
+  - [Running Atlantis With Local Changes](#running-atlantis-with-local-changes)
+  - [Rebuilding](#rebuilding)
+  - [Running Tests Locally](#running-tests-locally)
+  - [Running Tests In Docker](#running-tests-in-docker)
+  - [Calling Your Local Atlantis From GitHub](#calling-your-local-atlantis-from-github)
+  - [Code Style](#code-style)
+  - [Logging](#logging)
+  - [Errors](#errors)
+  - [Testing](#testing)
+  - [Mocks](#mocks)
+- [Backporting Fixes](#backporting-fixes)
+  - [Manual Backporting Fixes](#manual-backporting-fixes)
+- [Creating a New Release](#creating-a-new-release)

 # Reporting Issues
 * When reporting issues, please include the output of `atlantis version`.
@@ -14,7 +29,7 @@ We take security issues seriously. Please report a security vulnerability to the maintainers using [private vulnerability reporting](https://github.com/runatlantis/atlantis/security/advisories/new).

 # Updating The Website
-* To view the generated website locally, run `pnpm website:dev` and then
+* To view the generated website locally, run `npm run website:dev` and then
 open your browser to http://localhost:8080.
 * The website will be regenerated when your pull request is merged to main.

@@ -23,11 +38,11 @@ open your browser to http://localhost:8080.
 ## Running Atlantis Locally
 * Clone the repo from https://github.com/runatlantis/atlantis/
 * Compile Atlantis:
-  ```
+  ```sh
   go install
   ```
 * Run Atlantis:
-  ```
+  ```sh
   atlantis server --gh-user <GITHUB_USERNAME> --gh-token <GITHUB_TOKEN> --repo-allowlist <REPO_ALLOWLIST> --gh-webhook-secret <WEBHOOK_SECRET> --log-level debug
   ```
   If you get an error like `command not found: atlantis`, ensure that `$GOPATH/bin` is in your `$PATH`.
@@ -36,43 +51,46 @@ open your browser to http://localhost:8080.
 ## Running Atlantis With Local Changes
 Docker compose is set up to start an atlantis container and ngrok container in the same network in order to expose the atlantis instance to the internet. In order to do this, create a file in the repository called `atlantis.env` and add the required env vars for the atlantis server configuration. e.g.
-```
+
+```sh
+NGROK_AUTH=1234567890
+
 ATLANTIS_GH_APP_ID=123
 ATLANTIS_GH_APP_KEY_FILE="/.ssh/somekey.pem"
 ATLANTIS_GH_WEBHOOK_SECRET=12345
 ```
-Note: `~/.ssh` is mounted to allow for referencing any local ssh keys
+Note: `~/.ssh` is mounted to allow for referencing any local ssh keys.

 Following this just run:

-```
+```sh
 make build-service
-docker-compose up
+docker-compose up --detach
+docker-compose logs --follow
 ```

 ### Rebuilding
-
 If the ngrok container is restarted, the url will change which is a hassle. Fortunately, when we make a code change, we can rebuild and restart the atlantis container easily without disrupting ngrok. e.g.

-```
+```sh
 make build-service
 docker-compose up --detach --build
 ```

-## Running Tests Locally:
-
+## Running Tests Locally
 `make test`. If you want to run the integration tests that actually run real `terraform` commands, run `make test-all`.

-## Running Tests In Docker:
-```
+## Running Tests In Docker
+```sh
 docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/github.com/runatlantis/atlantis ghcr.io/runatlantis/testing-env:latest make test
 ```

 Or to run the integration tests
-```
+
+```sh
 docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/github.com/runatlantis/atlantis ghcr.io/runatlantis/testing-env:latest make test-all
 ```

@@ -80,18 +98,19 @@ docker run --rm -v $(pwd):/go/src/github.com/runatlantis/atlantis -w /go/src/git
 - Create a test terraform repository in your GitHub.
 - Create a personal access token for Atlantis. See [Create a GitHub token](https://github.com/runatlantis/atlantis/tree/main/runatlantis.io/docs/access-credentials.md#generating-an-access-token).
 - Start Atlantis in server mode using that token:
-```
+```sh
 atlantis server --gh-user <GITHUB_USERNAME> --gh-token <GITHUB_TOKEN> --repo-allowlist <REPO_ALLOWLIST> --gh-webhook-secret <WEBHOOK_SECRET> --log-level debug
 ```
 - Download ngrok from https://ngrok.com/download. This will enable you to expose Atlantis running on your laptop to the internet so GitHub can call it.
 - When you've downloaded and extracted ngrok, run it on port `4141`:
-```
+```sh
 ngrok http 4141
 ```
 - Create a Webhook in your repo and use the `https` url that `ngrok` printed out after running `ngrok http 4141`. Be sure to append `/events` so your webhook url looks something like `https://efce3bcd.ngrok.io/events`. See [Add GitHub Webhook](https://github.com/runatlantis/atlantis/blob/main/runatlantis.io/docs/configuring-webhooks.md#configuring-webhooks).
 - Create a pull request and type `atlantis help`. You should see the request in the `ngrok` and Atlantis logs and you should also see Atlantis comment back.

 ## Code Style
+
 ### Logging
 - `ctx.Log` should be available in most methods. If not, pass it down.
 - levels:
@@ -161,12 +180,11 @@ go get github.com/petergtz/pegomock/...
 ```

 # Backporting Fixes
-
 Atlantis now uses a [cherry-pick-bot](https://github.com/googleapis/repo-automation-bots/tree/main/packages/cherry-pick-bot) from Google. The bot assists in maintaining changes across release branches by easily cherry-picking changes via pull requests.

 Maintainers and Core Contributors can add a comment to a pull request:

-```
+```sh
 /cherry-pick target-branch-name
 ```

@@ -175,7 +193,6 @@ target-branch-name is the branch to cherry-pick to. cherry-pick-bot will cherry-
 The bot will immediately try to cherry-pick a merged PR. On unmerged pull request, it will not do anything immediately, but wait until merge. You can comment multiple times on a PR for multiple release branches.

 ## Manual Backporting Fixes
-
 The bot will fail to cherry-pick if the feature branches' git history is not linear (merge commits instead of rebase). In that case, you will need to manually cherry-pick the squashed merged commit from main to the release branch

 1. Switch to the release branch intended for the fix.
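For the manual fallback described above, the flow is a plain `git cherry-pick` of the squashed commit that landed on main. A minimal sketch, where `release-x.y` and `<sha>` are placeholders:

```sh
# Placeholders: release-x.y is the target release branch,
# <sha> is the squashed commit that was merged to main.
git fetch origin
git checkout release-x.y
git cherry-pick <sha>
git push origin release-x.y
```

This mirrors what the bot automates via `/cherry-pick target-branch-name` comments.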
diff --git a/Dockerfile b/Dockerfile index 49341b8bc5..35704e6523 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,16 +1,19 @@ -# syntax=docker/dockerfile:1 +# syntax=docker/dockerfile:1@sha256:a57df69d0ea827fb7266491f2813635de6f17269be881f696fbfdf2d83dda33e # what distro is the image being built for -ARG ALPINE_TAG=3.19.1 -ARG DEBIAN_TAG=12.5-slim +ARG ALPINE_TAG=3.20.0@sha256:77726ef6b57ddf65bb551896826ec38bc3e53f75cdde31354fbffb4f25238ebd +ARG DEBIAN_TAG=12.5-slim@sha256:67f3931ad8cb1967beec602d8c0506af1e37e8d73c2a0b38b181ec5d8560d395 +ARG GOLANG_TAG=1.22.4-alpine@sha256:6522f0ca555a7b14c46a2c9f50b86604a234cdc72452bf6a268cae6461d9000b # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp -ARG DEFAULT_TERRAFORM_VERSION=1.7.2 +ARG DEFAULT_TERRAFORM_VERSION=1.8.5 +# renovate: datasource=github-releases depName=opentofu/opentofu versioning=hashicorp +ARG DEFAULT_OPENTOFU_VERSION=1.7.2 # renovate: datasource=github-releases depName=open-policy-agent/conftest -ARG DEFAULT_CONFTEST_VERSION=0.49.1 +ARG DEFAULT_CONFTEST_VERSION=0.52.0 # Stage 1: build artifact and download deps -FROM golang:1.22.1-alpine AS builder +FROM golang:${GOLANG_TAG} AS builder ARG ATLANTIS_VERSION=dev ENV ATLANTIS_VERSION=${ATLANTIS_VERSION} @@ -68,7 +71,6 @@ ARG TARGETPLATFORM WORKDIR /tmp/build # install conftest -# renovate: datasource=github-releases depName=open-policy-agent/conftest ARG DEFAULT_CONFTEST_VERSION ENV DEFAULT_CONFTEST_VERSION=${DEFAULT_CONFTEST_VERSION} SHELL ["/bin/bash", "-o", "pipefail", "-c"] @@ -106,31 +108,26 @@ RUN case ${TARGETPLATFORM} in \ git-lfs --version # install terraform binaries -# renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp ARG DEFAULT_TERRAFORM_VERSION ENV DEFAULT_TERRAFORM_VERSION=${DEFAULT_TERRAFORM_VERSION} +ARG DEFAULT_OPENTOFU_VERSION +ENV DEFAULT_OPENTOFU_VERSION=${DEFAULT_OPENTOFU_VERSION} + +# COPY scripts/download-release.sh . +COPY --from=builder /app/scripts/download-release.sh download-release.sh # In the official Atlantis image, we only have the latest of each Terraform version. 
# Each binary is about 80 MB so we limit it to the 4 latest minor releases or fewer -RUN AVAILABLE_TERRAFORM_VERSIONS="1.4.7 1.5.7 1.6.6 ${DEFAULT_TERRAFORM_VERSION}" && \ - case "${TARGETPLATFORM}" in \ - "linux/amd64") TERRAFORM_ARCH=amd64 ;; \ - "linux/arm64") TERRAFORM_ARCH=arm64 ;; \ - "linux/arm/v7") TERRAFORM_ARCH=arm ;; \ - *) echo "ERROR: 'TARGETPLATFORM' value expected: ${TARGETPLATFORM}"; exit 1 ;; \ - esac && \ - for VERSION in ${AVAILABLE_TERRAFORM_VERSIONS}; do \ - curl -LOs "https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \ - curl -LOs "https://releases.hashicorp.com/terraform/${VERSION}/terraform_${VERSION}_SHA256SUMS" && \ - sed -n "/terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip/p" "terraform_${VERSION}_SHA256SUMS" | sha256sum -c && \ - mkdir -p "/usr/local/bin/tf/versions/${VERSION}" && \ - unzip "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" -d "/usr/local/bin/tf/versions/${VERSION}" && \ - ln -s "/usr/local/bin/tf/versions/${VERSION}/terraform" "/usr/local/bin/terraform${VERSION}" && \ - rm "terraform_${VERSION}_linux_${TERRAFORM_ARCH}.zip" && \ - rm "terraform_${VERSION}_SHA256SUMS"; \ - done && \ - ln -s "/usr/local/bin/tf/versions/${DEFAULT_TERRAFORM_VERSION}/terraform" /usr/local/bin/terraform - +RUN ./download-release.sh \ + "terraform" \ + "${TARGETPLATFORM}" \ + "${DEFAULT_TERRAFORM_VERSION}" \ + "1.5.7 1.6.6 1.7.5 ${DEFAULT_TERRAFORM_VERSION}" \ + && ./download-release.sh \ + "tofu" \ + "${TARGETPLATFORM}" \ + "${DEFAULT_OPENTOFU_VERSION}" \ + "${DEFAULT_OPENTOFU_VERSION}" # Stage 2 - Alpine # Creating the individual distro builds using targets @@ -150,7 +147,8 @@ RUN addgroup atlantis && \ # copy atlantis binary COPY --from=builder /app/atlantis /usr/local/bin/atlantis # copy terraform binaries -COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/terraform/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/tofu/tofu* /usr/local/bin/ # copy dependencies COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs @@ -159,7 +157,7 @@ COPY docker-entrypoint.sh /usr/local/bin/docker-entrypoint.sh # Install packages needed to run Atlantis. 
# We place this last as it will bust less docker layer caches when packages update RUN apk add --no-cache \ - ca-certificates~=20230506 \ + ca-certificates~=20240226-r0 \ curl~=8 \ git~=2 \ unzip~=6 \ @@ -168,7 +166,6 @@ RUN apk add --no-cache \ dumb-init~=1 \ gcompat~=1 - # Set the entry point to the atlantis user and run the atlantis command USER atlantis ENTRYPOINT ["docker-entrypoint.sh"] @@ -190,7 +187,8 @@ RUN useradd --create-home --user-group --shell /bin/bash atlantis && \ # copy atlantis binary COPY --from=builder /app/atlantis /usr/local/bin/atlantis # copy terraform binaries -COPY --from=deps /usr/local/bin/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/terraform/terraform* /usr/local/bin/ +COPY --from=deps /usr/local/bin/tofu/tofu* /usr/local/bin/ # copy dependencies COPY --from=deps /usr/local/bin/conftest /usr/local/bin/conftest COPY --from=deps /usr/bin/git-lfs /usr/bin/git-lfs diff --git a/Dockerfile.dev b/Dockerfile.dev index f85a5555e2..d5fc7ad46f 100644 --- a/Dockerfile.dev +++ b/Dockerfile.dev @@ -1,3 +1,3 @@ -FROM ghcr.io/runatlantis/atlantis:latest +FROM ghcr.io/runatlantis/atlantis:latest@sha256:b35c839a0d4673f0aaa99bfa5176e7d62c79812a3f311d179393aae3c7acc2d8 COPY atlantis /usr/local/bin/atlantis WORKDIR /atlantis/src diff --git a/Makefile b/Makefile index 1678ef588e..620e01bf95 100644 --- a/Makefile +++ b/Makefile @@ -6,6 +6,9 @@ IMAGE_NAME := runatlantis/atlantis .DEFAULT_GOAL := help +# renovate: datasource=github-releases depName=golangci/golangci-lint +GOLANGCI_LINT_VERSION := v1.59.1 + .PHONY: help help: ## List targets & descriptions @cat Makefile* | grep -E '^[a-zA-Z\/_-]+:.*?## .*$$' | sort | awk 'BEGIN {FS = ":.*?## "}; {printf "\033[36m%-30s\033[0m %s\n", $$1, $$2}' @@ -93,12 +96,12 @@ lint: ## Run linter locally .PHONY: check-lint check-lint: ## Run linter in CI/CD. If running locally use 'lint' - curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b ./bin v1.49.0 + curl -sfL https://raw.githubusercontent.com/golangci/golangci-lint/master/install.sh | sh -s -- -b ./bin $(GOLANGCI_LINT_VERSION) ./bin/golangci-lint run -j 4 --timeout 5m .PHONY: check-fmt check-fmt: ## Fail if not formatted - if [[ $$(goimports -l $$(find . -type f -name '*.go' ! -path "./vendor/*" ! 
-path "**/mocks/*")) ]]; then exit 1; fi + ./scripts/fmt.sh .PHONY: end-to-end-deps end-to-end-deps: ## Install e2e dependencies @@ -110,4 +113,4 @@ end-to-end-tests: ## Run e2e tests .PHONY: website-dev website-dev: ## Run runatlantic.io on localhost:8080 - pnpm website:dev + npm website:dev diff --git a/README.md b/README.md index 6471847770..fa9d26a6a8 100644 --- a/README.md +++ b/README.md @@ -1,15 +1,14 @@ # Atlantis [![Latest Release](https://img.shields.io/github/release/runatlantis/atlantis.svg)](https://github.com/runatlantis/atlantis/releases/latest) -[![SuperDopeBadge](./runatlantis.io/.vuepress/public/hightower-super-dope.svg)](https://twitter.com/kelseyhightower/status/893260922222813184) +[![SuperDopeBadge](./runatlantis.io/public/hightower-super-dope.svg)](https://twitter.com/kelseyhightower/status/893260922222813184) [![Go Report Card](https://goreportcard.com/badge/github.com/runatlantis/atlantis)](https://goreportcard.com/report/github.com/runatlantis/atlantis) [![Go Reference](https://pkg.go.dev/badge/github.com/runatlantis/atlantis.svg)](https://pkg.go.dev/github.com/runatlantis/atlantis) -[![codecov](https://codecov.io/gh/runatlantis/atlantis/branch/main/graph/badge.svg)](https://codecov.io/gh/runatlantis/atlantis) [![CircleCI](https://circleci.com/gh/runatlantis/atlantis/tree/main.svg?style=shield)](https://circleci.com/gh/runatlantis/atlantis/tree/main) -[![Slack](https://img.shields.io/badge/Join-Atlantis%20Community%20Slack-red)](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA) +[![Slack](https://img.shields.io/badge/Join-Atlantis%20Community%20Slack-red)](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw)
 <p align="center">
-  <img src="./runatlantis.io/.vuepress/public/hero.png" alt="Atlantis Logo"/><br><br>
+  <img src="./runatlantis.io/public/hero.png" alt="Atlantis Logo"/><br><br>
   <b>Terraform Pull Request Automation</b>
 </p>
@@ -23,7 +22,7 @@ * How to get started: [www.runatlantis.io/guide](https://www.runatlantis.io/guide) * Full documentation: [www.runatlantis.io/docs](https://www.runatlantis.io/docs) * Download the latest release: [github.com/runatlantis/atlantis/releases/latest](https://github.com/runatlantis/atlantis/releases/latest) -* Get help in our [Slack channel](https://join.slack.com/t/atlantis-community/shared_invite/zt-1nt7yx7uq-AnVRc_JItF1CDwZtfqv_OA) +* Get help in our [Slack channel](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw) * Start Contributing: [CONTRIBUTING.md](CONTRIBUTING.md) ## What is Atlantis? diff --git a/cmd/server.go b/cmd/server.go index 31ecfd393e..950c17243c 100644 --- a/cmd/server.go +++ b/cmd/server.go @@ -76,9 +76,9 @@ const ( DisableUnlockLabelFlag = "disable-unlock-label" DiscardApprovalOnPlanFlag = "discard-approval-on-plan" EmojiReaction = "emoji-reaction" + EnableDiffMarkdownFormat = "enable-diff-markdown-format" EnablePolicyChecksFlag = "enable-policy-checks" EnableRegExpCmdFlag = "enable-regexp-cmd" - EnableDiffMarkdownFormat = "enable-diff-markdown-format" ExecutableName = "executable-name" FailOnPreWorkflowHookError = "fail-on-pre-workflow-hook-error" HideUnchangedPlanComments = "hide-unchanged-plan-comments" @@ -90,9 +90,15 @@ const ( GHAppKeyFlag = "gh-app-key" GHAppKeyFileFlag = "gh-app-key-file" GHAppSlugFlag = "gh-app-slug" + GHAppInstallationIDFlag = "gh-app-installation-id" GHOrganizationFlag = "gh-org" GHWebhookSecretFlag = "gh-webhook-secret" // nolint: gosec GHAllowMergeableBypassApply = "gh-allow-mergeable-bypass-apply" // nolint: gosec + GiteaBaseURLFlag = "gitea-base-url" + GiteaTokenFlag = "gitea-token" + GiteaUserFlag = "gitea-user" + GiteaWebhookSecretFlag = "gitea-webhook-secret" // nolint: gosec + GiteaPageSizeFlag = "gitea-page-size" GitlabHostnameFlag = "gitlab-hostname" GitlabTokenFlag = "gitlab-token" GitlabUserFlag = "gitlab-user" @@ -152,10 +158,12 @@ const ( DefaultCheckoutDepth = 0 DefaultBitbucketBaseURL = bitbucketcloud.BaseURL DefaultDataDir = "~/.atlantis" - DefaultEmojiReaction = "eyes" + DefaultEmojiReaction = "" DefaultExecutableName = "atlantis" DefaultMarkdownTemplateOverridesDir = "~/.markdown_templates" DefaultGHHostname = "github.com" + DefaultGiteaBaseURL = "https://gitea.com" + DefaultGiteaPageSize = 30 DefaultGitlabHostname = "gitlab.com" DefaultLockingDBType = "boltdb" DefaultLogLevel = "info" @@ -268,7 +276,7 @@ var stringFlags = map[string]stringFlag{ defaultValue: "", }, EmojiReaction: { - description: "Emoji Reaction to use to react to comments", + description: "Emoji Reaction to use to react to comments.", defaultValue: DefaultEmojiReaction, }, ExecutableName: { @@ -318,6 +326,22 @@ var stringFlags = map[string]stringFlag{ "This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. " + "Should be specified via the ATLANTIS_GH_WEBHOOK_SECRET environment variable.", }, + GiteaBaseURLFlag: { + description: "Base URL of Gitea server installation. Must include 'http://' or 'https://'.", + }, + GiteaUserFlag: { + description: "Gitea username of API user.", + defaultValue: "", + }, + GiteaTokenFlag: { + description: "Gitea token of API user. Can also be specified via the ATLANTIS_GITEA_TOKEN environment variable.", + }, + GiteaWebhookSecretFlag: { + description: "Optional secret used to validate Gitea webhooks." + + " SECURITY WARNING: If not specified, Atlantis won't be able to validate that the incoming webhook call came from Gitea. 
" + + "This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. " + + "Should be specified via the ATLANTIS_GITEA_WEBHOOK_SECRET environment variable.", + }, GitlabHostnameFlag: { description: "Hostname of your GitLab Enterprise installation. If using gitlab.com, no need to set.", defaultValue: DefaultGitlabHostname, @@ -436,6 +460,7 @@ var boolFlags = map[string]boolFlag{ description: "Disable atlantis auto planning feature", defaultValue: false, }, + DisableRepoLockingFlag: { description: "Disable atlantis locking repos", }, @@ -568,6 +593,10 @@ var intFlags = map[string]intFlag{ " If merge base is further behind than this number of commits from any of branches heads, full fetch will be performed.", defaultValue: DefaultCheckoutDepth, }, + GiteaPageSizeFlag: { + description: "Optional value that specifies the number of results per page to expect from Gitea.", + defaultValue: DefaultGiteaPageSize, + }, ParallelPoolSize: { description: "Max size of the wait group that runs parallel plans and applies (if enabled).", defaultValue: DefaultParallelPoolSize, @@ -591,6 +620,13 @@ var int64Flags = map[string]int64Flag{ description: "GitHub App Id. If defined, initializes the GitHub client with app-based credentials", defaultValue: 0, }, + GHAppInstallationIDFlag: { + description: "GitHub App Installation Id. If defined, initializes the GitHub client with app-based credentials " + + "using this specific GitHub Application Installation ID, otherwise it attempts to auto-detect it. " + + "Note that this value must be set if you want to have one App and multiple installations of that same " + + "application.", + defaultValue: 0, + }, } // ValidLogLevels are the valid log levels that can be set @@ -813,6 +849,12 @@ func (s *ServerCmd) setDefaults(c *server.UserConfig) { if c.GitlabHostname == "" { c.GitlabHostname = DefaultGitlabHostname } + if c.GiteaBaseURL == "" { + c.GiteaBaseURL = DefaultGiteaBaseURL + } + if c.GiteaPageSize == 0 { + c.GiteaPageSize = DefaultGiteaPageSize + } if c.BitbucketBaseURL == "" { c.BitbucketBaseURL = DefaultBitbucketBaseURL } @@ -885,12 +927,17 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { // The following combinations are valid. // 1. github user and token set // 2. github app ID and (key file set or key set) - // 3. gitlab user and token set - // 4. bitbucket user and token set - // 5. azuredevops user and token set - // 6. any combination of the above - vcsErr := fmt.Errorf("--%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s must be set", GHUserFlag, GHTokenFlag, GHAppIDFlag, GHAppKeyFileFlag, GHAppIDFlag, GHAppKeyFlag, GitlabUserFlag, GitlabTokenFlag, BitbucketUserFlag, BitbucketTokenFlag, ADUserFlag, ADTokenFlag) - if ((userConfig.GithubUser == "") != (userConfig.GithubToken == "")) || ((userConfig.GitlabUser == "") != (userConfig.GitlabToken == "")) || ((userConfig.BitbucketUser == "") != (userConfig.BitbucketToken == "")) || ((userConfig.AzureDevopsUser == "") != (userConfig.AzureDevopsToken == "")) { + // 3. gitea user and token set + // 4. gitlab user and token set + // 5. bitbucket user and token set + // 6. azuredevops user and token set + // 7. 
any combination of the above + vcsErr := fmt.Errorf("--%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s or --%s/--%s must be set", GHUserFlag, GHTokenFlag, GHAppIDFlag, GHAppKeyFileFlag, GHAppIDFlag, GHAppKeyFlag, GiteaUserFlag, GiteaTokenFlag, GitlabUserFlag, GitlabTokenFlag, BitbucketUserFlag, BitbucketTokenFlag, ADUserFlag, ADTokenFlag) + if ((userConfig.GithubUser == "") != (userConfig.GithubToken == "")) || + ((userConfig.GiteaUser == "") != (userConfig.GiteaToken == "")) || + ((userConfig.GitlabUser == "") != (userConfig.GitlabToken == "")) || + ((userConfig.BitbucketUser == "") != (userConfig.BitbucketToken == "")) || + ((userConfig.AzureDevopsUser == "") != (userConfig.AzureDevopsToken == "")) { return vcsErr } if (userConfig.GithubAppID != 0) && ((userConfig.GithubAppKey == "") && (userConfig.GithubAppKeyFile == "")) { @@ -901,7 +948,7 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { } // At this point, we know that there can't be a single user/token without // its partner, but we haven't checked if any user/token is set at all. - if userConfig.GithubAppID == 0 && userConfig.GithubUser == "" && userConfig.GitlabUser == "" && userConfig.BitbucketUser == "" && userConfig.AzureDevopsUser == "" { + if userConfig.GithubAppID == 0 && userConfig.GithubUser == "" && userConfig.GiteaUser == "" && userConfig.GitlabUser == "" && userConfig.BitbucketUser == "" && userConfig.AzureDevopsUser == "" { return vcsErr } @@ -924,6 +971,14 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { return fmt.Errorf("--%s must have http:// or https://, got %q", BitbucketBaseURLFlag, userConfig.BitbucketBaseURL) } + parsed, err = url.Parse(userConfig.GiteaBaseURL) + if err != nil { + return fmt.Errorf("error parsing --%s flag value %q: %s", GiteaWebhookSecretFlag, userConfig.GiteaBaseURL, err) + } + if parsed.Scheme != "http" && parsed.Scheme != "https" { + return fmt.Errorf("--%s must have http:// or https://, got %q", GiteaBaseURLFlag, userConfig.GiteaBaseURL) + } + if userConfig.RepoConfig != "" && userConfig.RepoConfigJSON != "" { return fmt.Errorf("cannot use --%s and --%s at the same time", RepoConfigFlag, RepoConfigJSONFlag) } @@ -936,6 +991,8 @@ func (s *ServerCmd) validate(userConfig server.UserConfig) error { GitlabWebhookSecretFlag: userConfig.GitlabWebhookSecret, BitbucketTokenFlag: userConfig.BitbucketToken, BitbucketWebhookSecretFlag: userConfig.BitbucketWebhookSecret, + GiteaTokenFlag: userConfig.GiteaToken, + GiteaWebhookSecretFlag: userConfig.GiteaWebhookSecret, } { if strings.Contains(token, "\n") { s.Logger.Warn("--%s contains a newline which is usually unintentional", name) @@ -1029,6 +1086,7 @@ func (s *ServerCmd) setVarFileAllowlist(userConfig *server.UserConfig) { // trimAtSymbolFromUsers trims @ from the front of the github and gitlab usernames func (s *ServerCmd) trimAtSymbolFromUsers(userConfig *server.UserConfig) { userConfig.GithubUser = strings.TrimPrefix(userConfig.GithubUser, "@") + userConfig.GiteaUser = strings.TrimPrefix(userConfig.GiteaUser, "@") userConfig.GitlabUser = strings.TrimPrefix(userConfig.GitlabUser, "@") userConfig.BitbucketUser = strings.TrimPrefix(userConfig.BitbucketUser, "@") userConfig.AzureDevopsUser = strings.TrimPrefix(userConfig.AzureDevopsUser, "@") @@ -1038,6 +1096,9 @@ func (s *ServerCmd) securityWarnings(userConfig *server.UserConfig) { if userConfig.GithubUser != "" && userConfig.GithubWebhookSecret == "" && !s.SilenceOutput { s.Logger.Warn("no GitHub webhook secret set. 
This could allow attackers to spoof requests from GitHub") } + if userConfig.GiteaUser != "" && userConfig.GiteaWebhookSecret == "" && !s.SilenceOutput { + s.Logger.Warn("no Gitea webhook secret set. This could allow attackers to spoof requests from Gitea") + } if userConfig.GitlabUser != "" && userConfig.GitlabWebhookSecret == "" && !s.SilenceOutput { s.Logger.Warn("no GitLab webhook secret set. This could allow attackers to spoof requests from GitLab") } diff --git a/cmd/server_test.go b/cmd/server_test.go index 81b834151d..96e174c970 100644 --- a/cmd/server_test.go +++ b/cmd/server_test.go @@ -91,8 +91,14 @@ var testFlags = map[string]interface{}{ GHAppKeyFlag: "", GHAppKeyFileFlag: "", GHAppSlugFlag: "atlantis", + GHAppInstallationIDFlag: int64(0), GHOrganizationFlag: "", GHWebhookSecretFlag: "secret", + GiteaBaseURLFlag: "http://localhost", + GiteaTokenFlag: "gitea-token", + GiteaUserFlag: "gitea-user", + GiteaWebhookSecretFlag: "gitea-secret", + GiteaPageSizeFlag: 30, GitlabHostnameFlag: "gitlab-hostname", GitlabTokenFlag: "gitlab-token", GitlabUserFlag: "gitlab-user", @@ -156,6 +162,7 @@ func TestExecute_Defaults(t *testing.T) { c := setup(map[string]interface{}{ GHUserFlag: "user", GHTokenFlag: "token", + GiteaBaseURLFlag: "http://localhost", RepoAllowlistFlag: "*", }, t) err := c.Execute() @@ -174,6 +181,7 @@ func TestExecute_Defaults(t *testing.T) { strExceptions := map[string]string{ GHUserFlag: "user", GHTokenFlag: "token", + GiteaBaseURLFlag: "http://localhost", DataDirFlag: dataDir, MarkdownTemplateOverridesDirFlag: markdownTemplateOverridesDir, AtlantisURLFlag: "http://" + hostname + ":4141", @@ -422,7 +430,7 @@ func TestExecute_ValidateSSLConfig(t *testing.T) { } func TestExecute_ValidateVCSConfig(t *testing.T) { - expErr := "--gh-user/--gh-token or --gh-app-id/--gh-app-key-file or --gh-app-id/--gh-app-key or --gitlab-user/--gitlab-token or --bitbucket-user/--bitbucket-token or --azuredevops-user/--azuredevops-token must be set" + expErr := "--gh-user/--gh-token or --gh-app-id/--gh-app-key-file or --gh-app-id/--gh-app-key or --gitea-user/--gitea-token or --gitlab-user/--gitlab-token or --bitbucket-user/--bitbucket-token or --azuredevops-user/--azuredevops-token must be set" cases := []struct { description string flags map[string]interface{} @@ -440,6 +448,13 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, true, }, + { + "just gitea token set", + map[string]interface{}{ + GiteaTokenFlag: "token", + }, + true, + }, { "just gitlab token set", map[string]interface{}{ @@ -468,6 +483,13 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, true, }, + { + "just gitea user set", + map[string]interface{}{ + GiteaUserFlag: "user", + }, + true, + }, { "just github app set", map[string]interface{}{ @@ -534,6 +556,22 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, true, }, + { + "github user and gitea token set", + map[string]interface{}{ + GHUserFlag: "user", + GiteaTokenFlag: "token", + }, + true, + }, + { + "gitea user and github token set", + map[string]interface{}{ + GiteaUserFlag: "user", + GHTokenFlag: "token", + }, + true, + }, { "github user and github token set and should be successful", map[string]interface{}{ @@ -542,6 +580,14 @@ func TestExecute_ValidateVCSConfig(t *testing.T) { }, false, }, + { + "gitea user and gitea token set and should be successful", + map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + }, + false, + }, { "github app and key file set and should be successful", map[string]interface{}{ @@ -587,6 +633,8 @@ 
func TestExecute_ValidateVCSConfig(t *testing.T) { map[string]interface{}{ GHUserFlag: "user", GHTokenFlag: "token", + GiteaUserFlag: "user", + GiteaTokenFlag: "token", GitlabUserFlag: "user", GitlabTokenFlag: "token", BitbucketUserFlag: "user", @@ -699,6 +747,34 @@ func TestExecute_GithubApp(t *testing.T) { Equals(t, int64(1), passedConfig.GithubAppID) } +func TestExecute_GithubAppWithInstallationID(t *testing.T) { + t.Log("Should pass the installation ID to the config.") + c := setup(map[string]interface{}{ + GHAppKeyFlag: testdata.GithubPrivateKey, + GHAppIDFlag: "1", + GHAppInstallationIDFlag: "2", + RepoAllowlistFlag: "*", + }, t) + err := c.Execute() + Ok(t, err) + + Equals(t, int64(1), passedConfig.GithubAppID) + Equals(t, int64(2), passedConfig.GithubAppInstallationID) +} + +func TestExecute_GiteaUser(t *testing.T) { + t.Log("Should remove the @ from the gitea username if it's passed.") + c := setup(map[string]interface{}{ + GiteaUserFlag: "@user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + }, t) + err := c.Execute() + Ok(t, err) + + Equals(t, "user", passedConfig.GiteaUser) +} + func TestExecute_GitlabUser(t *testing.T) { t.Log("Should remove the @ from the gitlab username if it's passed.") c := setup(map[string]interface{}{ @@ -934,3 +1010,45 @@ func configVal(t *testing.T, u server.UserConfig, tag string) interface{} { t.Fatalf("no field with tag %q found", tag) return nil } + +// Gitea base URL must have a scheme. +func TestExecute_GiteaBaseURLScheme(t *testing.T) { + c := setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaBaseURLFlag: "mydomain.com", + }, t) + ErrEquals(t, "--gitea-base-url must have http:// or https://, got \"mydomain.com\"", c.Execute()) + + c = setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaBaseURLFlag: "://mydomain.com", + }, t) + ErrEquals(t, "error parsing --gitea-webhook-secret flag value \"://mydomain.com\": parse \"://mydomain.com\": missing protocol scheme", c.Execute()) +} + +func TestExecute_GiteaWithWebhookSecret(t *testing.T) { + c := setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaWebhookSecretFlag: "my secret", + }, t) + err := c.Execute() + Ok(t, err) +} + +// Port should be retained on base url. +func TestExecute_GiteaBaseURLPort(t *testing.T) { + c := setup(map[string]interface{}{ + GiteaUserFlag: "user", + GiteaTokenFlag: "token", + RepoAllowlistFlag: "*", + GiteaBaseURLFlag: "http://mydomain.com:7990", + }, t) + Ok(t, c.Execute()) + Equals(t, "http://mydomain.com:7990", passedConfig.GiteaBaseURL) +} diff --git a/docker-compose.yml b/docker-compose.yml index ab2b2f1cab..cd1484abf0 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -1,42 +1,41 @@ # Note: This file is only used for Atlantis local development -version: "3.8" services: - ngrok: - image: wernight/ngrok:latest - ports: - - 4040:4040 - environment: - # https://dashboard.ngrok.com/get-started/your-authtoken - # NGROK_AUTH: REPLACE-WITH-YOUR-TOKEN // set this in atlantis.env - NGROK_PROTOCOL: http - NGROK_PORT: atlantis:4141 - env_file: - - ./atlantis.env - depends_on: - - atlantis - redis: - image: redis:7.2-alpine - restart: always - ports: - - '6379:6379' - command: redis-server --save 20 1 --loglevel warning --requirepass test123 - volumes: - - redis:/data - atlantis: - depends_on: - - redis - build: - context: . 
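The Gitea base-URL checks exercised by the tests just above are easiest to read in isolation. The following is a minimal, self-contained sketch of the scheme validation this diff adds to cmd/server.go; validateGiteaBaseURL and the standalone main are illustrative names, since Atlantis runs this logic inline in (*ServerCmd).validate. (One quirk the tests preserve: the committed parse-error branch formats its message with the gitea-webhook-secret flag name; the sketch normalizes it to the base-url flag.)

package main

import (
	"fmt"
	"net/url"
)

// validateGiteaBaseURL mirrors the new check: the flag value must parse
// as a URL and carry an explicit http or https scheme. A bare hostname
// like "mydomain.com" parses without error but has no scheme, so the
// second branch rejects it; a port survives untouched, as the
// TestExecute_GiteaBaseURLPort case asserts.
func validateGiteaBaseURL(raw string) error {
	parsed, err := url.Parse(raw)
	if err != nil {
		return fmt.Errorf("error parsing --gitea-base-url flag value %q: %s", raw, err)
	}
	if parsed.Scheme != "http" && parsed.Scheme != "https" {
		return fmt.Errorf("--gitea-base-url must have http:// or https://, got %q", raw)
	}
	return nil
}

func main() {
	for _, u := range []string{"https://gitea.com", "http://mydomain.com:7990", "mydomain.com", "://mydomain.com"} {
		fmt.Printf("%-25s -> %v\n", u, validateGiteaBaseURL(u))
	}
}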
- dockerfile: Dockerfile.dev - ports: - - 4141:4141 - volumes: - - ~/.ssh:/.ssh - - ./:/atlantis/src - # Contains the flags that atlantis uses in env var form - env_file: - - ./atlantis.env + ngrok: + image: wernight/ngrok:latest@sha256:d211f29ebcfe5f4e72df4fa8bdd9a667886e127d7fcb1be4a1af5ad83a8a1b77 + ports: + - 4040:4040 + environment: + # https://dashboard.ngrok.com/get-started/your-authtoken + # NGROK_AUTH: REPLACE-WITH-YOUR-TOKEN // set this in atlantis.env + NGROK_PROTOCOL: http + NGROK_PORT: atlantis:4141 + env_file: + - atlantis.env + depends_on: + - atlantis + redis: + image: redis:7.2-alpine@sha256:0389bb8416d7c6ed065c25745179bf5d358e5d9472dd30a687ab36ffbb650262 + restart: always + ports: + - 6379:6379 + command: redis-server --save 20 1 --loglevel warning --requirepass test123 + volumes: + - redis:/data + atlantis: + depends_on: + - redis + build: + context: . + dockerfile: Dockerfile.dev + ports: + - 4141:4141 + volumes: + - ${HOME}/.ssh:/.ssh:ro + - ${PWD}:/atlantis/src:ro + # Contains the flags that atlantis uses in env var form + env_file: + - atlantis.env volumes: - redis: - driver: local + redis: + driver: local diff --git a/e2e/.gitconfig b/e2e/.gitconfig deleted file mode 100644 index 3424a0e076..0000000000 --- a/e2e/.gitconfig +++ /dev/null @@ -1,3 +0,0 @@ -[user] - name = atlantisbot - email = lkysow+atlantis@gmail.com \ No newline at end of file diff --git a/e2e/main.go b/e2e/main.go index f04b823906..c5ae0e2582 100644 --- a/e2e/main.go +++ b/e2e/main.go @@ -38,13 +38,13 @@ type Project struct { func main() { - githubUsername := os.Getenv("GITHUB_USERNAME") + githubUsername := os.Getenv("ATLANTISBOT_GITHUB_USERNAME") if githubUsername == "" { - log.Fatalf("GITHUB_USERNAME cannot be empty") + log.Fatalf("ATLANTISBOT_GITHUB_USERNAME cannot be empty") } - githubToken := os.Getenv("GITHUB_PASSWORD") + githubToken := os.Getenv("ATLANTISBOT_GITHUB_TOKEN") if githubToken == "" { - log.Fatalf("GITHUB_PASSWORD cannot be empty") + log.Fatalf("ATLANTISBOT_GITHUB_TOKEN cannot be empty") } atlantisURL := os.Getenv("ATLANTIS_URL") if atlantisURL == "" { diff --git a/go.mod b/go.mod index e3f32b29a0..31123f3115 100644 --- a/go.mod +++ b/go.mod @@ -1,12 +1,13 @@ module github.com/runatlantis/atlantis -go 1.22.0 +go 1.22.4 require ( + code.gitea.io/sdk/gitea v0.18.0 github.com/Masterminds/sprig/v3 v3.2.3 - github.com/alicebob/miniredis/v2 v2.31.1 - github.com/bradleyfalzon/ghinstallation/v2 v2.9.0 - github.com/briandowns/spinner v1.23.0 + github.com/alicebob/miniredis/v2 v2.32.1 + github.com/bradleyfalzon/ghinstallation/v2 v2.10.0 + github.com/briandowns/spinner v1.23.1 github.com/cactus/go-statsd-client/v5 v5.1.0 github.com/go-ozzo/ozzo-validation v3.6.0+incompatible github.com/go-playground/validator/v10 v10.19.0 @@ -16,12 +17,14 @@ require ( github.com/google/shlex v0.0.0-20191202100458-e7afc7fbc510 github.com/google/uuid v1.6.0 github.com/gorilla/mux v1.8.1 - github.com/gorilla/websocket v1.5.1 - github.com/hashicorp/go-getter/v2 v2.2.1 + github.com/gorilla/websocket v1.5.2 + github.com/hashicorp/go-getter/v2 v2.2.2 github.com/hashicorp/go-multierror v1.1.1 - github.com/hashicorp/go-version v1.6.0 + github.com/hashicorp/go-version v1.7.0 github.com/hashicorp/golang-lru/v2 v2.0.7 - github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72 + github.com/hashicorp/hc-install v0.7.1-0.20240607080111-03e0bd63529f + github.com/hashicorp/terraform-config-inspect v0.0.0-20240607080351-271db412dbcb + github.com/jpillora/backoff v1.0.0 github.com/kr/pretty v0.3.1 
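The new code.gitea.io/sdk/gitea v0.18.0 requirement above is the client library behind the gitea-* flags introduced in cmd/server.go. Here is a rough sketch of how the base URL, token, and page size plausibly feed it; gitea.NewClient, gitea.SetToken, and ListMyRepos come from the upstream SDK, while newGiteaClient and the literal values are purely illustrative and not Atlantis's actual constructor.

package main

import (
	"fmt"
	"log"

	"code.gitea.io/sdk/gitea"
)

// newGiteaClient shows the shape of wiring --gitea-base-url and
// --gitea-token into the SDK. NewClient validates the URL (and,
// depending on SDK version, may probe the server), so a bad base URL
// fails fast here.
func newGiteaClient(baseURL, token string) (*gitea.Client, error) {
	return gitea.NewClient(baseURL, gitea.SetToken(token))
}

func main() {
	client, err := newGiteaClient("https://gitea.com", "example-token")
	if err != nil {
		log.Fatal(err)
	}
	// Listing is paginated, which is where a --gitea-page-size style
	// setting (default 30 in the diff) comes into play.
	repos, _, err := client.ListMyRepos(gitea.ListReposOptions{
		ListOptions: gitea.ListOptions{Page: 1, PageSize: 30},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Printf("found %d repos\n", len(repos))
}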
github.com/mcdafydd/go-azuredevops v0.12.1 github.com/microcosm-cc/bluemonday v1.0.26 @@ -31,9 +34,9 @@ require ( github.com/mohae/deepcopy v0.0.0-20170929034955-c48cc78d4826 github.com/petergtz/pegomock/v4 v4.0.0 github.com/pkg/errors v0.9.1 - github.com/redis/go-redis/v9 v9.5.1 + github.com/redis/go-redis/v9 v9.5.2 github.com/remeh/sizedwaitgroup v1.0.0 - github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc + github.com/shurcooL/githubv4 v0.0.0-20240429030203-be2daab69064 github.com/slack-go/slack v0.12.5 github.com/spf13/cobra v1.8.0 github.com/spf13/pflag v1.0.5 @@ -41,63 +44,64 @@ require ( github.com/stretchr/testify v1.9.0 github.com/uber-go/tally/v4 v4.1.10 github.com/urfave/negroni/v3 v3.1.0 - github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605 - github.com/xanzy/go-gitlab v0.99.0 - go.etcd.io/bbolt v1.3.9 + github.com/xanzy/go-gitlab v0.102.0 + go.etcd.io/bbolt v1.3.10 go.uber.org/zap v1.27.0 - golang.org/x/term v0.18.0 - golang.org/x/text v0.14.0 + golang.org/x/term v0.20.0 + golang.org/x/text v0.15.0 gopkg.in/yaml.v3 v3.0.1 ) require ( github.com/agext/levenshtein v1.2.3 github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 // indirect - github.com/go-playground/locales v0.14.1 // indirect - github.com/go-playground/universal-translator v0.18.1 // indirect - github.com/hashicorp/hcl/v2 v2.20.0 - github.com/inconshreveable/mousetrap v1.1.0 // indirect - github.com/leodido/go-urn v1.4.0 // indirect + github.com/hashicorp/hcl/v2 v2.20.1 github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 // indirect go.uber.org/atomic v1.11.0 // indirect ) require github.com/twmb/murmur3 v1.1.8 // indirect -require github.com/google/go-github/v57 v57.0.0 // indirect - require ( github.com/Masterminds/goutils v1.1.1 // indirect github.com/Masterminds/semver/v3 v3.2.1 // indirect + github.com/ProtonMail/go-crypto v1.1.0-alpha.2 // indirect github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a // indirect - github.com/apparentlymart/go-textseg/v13 v13.0.0 // indirect github.com/apparentlymart/go-textseg/v15 v15.0.0 // indirect github.com/aymerick/douceur v0.2.0 // indirect github.com/beorn7/perks v1.0.1 // indirect github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d // indirect github.com/cespare/xxhash/v2 v2.2.0 // indirect + github.com/cloudflare/circl v1.3.7 // indirect github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect + github.com/davidmz/go-pageant v1.0.2 // indirect github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f // indirect - github.com/fatih/color v1.15.0 // indirect + github.com/fatih/color v1.16.0 // indirect github.com/fsnotify/fsnotify v1.7.0 // indirect github.com/gabriel-vasile/mimetype v1.4.3 // indirect + github.com/go-fed/httpsig v1.1.0 // indirect + github.com/go-playground/locales v0.14.1 // indirect + github.com/go-playground/universal-translator v0.18.1 // indirect github.com/golang-jwt/jwt/v4 v4.5.0 // indirect github.com/golang/protobuf v1.5.3 // indirect github.com/google/go-cmp v0.6.0 // indirect + github.com/google/go-github/v60 v60.0.0 // indirect github.com/google/go-querystring v1.1.0 // indirect github.com/gorilla/css v1.0.0 // indirect - github.com/hashicorp/errwrap v1.1.0 // indirect + github.com/hashicorp/errwrap v1.0.0 // indirect github.com/hashicorp/go-cleanhttp v0.5.2 // indirect - github.com/hashicorp/go-retryablehttp v0.7.4 // indirect + github.com/hashicorp/go-retryablehttp v0.7.7 // indirect github.com/hashicorp/go-safetemp 
v1.0.0 // indirect github.com/hashicorp/hcl v1.0.0 // indirect github.com/huandu/xstrings v1.4.0 // indirect github.com/imdario/mergo v0.3.16 // indirect + github.com/inconshreveable/mousetrap v1.1.0 // indirect github.com/klauspost/compress v1.17.0 // indirect github.com/kr/text v0.2.0 // indirect + github.com/leodido/go-urn v1.4.0 // indirect github.com/magiconair/properties v1.8.7 // indirect github.com/mattn/go-colorable v0.1.13 // indirect - github.com/mattn/go-isatty v0.0.19 // indirect + github.com/mattn/go-isatty v0.0.20 // indirect github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect github.com/mitchellh/copystructure v1.2.0 // indirect github.com/mitchellh/go-testing-interface v1.14.1 // indirect @@ -120,18 +124,19 @@ require ( github.com/spf13/cast v1.6.0 // indirect github.com/subosito/gotenv v1.6.0 // indirect github.com/ulikunitz/xz v0.5.11 // indirect - github.com/yuin/gopher-lua v1.1.0 // indirect - github.com/zclconf/go-cty v1.13.2 // indirect + github.com/yuin/gopher-lua v1.1.1 // indirect + github.com/zclconf/go-cty v1.14.4 // indirect go.uber.org/multierr v1.11.0 // indirect - golang.org/x/crypto v0.19.0 // indirect - golang.org/x/exp v0.0.0-20230905200255-921286631fa9 // indirect - golang.org/x/mod v0.12.0 // indirect - golang.org/x/net v0.21.0 // indirect + golang.org/x/crypto v0.23.0 // indirect + golang.org/x/exp v0.0.0-20231006140011-7918f672742d // indirect + golang.org/x/mod v0.18.0 // indirect + golang.org/x/net v0.25.0 // indirect golang.org/x/oauth2 v0.15.0 // indirect - golang.org/x/sys v0.18.0 // indirect + golang.org/x/sync v0.7.0 // indirect + golang.org/x/sys v0.20.0 // indirect golang.org/x/time v0.5.0 // indirect - golang.org/x/tools v0.13.0 // indirect + golang.org/x/tools v0.21.0 // indirect google.golang.org/appengine v1.6.7 // indirect - google.golang.org/protobuf v1.31.0 // indirect + google.golang.org/protobuf v1.33.0 // indirect gopkg.in/ini.v1 v1.67.0 // indirect ) diff --git a/go.sum b/go.sum index df24849fc2..6021fe785e 100644 --- a/go.sum +++ b/go.sum @@ -30,10 +30,13 @@ cloud.google.com/go/storage v1.5.0/go.mod h1:tpKbwo567HUNpVclU5sGELwQWBDZ8gh0Zeo cloud.google.com/go/storage v1.6.0/go.mod h1:N7U0C8pVQ/+NIKOBQyamJIeKQKkZ+mxpohlUTyfDhBk= cloud.google.com/go/storage v1.8.0/go.mod h1:Wv1Oy7z6Yz3DshWRJFhqM/UCfaWIRTdp0RXyy7KQOVs= cloud.google.com/go/storage v1.10.0/go.mod h1:FLPqc6j+Ki4BU591ie1oL6qBQGu2Bl/tZ9ullr3+Kg0= +code.gitea.io/sdk/gitea v0.18.0 h1:+zZrwVmujIrgobt6wVBWCqITz6bn1aBjnCUHmpZrerI= +code.gitea.io/sdk/gitea v0.18.0/go.mod h1:IG9xZJoltDNeDSW0qiF2Vqx5orMWa7OhVWrjvrd5NpI= +dario.cat/mergo v1.0.0 h1:AGCNq9Evsj31mOgNPcLyXc+4PNABt905YmuqPYYpBWk= +dario.cat/mergo v1.0.0/go.mod h1:uNxQE+84aUszobStD9th8a29P2fMDhsBdgRYvZOxGmk= dmitri.shuralyov.com/gpu/mtl v0.0.0-20190408044501-666a987793e9/go.mod h1:H6x//7gZCb22OMCxBHrMx7a5I7Hp++hsVxbQ4BYO7hU= github.com/BurntSushi/toml v0.3.1/go.mod h1:xHWCNGjB5oqiDr8zfno3MHue2Ht5sIBksp03qcyfWMU= github.com/BurntSushi/xgb v0.0.0-20160522181843-27f122750802/go.mod h1:IVnqGOEym/WlBOVXweHU+Q+/VP0lqqI8lqeDx9IjBqo= -github.com/DmitriyVTitov/size v1.5.0/go.mod h1:le6rNI4CoLQV1b9gzp1+3d7hMAD/uu2QcJ+aYbNgiU0= github.com/Masterminds/goutils v1.1.1 h1:5nUrii3FMTL5diU80unEVvNevw1nH4+ZV4DSLVJLSYI= github.com/Masterminds/goutils v1.1.1/go.mod h1:8cTjp+g8YejhMuvIA5y2vz3BpJxksy863GQaJW2MFNU= github.com/Masterminds/semver/v3 v3.2.0/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= @@ -41,6 +44,10 @@ github.com/Masterminds/semver/v3 v3.2.1 h1:RN9w6+7QoMeJVGyfmbcgs28Br8cvmnucEXnY0 
github.com/Masterminds/semver/v3 v3.2.1/go.mod h1:qvl/7zhW3nngYb5+80sSMF+FG2BjYrf8m9wsX0PNOMQ= github.com/Masterminds/sprig/v3 v3.2.3 h1:eL2fZNezLomi0uOLqjQoN6BfsDD+fyLtgbJMAj9n6YA= github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBaRMhvYXJNkGuM= +github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow= +github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2 h1:bkyFVUP+ROOARdgCiJzNQo2V2kiB97LyUpzH9P6Hrlg= +github.com/ProtonMail/go-crypto v1.1.0-alpha.2/go.mod h1:rA3QumHc/FZ8pAHreoekgiAbzpNsfQAosU5td4SnOrE= github.com/agext/levenshtein v1.2.3 h1:YB2fHEn0UJagG8T1rrWknE3ZQzWM06O8AMAatNn7lmo= github.com/agext/levenshtein v1.2.3/go.mod h1:JEDfjyjHDjOF/1e4FlBE/PkbqA9OfWu2ki2W0IB5558= github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc= @@ -50,10 +57,8 @@ github.com/alecthomas/units v0.0.0-20190717042225-c3de453c63f4/go.mod h1:ybxpYRF github.com/alecthomas/units v0.0.0-20190924025748-f65c72e2690d/go.mod h1:rBZYJk541a8SKzHPHnH3zbiI+7dagKZ0cgpgrD7Fyho= github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a h1:HbKu58rmZpUGpz5+4FfNmIU+FmZg2P3Xaj2v2bfNWmk= github.com/alicebob/gopher-json v0.0.0-20200520072559-a9ecdc9d1d3a/go.mod h1:SGnFV6hVsYE877CKEZ6tDNTjaSXYUk6QqoIK6PrAtcc= -github.com/alicebob/miniredis/v2 v2.31.1 h1:7XAt0uUg3DtwEKW5ZAGa+K7FZV2DdKQo5K/6TTnfX8Y= -github.com/alicebob/miniredis/v2 v2.31.1/go.mod h1:UB/T2Uztp7MlFSDakaX1sTXUv5CASoprx0wulRT6HBg= -github.com/apparentlymart/go-textseg/v13 v13.0.0 h1:Y+KvPE1NYz0xl601PVImeQfFyEy6iT90AvPUL1NNfNw= -github.com/apparentlymart/go-textseg/v13 v13.0.0/go.mod h1:ZK2fH7c4NqDTLtiYLvIkEghdlcqw7yxLeM89kiTRPUo= +github.com/alicebob/miniredis/v2 v2.32.1 h1:Bz7CciDnYSaa0mX5xODh6GUITRSx+cVhjNoOR4JssBo= +github.com/alicebob/miniredis/v2 v2.32.1/go.mod h1:AqkLNAfUm0K07J28hnAyyQKf/x0YkCY/g5DCtuL01Mw= github.com/apparentlymart/go-textseg/v15 v15.0.0 h1:uYvfpb3DyLSCGWnctWKGj857c6ew1u1fNQOlOtuGxQY= github.com/apparentlymart/go-textseg/v15 v15.0.0/go.mod h1:K8XmNZdhEBkdlyDdvbmmsvpAG721bKi0joRfFdHIWJ4= github.com/asaskevich/govalidator v0.0.0-20230301143203-a9d515a09cc2 h1:DklsrG3dyBCFEj5IhUbnKptjxatkF07cF2ak3yi77so= @@ -66,10 +71,10 @@ github.com/beorn7/perks v1.0.1 h1:VlbKKnNfV8bJzeqoa4cOKqO6bYr3WgKZxO8Z16+hsOM= github.com/beorn7/perks v1.0.1/go.mod h1:G2ZrVWU2WbWT9wwq4/hrbKbnv/1ERSJQ0ibhJ6rlkpw= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d h1:xDfNPAt8lFiC1UJrqV3uuy861HCTo708pDMbjHHdCas= github.com/bgentry/go-netrc v0.0.0-20140422174119-9fd32a8b3d3d/go.mod h1:6QX/PXZ00z/TKoufEY6K/a0k6AhaJrQKdFe6OfVXsa4= -github.com/bradleyfalzon/ghinstallation/v2 v2.9.0 h1:HmxIYqnxubRYcYGRc5v3wUekmo5Wv2uX3gukmWJ0AFk= -github.com/bradleyfalzon/ghinstallation/v2 v2.9.0/go.mod h1:wmkTDJf8CmVypxE8ijIStFnKoTa6solK5QfdmJrP9KI= -github.com/briandowns/spinner v1.23.0 h1:alDF2guRWqa/FOZZYWjlMIx2L6H0wyewPxo/CH4Pt2A= -github.com/briandowns/spinner v1.23.0/go.mod h1:rPG4gmXeN3wQV/TsAY4w8lPdIM6RX3yqeBQJSrbXjuE= +github.com/bradleyfalzon/ghinstallation/v2 v2.10.0 h1:XWuWBRFEpqVrHepQob9yPS3Xg4K3Wr9QCx4fu8HbUNg= +github.com/bradleyfalzon/ghinstallation/v2 v2.10.0/go.mod h1:qoGA4DxWPaYTgVCrmEspVSjlTu4WYAiSxMIhorMRXXc= +github.com/briandowns/spinner v1.23.1 h1:t5fDPmScwUjozhDj4FA46p5acZWIPXYE30qW2Ptu650= +github.com/briandowns/spinner v1.23.1/go.mod h1:LaZeM4wm2Ywy6vO571mvhQNRcWfRUnXOs0RcKV0wYKM= github.com/bsm/ginkgo/v2 v2.12.0 
h1:Ny8MWAHyOepLGlLKYmXG4IEkioBysk6GpaRTLC8zwWs= github.com/bsm/ginkgo/v2 v2.12.0/go.mod h1:SwYbGRRDovPVboqFv0tPTcG1sN61LM1Z4ARdbAV9g4c= github.com/bsm/gomega v1.27.10 h1:yeMWxP2pV2fG3FgAODIY8EiRE3dy0aeFYt4l7wh6yKA= @@ -86,27 +91,43 @@ github.com/chzyer/logex v1.1.10/go.mod h1:+Ywpsq7O8HXn0nuIou7OrIPyXbp3wmkHB+jjWR github.com/chzyer/readline v0.0.0-20180603132655-2972be24d48e/go.mod h1:nSuG5e5PlCu98SY8svDHJxuZscDgtXS6KTTbou5AhLI= github.com/chzyer/test v0.0.0-20180213035817-a1ea475d72b1/go.mod h1:Q3SI9o4m/ZMnBNeIyt5eFwwo7qiLfzFZmjNmxjkiQlU= github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw= +github.com/cloudflare/circl v1.3.7 h1:qlCDlTPz2n9fu58M0Nh1J/JzcFpfgkFHHX3O35r5vcU= +github.com/cloudflare/circl v1.3.7/go.mod h1:sRTcRWXGLrKw6yIGJ+l7amYJFfAXbZG0kBSc8r4zxgA= github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc= github.com/cpuguy83/go-md2man/v2 v2.0.3/go.mod h1:tgQtvFlXSQOSOSIRvRPT7W67SCa46tRHOmNcaadrF8o= github.com/creack/pty v1.1.9/go.mod h1:oKZEueFk5CKHvIhNR5MUki03XCEU+Q6VDXinZuGJ33E= +github.com/cyphar/filepath-securejoin v0.2.4 h1:Ugdm7cg7i6ZK6x3xDF1oEu1nfkyfH53EtKeQYTC3kyg= +github.com/cyphar/filepath-securejoin v0.2.4/go.mod h1:aPGpWjXOXUn2NCNjFvBE6aRxGGx79pTxQpKOJNYHHl4= github.com/davecgh/go-spew v1.1.0/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1VwoXQT9A3Wy9MM3WgvqSxFWenqJduM= github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38= +github.com/davidmz/go-pageant v1.0.2 h1:bPblRCh5jGU+Uptpz6LgMZGD5hJoOt7otgT454WvHn0= +github.com/davidmz/go-pageant v1.0.2/go.mod h1:P2EDDnMqIwG5Rrp05dTRITj9z2zpGcD9efWSkTNKLIE= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f h1:lO4WD4F/rVNCu3HqELle0jiPLLBs70cWOduZpkS1E78= github.com/dgryski/go-rendezvous v0.0.0-20200823014737-9f7001d12a5f/go.mod h1:cuUVRXasLTGF7a8hSLbxyZXjz+1KgoB3wDUb6vlszIc= +github.com/emirpasic/gods v1.18.1 h1:FXtiHYKDGKCW2KzwZKx0iC0PQmdlorYgdFG9jPXJ1Bc= +github.com/emirpasic/gods v1.18.1/go.mod h1:8tpGGwCnJ5H4r6BWwaV6OrWmMoPhUl5jm/FMNAnJvWQ= github.com/envoyproxy/go-control-plane v0.9.0/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.1-0.20191026205805-5f8ba28d4473/go.mod h1:YTl/9mNaCwkRvm6d1a2C3ymFceY/DCBVvsKhRF0iEA4= github.com/envoyproxy/go-control-plane v0.9.4/go.mod h1:6rpuAdCZL397s3pYoYcLgu1mIlRU8Am5FuJP05cCM98= github.com/envoyproxy/protoc-gen-validate v0.1.0/go.mod h1:iSmxcyjqTsJpI2R4NaDN7+kN2VEUnK/pcBlmesArF7c= -github.com/fatih/color v1.15.0 h1:kOqh6YHBtK8aywxGerMG2Eq3H6Qgoqeo13Bk2Mv/nBs= -github.com/fatih/color v1.15.0/go.mod h1:0h5ZqXfHYED7Bhv2ZJamyIOUej9KtShiJESRwBDUSsw= +github.com/fatih/color v1.16.0 h1:zmkK9Ngbjj+K0yRhTVONQh1p/HknKYSlNT+vZCzyokM= +github.com/fatih/color v1.16.0/go.mod h1:fL2Sau1YI5c0pdGEVCbKQbLXB6edEj1ZgiY4NijnWvE= github.com/frankban/quicktest v1.14.6 h1:7Xjx+VpznH+oBnejlPUj8oUpdxnVs4f8XU8WnHkI4W8= github.com/frankban/quicktest v1.14.6/go.mod h1:4ptaffx2x8+WTWXmUCuVU6aPUX1/Mz7zb5vbUoiM6w0= github.com/fsnotify/fsnotify v1.7.0 h1:8JEhPFa5W2WU7YfeZzPNqzMP6Lwt7L2715Ggo0nosvA= github.com/fsnotify/fsnotify v1.7.0/go.mod h1:40Bi/Hjc2AVfZrqy+aj+yEI+/bRxZnMJyTJwOpGvigM= github.com/gabriel-vasile/mimetype v1.4.3 h1:in2uUcidCuFcDKtdcBxlR0rJ1+fsokWf+uqxgUFjbI0= github.com/gabriel-vasile/mimetype 
v1.4.3/go.mod h1:d8uq/6HKRL6CGdk+aubisF/M5GcPfT7nKyLpA0lbSSk= +github.com/go-fed/httpsig v1.1.0 h1:9M+hb0jkEICD8/cAiNqEB66R87tTINszBRTjwjQzWcI= +github.com/go-fed/httpsig v1.1.0/go.mod h1:RCMrTZvN1bJYtofsG4rd5NaO5obxQ5xBkdiS7xsT7bM= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376 h1:+zs/tPmkDkHx3U66DAb0lQFJrpS6731Oaa12ikc+DiI= +github.com/go-git/gcfg v1.5.1-0.20230307220236-3a3c6141e376/go.mod h1:an3vInlBmSxCcxctByoQdvwPiA7DTK7jaaFDBTtu0ic= +github.com/go-git/go-billy/v5 v5.5.0 h1:yEY4yhzCDuMGSv83oGxiBotRzhwhNr8VZyphhiu+mTU= +github.com/go-git/go-billy/v5 v5.5.0/go.mod h1:hmexnoNsr2SJU1Ju67OaNz5ASJY3+sHgFRpCtpDCKow= +github.com/go-git/go-git/v5 v5.12.0 h1:7Md+ndsjrzZxbddRDZjF14qK+NN56sy6wkqaVrjZtys= +github.com/go-git/go-git/v5 v5.12.0/go.mod h1:FTM9VKtnI2m65hNI/TenDDDnUf2Q9FHnXYjuz9i5OEY= github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20191125211704-12ad95a8df72/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= github.com/go-gl/glfw/v3.3/glfw v0.0.0-20200222043503-6f7a984d4dc4/go.mod h1:tQ2UAYgL5IevRw8kRxooKSPJfGvJ9fJQFa0TUsXzTg8= @@ -145,6 +166,7 @@ github.com/golang/glog v0.0.0-20160126235308-23def4e6c14b/go.mod h1:SBH7ygxi8pfU github.com/golang/groupcache v0.0.0-20190702054246-869f871628b6/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20191227052852-215e87163ea7/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/groupcache v0.0.0-20200121045136-8c9f03a8e57e/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= +github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da h1:oI5xCqsCo564l8iNU+DwB5epxmsaqB+rhGL0m5jtYqE= github.com/golang/groupcache v0.0.0-20210331224755-41bb18bfe9da/go.mod h1:cIg4eruTrX1D+g88fzRXU5OdNfaM+9IcxsU14FzY7Hc= github.com/golang/mock v1.1.1/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= github.com/golang/mock v1.2.0/go.mod h1:oTYuIxOrZwtPieC+H1uAHpcLFnEyAGVDL/k47Jfbm0A= @@ -186,10 +208,10 @@ github.com/google/go-cmp v0.5.5/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/ github.com/google/go-cmp v0.5.7/go.mod h1:n+brtR0CgQNWTVd5ZUFpTBC8YFBDLK/h/bpaJ8/DtOE= github.com/google/go-cmp v0.6.0 h1:ofyhxvXcZhMsU5ulbFiLKl/XBFqE1GSq7atu8tAmTRI= github.com/google/go-cmp v0.6.0/go.mod h1:17dUlkBOakJ0+DkrSSNjCkIjxS6bF9zb3elmeNGIjoY= -github.com/google/go-github/v57 v57.0.0 h1:L+Y3UPTY8ALM8x+TV0lg+IEBI+upibemtBD8Q9u7zHs= -github.com/google/go-github/v57 v57.0.0/go.mod h1:s0omdnye0hvK/ecLvpsGfJMiRt85PimQh4oygmLIxHw= github.com/google/go-github/v59 v59.0.0 h1:7h6bgpF5as0YQLLkEiVqpgtJqjimMYhBkD4jT5aN3VA= github.com/google/go-github/v59 v59.0.0/go.mod h1:rJU4R0rQHFVFDOkqGWxfLNo6vEk4dv40oDjhV/gH6wM= +github.com/google/go-github/v60 v60.0.0 h1:oLG98PsLauFvvu4D/YPxq374jhSxFYdzQGNCyONLfn8= +github.com/google/go-github/v60 v60.0.0/go.mod h1:ByhX2dP9XT9o/ll2yXAu2VD8l5eNVg8hD4Cr0S/LmQk= github.com/google/go-querystring v1.0.0/go.mod h1:odCYkC5MyYFN7vkCjXpyrEuKhc/BUO6wN/zVPAxq5ck= github.com/google/go-querystring v1.1.0 h1:AnCroh3fv4ZBgVIf1Iwtovgjaw/GiKJo8M8yD/fhyJ8= github.com/google/go-querystring v1.1.0/go.mod h1:Kcdr2DB4koayq7X8pmAG4sNG59So17icRSOU623lUBU= @@ -218,36 +240,36 @@ github.com/gorilla/css v1.0.0/go.mod h1:Dn721qIggHpt4+EFCcTLTU/vk5ySda2ReITrtgBl github.com/gorilla/mux v1.8.1 h1:TuBL49tXwgrFYWhqrNgrUNEY92u81SPhu7sTdzQEiWY= github.com/gorilla/mux v1.8.1/go.mod h1:AKf9I4AEqPTmMytcMc0KkNouC66V3BtZ4qD5fmWSiMQ= github.com/gorilla/websocket v1.4.2/go.mod 
h1:YR8l580nyteQvAITg2hZ9XVh4b55+EU/adAjf1fMHhE= -github.com/gorilla/websocket v1.5.1 h1:gmztn0JnHVt9JZquRuzLw3g4wouNVzKL15iLr/zn/QY= -github.com/gorilla/websocket v1.5.1/go.mod h1:x3kM2JMyaluk02fnUJpQuwD2dCS5NDG2ZHL0uE0tcaY= +github.com/gorilla/websocket v1.5.2 h1:qoW6V1GT3aZxybsbC6oLnailWnB+qTMVwMreOso9XUw= +github.com/gorilla/websocket v1.5.2/go.mod h1:0n9H61RBAcf5/38py2MCYbxzPIY9rOkpvvMT24Rqs30= +github.com/hashicorp/errwrap v1.0.0 h1:hLrqtEDnRye3+sgx6z4qVLNuviH3MR5aQ0ykNJa/UYA= github.com/hashicorp/errwrap v1.0.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= -github.com/hashicorp/errwrap v1.1.0 h1:OxrOeh75EUXMY8TBjag2fzXGZ40LB6IKw45YeGUDY2I= -github.com/hashicorp/errwrap v1.1.0/go.mod h1:YH+1FKiLXxHSkmPseP+kNlulaMuP3n2brvKWEqk/Jc4= github.com/hashicorp/go-cleanhttp v0.5.2 h1:035FKYIWjmULyFRBKPs8TBQoi0x6d9G4xc9neXJWAZQ= github.com/hashicorp/go-cleanhttp v0.5.2/go.mod h1:kO/YDlP8L1346E6Sodw+PrpBSV4/SoxCXGY6BqNFT48= -github.com/hashicorp/go-getter/v2 v2.2.1 h1:2JXqPZs1Jej67RtdTi0YZaEB2hEFB3fkBA4cPYKQwFQ= -github.com/hashicorp/go-getter/v2 v2.2.1/go.mod h1:EcJx6oZE8hmGuRR1l38QrfnyiujQbwsEAn11eHv6l2M= -github.com/hashicorp/go-hclog v0.9.2/go.mod h1:5CU+agLiy3J7N7QjHK5d05KxGsuXiQLrjA0H7acj2lQ= -github.com/hashicorp/go-hclog v1.5.0 h1:bI2ocEMgcVlz55Oj1xZNBsVi900c7II+fWDyV9o+13c= -github.com/hashicorp/go-hclog v1.5.0/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= +github.com/hashicorp/go-getter/v2 v2.2.2 h1:Al5bzCNW5DrlZMK6TumGrSue7Xz8beyLcen+4N4erwo= +github.com/hashicorp/go-getter/v2 v2.2.2/go.mod h1:hp5Yy0GMQvwWVUmwLs3ygivz1JSLI323hdIE9J9m7TY= +github.com/hashicorp/go-hclog v1.6.3 h1:Qr2kF+eVWjTiYmU7Y31tYlP1h0q/X3Nl3tPGdaB11/k= +github.com/hashicorp/go-hclog v1.6.3/go.mod h1:W4Qnvbt70Wk/zYJryRzDRU/4r0kIg0PVHBcfoyhpF5M= github.com/hashicorp/go-multierror v1.1.1 h1:H5DkEtf6CXdFp0N0Em5UCwQpXMWke8IA0+lD48awMYo= github.com/hashicorp/go-multierror v1.1.1/go.mod h1:iw975J/qwKPdAO1clOe2L8331t/9/fmwbPZ6JB6eMoM= -github.com/hashicorp/go-retryablehttp v0.7.4 h1:ZQgVdpTdAL7WpMIwLzCfbalOcSUdkDZnpUv3/+BxzFA= -github.com/hashicorp/go-retryablehttp v0.7.4/go.mod h1:Jy/gPYAdjqffZ/yFGCFV2doI5wjtH1ewM9u8iYVjtX8= +github.com/hashicorp/go-retryablehttp v0.7.7 h1:C8hUCYzor8PIfXHa4UrZkU4VvK8o9ISHxT2Q8+VepXU= +github.com/hashicorp/go-retryablehttp v0.7.7/go.mod h1:pkQpWZeYWskR+D1tR2O5OcBFOxfA7DoAO6xtkuQnHTk= github.com/hashicorp/go-safetemp v1.0.0 h1:2HR189eFNrjHQyENnQMMpCiBAsRxzbTMIgBhEyExpmo= github.com/hashicorp/go-safetemp v1.0.0/go.mod h1:oaerMy3BhqiTbVye6QuFhFtIceqFoDHxNAB65b+Rj1I= -github.com/hashicorp/go-version v1.6.0 h1:feTTfFNnjP967rlCxM/I9g701jU+RN74YKx2mOkIeek= -github.com/hashicorp/go-version v1.6.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= +github.com/hashicorp/go-version v1.7.0 h1:5tqGy27NaOTB8yJKUZELlFAS/LTKJkrmONwQKeRZfjY= +github.com/hashicorp/go-version v1.7.0/go.mod h1:fltr4n8CU8Ke44wwGCBoEymUuxUHl09ZGVZPK5anwXA= github.com/hashicorp/golang-lru v0.5.0/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru v0.5.1/go.mod h1:/m3WP610KZHVQ1SGc6re/UDhFvYD7pJ4Ao+sR/qLZy8= github.com/hashicorp/golang-lru/v2 v2.0.7 h1:a+bsQ5rvGLjzHuww6tVxozPZFVghXaHOwFs4luLUK2k= github.com/hashicorp/golang-lru/v2 v2.0.7/go.mod h1:QeFd9opnmA6QUJc5vARoKUSoFhyfM2/ZepoAG6RGpeM= +github.com/hashicorp/hc-install v0.7.1-0.20240607080111-03e0bd63529f h1:vRnx/KymAhdkO7+m1JgBWnf4B4C3Ex9AYXg3jOlll+8= +github.com/hashicorp/hc-install v0.7.1-0.20240607080111-03e0bd63529f/go.mod h1:wGjPKFtnckoLzR4SyYDx0fDId27R6Mb4F5xiVzHUBjU= github.com/hashicorp/hcl v1.0.0 
h1:0Anlzjpi4vEasTeNFn2mLJgTSwt0+6sfsiTG8qcWGx4= github.com/hashicorp/hcl v1.0.0/go.mod h1:E5yfLk+7swimpb2L/Alb/PJmXilQ/rhwaUYs4T20WEQ= -github.com/hashicorp/hcl/v2 v2.20.0 h1:l++cRs/5jQOiKVvqXZm/P1ZEfVXJmvLS9WSVxkaeTb4= -github.com/hashicorp/hcl/v2 v2.20.0/go.mod h1:WmcD/Ym72MDOOx5F62Ly+leloeu6H7m0pG7VBiU6pQk= -github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72 h1:nZ5gGjbe5o7XUu1d7j+Y5Ztcxlp+yaumTKH9i0D3wlg= -github.com/hashicorp/terraform-config-inspect v0.0.0-20231204233900-a34142ec2a72/go.mod h1:l8HcFPm9cQh6Q0KSWoYPiePqMvRFenybP1CH2MjKdlg= +github.com/hashicorp/hcl/v2 v2.20.1 h1:M6hgdyz7HYt1UN9e61j+qKJBqR3orTWbI1HKBJEdxtc= +github.com/hashicorp/hcl/v2 v2.20.1/go.mod h1:TZDqQ4kNKCbh1iJp99FdPiUaVDDUPivbqxZulxDYqL4= +github.com/hashicorp/terraform-config-inspect v0.0.0-20240607080351-271db412dbcb h1:6gCfY5aQdQgRr0G5VDjnV5ENpd+hTamWaZfVz+lJ724= +github.com/hashicorp/terraform-config-inspect v0.0.0-20240607080351-271db412dbcb/go.mod h1:Gz/z9Hbn+4KSp8A2FBtNszfLSdT2Tn/uAKGuVqqWmDI= github.com/huandu/xstrings v1.3.3/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= github.com/huandu/xstrings v1.4.0 h1:D17IlohoQq4UcpqD7fDk80P7l+lwAmlFaBHgOipl2FU= github.com/huandu/xstrings v1.4.0/go.mod h1:y5/lhBue+AyNmUVz9RLU9xbLR0o4KIIExikq4ovT0aE= @@ -257,7 +279,10 @@ github.com/imdario/mergo v0.3.16 h1:wwQJbIsHYGMUyLSPrEq1CT16AhnhNJQ51+4fdHUnCl4= github.com/imdario/mergo v0.3.16/go.mod h1:WBLT9ZmE3lPoWsEzCh9LPo3TiwVN+ZKEjmz+hD27ysY= github.com/inconshreveable/mousetrap v1.1.0 h1:wN+x4NVGpMsO7ErUn/mUI3vEoE6Jt13X2s0bqwp9tc8= github.com/inconshreveable/mousetrap v1.1.0/go.mod h1:vpF70FUmC8bwa3OWnCshd2FqLfsEA9PFc4w1p2J65bw= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99 h1:BQSFePA1RWJOlocH6Fxy8MmwDt+yVQYULKfN0RoTN8A= +github.com/jbenet/go-context v0.0.0-20150711004518-d14ea06fba99/go.mod h1:1lJo3i6rXxKeerYnT8Nvf0QmHCRC1n8sfWVwXF2Frvo= github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI= +github.com/jpillora/backoff v1.0.0 h1:uvFg412JmmHBHw7iwprIxkPMI+sGQ4kzOWsMeHnm2EA= github.com/jpillora/backoff v1.0.0/go.mod h1:J/6gKK9jxlEcS3zixgDgUAsiuZ7yrSoa/FX5e0EB2j4= github.com/json-iterator/go v1.1.6/go.mod h1:+SdeFBvtyEkXs7REEP0seUULqWtbJapLOCVDaaPEHmU= github.com/json-iterator/go v1.1.10/go.mod h1:KdQUCv79m/52Kvf8AW2vK1V8akMuk1QjK/uOdHXbAo4= @@ -267,6 +292,8 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1 github.com/jstemmer/go-junit-report v0.9.1/go.mod h1:Brl9GWCQeLvo8nXZwPNNblvFj/XSXhF0NWZEnDohbsk= github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w= github.com/julienschmidt/httprouter v1.3.0/go.mod h1:JR6WtHb+2LUe8TCKY3cZOxFyyO8IZAc4RVcycCCAKdM= +github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4= +github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM= github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck= github.com/klauspost/compress v1.17.0 h1:Rnbp4K9EjcDuVuHtd0dgA4qNuv9yKDYKK1ulpJwgrqM= github.com/klauspost/compress v1.17.0/go.mod h1:ntbaceVETuRiXiv4DpjP66DpAtAGkEQskQzEyD//IeE= @@ -280,8 +307,6 @@ github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ= github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI= github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY= github.com/kr/text v0.2.0/go.mod h1:eLer722TekiGuMkidMxC/pM04lWEeraHUUmBw8l2grE= 
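Much of the checksum churn here tracks go.mod dropping github.com/warrensbox/terraform-switcher in favor of github.com/hashicorp/hc-install for fetching Terraform binaries. For orientation, this is a sketch of hc-install's documented Ensure flow, not Atlantis's actual wiring; the pinned 1.7.4 version is just an example value.

package main

import (
	"context"
	"fmt"
	"log"

	"github.com/hashicorp/go-version"
	install "github.com/hashicorp/hc-install"
	"github.com/hashicorp/hc-install/product"
	"github.com/hashicorp/hc-install/releases"
	"github.com/hashicorp/hc-install/src"
)

func main() {
	ctx := context.Background()
	installer := install.NewInstaller()
	// Remove cleans up whatever Ensure installed (typically a temp dir).
	defer installer.Remove(ctx)

	// Ensure resolves the sources in order; ExactVersion downloads the
	// requested build from releases.hashicorp.com.
	execPath, err := installer.Ensure(ctx, []src.Source{
		&releases.ExactVersion{
			Product: product.Terraform,
			Version: version.Must(version.NewVersion("1.7.4")),
		},
	})
	if err != nil {
		log.Fatal(err)
	}
	fmt.Println("terraform available at", execPath)
}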
-github.com/kylelemons/godebug v1.1.0 h1:RPNrshWIDI6G2gRW9EHilWtl7Z6Sb1BR0xunSBf0SNc= -github.com/kylelemons/godebug v1.1.0/go.mod h1:9/0rRGxNHcop5bhtWyNeEfOS8JIWk580+fNqagV/RAw= github.com/leodido/go-urn v1.4.0 h1:WT9HwE9SGECu3lg4d/dIA+jxlljEa1/ffXKmRjqdmIQ= github.com/leodido/go-urn v1.4.0/go.mod h1:bvxc+MVxLKB4z00jd1z+Dvzr47oO32F/QSNjSBOlFxI= github.com/magiconair/properties v1.8.7 h1:IeQXZAiQcpL9mgcAe1Nu6cX9LLw6ExEHKjN0VQdvPDY= @@ -289,8 +314,8 @@ github.com/magiconair/properties v1.8.7/go.mod h1:Dhd985XPs7jluiymwWYZ0G4Z61jb3v github.com/mattn/go-colorable v0.1.13 h1:fFA4WZxdEF4tXPZVKMLwD8oUnCTTo08duU7wxecdEvA= github.com/mattn/go-colorable v0.1.13/go.mod h1:7S9/ev0klgBDR4GtXTXX8a3vIGJpMovkB8vQcUbaXHg= github.com/mattn/go-isatty v0.0.16/go.mod h1:kYGgaQfpe5nmfYZH+SKPsOc2e4SrIfOl2e/yFXSvRLM= -github.com/mattn/go-isatty v0.0.19 h1:JITubQf0MOLdlGRuRq+jtsDlekdYPia9ZFsB8h/APPA= -github.com/mattn/go-isatty v0.0.19/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= +github.com/mattn/go-isatty v0.0.20 h1:xfD0iDuEKnDkl03q4limB+vH+GxLEtL/jb4xVJSWWEY= +github.com/mattn/go-isatty v0.0.20/go.mod h1:W+V8PltTTMOvKvAeJH7IuucS94S2C6jfK/D7dTCTo3Y= github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0= github.com/matttproud/golang_protobuf_extensions v1.0.4 h1:mmDVorXM7PCGKw94cs5zkfA9PSy5pEvNWRP0ET0TIVo= github.com/matttproud/golang_protobuf_extensions v1.0.4/go.mod h1:BSXmuO+STAnVfrANrmjBb36TMTDstsz7MSK+HVaYKv4= @@ -333,6 +358,8 @@ github.com/pelletier/go-toml/v2 v2.1.0 h1:FnwAJ4oYMvbT/34k9zzHuZNrhlz48GB3/s6at6 github.com/pelletier/go-toml/v2 v2.1.0/go.mod h1:tJU2Z3ZkXwnxa4DPO899bsyIoywizdUvyaeZurnPPDc= github.com/petergtz/pegomock/v4 v4.0.0 h1:BIGMUof4NXc+xBbuFk0VBfK5Ls7DplcP+LWz4hfYWsY= github.com/petergtz/pegomock/v4 v4.0.0/go.mod h1:Xscaw/kXYcuh9sGsns+If19FnSMMQy4Wz60YJTn3XOU= +github.com/pjbgf/sha1cd v0.3.0 h1:4D5XXmUUBUl/xQ6IjCkEAbqXskkq/4O7LmGn0AqMDs4= +github.com/pjbgf/sha1cd v0.3.0/go.mod h1:nZ1rrWOcGJ5uZgEEVL1VUM9iRQiZvWdbZjkKyFzPPsI= github.com/pkg/diff v0.0.0-20210226163009-20ebb0f2a09e/go.mod h1:pJLUxLENpZxwdsKMEsNbx1VGcRFpLqf3715MtcvvzbA= github.com/pkg/errors v0.8.0/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= github.com/pkg/errors v0.8.1/go.mod h1:bwawxfHBFNV+L2hUp1rHADufV3IMtnDRdf1r5NINEl0= @@ -365,8 +392,8 @@ github.com/prometheus/procfs v0.6.0/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1 github.com/prometheus/procfs v0.7.3/go.mod h1:cz+aTbrPOrUb4q7XlbU9ygM+/jj0fzG6c1xBZuNvfVA= github.com/prometheus/procfs v0.10.1 h1:kYK1Va/YMlutzCGazswoHKo//tZVlFpKYh+PymziUAg= github.com/prometheus/procfs v0.10.1/go.mod h1:nwNm2aOCAYw8uTR/9bWRREkZFxAUcWzPHWJq+XBB/FM= -github.com/redis/go-redis/v9 v9.5.1 h1:H1X4D3yHPaYrkL5X06Wh6xNVM/pX0Ft4RV0vMGvLBh8= -github.com/redis/go-redis/v9 v9.5.1/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= +github.com/redis/go-redis/v9 v9.5.2 h1:L0L3fcSNReTRGyZ6AqAEN0K56wYeYAwapBIhkvh0f3E= +github.com/redis/go-redis/v9 v9.5.2/go.mod h1:hdY0cQFCN4fnSYT6TkisLufl/4W5UIXyv0b/CLO2V2M= github.com/remeh/sizedwaitgroup v1.0.0 h1:VNGGFwNo/R5+MJBf6yrsr110p0m4/OX4S3DCy7Kyl5E= github.com/remeh/sizedwaitgroup v1.0.0/go.mod h1:3j2R4OIe/SeS6YDhICBy22RWjJC5eNCJ1V+9+NVNYlo= github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4= @@ -379,18 +406,20 @@ github.com/sagikazarmark/slog-shim v0.1.0 h1:diDBnUNK9N/354PgrxMywXnAwEr1QZcOr6g github.com/sagikazarmark/slog-shim v0.1.0/go.mod h1:SrcSrq8aKtyuqEI1uvTDTK1arOWRIczQRv+GVI1AkeQ= github.com/samber/lo v1.38.1 
h1:j2XEAqXKb09Am4ebOg31SpvzUTTs6EN3VfgeLUhPdXM= github.com/samber/lo v1.38.1/go.mod h1:+m/ZKRl6ClXCE2Lgf3MsQlWfh4bn1bz6CXEOxnEXnEA= -github.com/sergi/go-diff v1.0.0 h1:Kpca3qRNrduNnOQeazBd0ysaKrUJiIuISHxogkT9RPQ= -github.com/sergi/go-diff v1.0.0/go.mod h1:0CfEIISq7TuYL3j771MWULgwwjU+GofnZX9QAmXWZgo= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3 h1:n661drycOFuPLCN3Uc8sB6B/s6Z4t2xvBgU1htSHuq8= +github.com/sergi/go-diff v1.3.2-0.20230802210424-5b0b94c5c0d3/go.mod h1:A0bzQcvG0E7Rwjx0REVgAGH58e96+X0MeOfepqsbeW4= github.com/shopspring/decimal v1.2.0/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= github.com/shopspring/decimal v1.3.1 h1:2Usl1nmF/WZucqkFZhnfFYxxxu8LG21F6nPQBE5gKV8= github.com/shopspring/decimal v1.3.1/go.mod h1:DKyhrW/HYNuLGql+MJL6WCR6knT2jwCFRcu2hWCYk4o= -github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc h1:vH0NQbIDk+mJLvBliNGfcQgUmhlniWBDXC79oRxfZA0= -github.com/shurcooL/githubv4 v0.0.0-20240120211514-18a1ae0e79dc/go.mod h1:zqMwyHmnN/eDOZOdiTohqIUKUrTFX62PNlu7IJdu0q8= +github.com/shurcooL/githubv4 v0.0.0-20240429030203-be2daab69064 h1:RCQBSFx5JrsbHltqTtJ+kN3U0Y3a/N/GlVdmRSoxzyE= +github.com/shurcooL/githubv4 v0.0.0-20240429030203-be2daab69064/go.mod h1:zqMwyHmnN/eDOZOdiTohqIUKUrTFX62PNlu7IJdu0q8= github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29 h1:B1PEwpArrNp4dkQrfxh/abbBAOZBVp0ds+fBEOUOqOc= github.com/shurcooL/graphql v0.0.0-20220606043923-3cf50f8a0a29/go.mod h1:AuYgA5Kyo4c7HfUmvRGs/6rGlMMV/6B1bVnB9JxJEEg= github.com/sirupsen/logrus v1.2.0/go.mod h1:LxeOpSwHxABJmUn/MG1IvRgCAasNZTLOkJPxbbu5VWo= github.com/sirupsen/logrus v1.4.2/go.mod h1:tLMulIdttU9McNUspp0xgXVQah82FyeX6MwdIuYE2rE= github.com/sirupsen/logrus v1.6.0/go.mod h1:7uNnSEd1DgxDLC74fIahvMZmmYsHGZGEOFrfsX/uA88= +github.com/skeema/knownhosts v1.2.2 h1:Iug2P4fLmDw9f41PB6thxUkNUkJzB5i+1/exaj40L3A= +github.com/skeema/knownhosts v1.2.2/go.mod h1:xYbVRSPxqBZFrdmDyMmsOs+uX1UZC3nTN3ThzgDxUwo= github.com/slack-go/slack v0.12.5 h1:ddZ6uz6XVaB+3MTDhoW04gG+Vc/M/X1ctC+wssy2cqs= github.com/slack-go/slack v0.12.5/go.mod h1:hlGi5oXA+Gt+yWTPP0plCdRKmjsDxecdHxYQdlMQKOw= github.com/sourcegraph/conc v0.3.0 h1:OQTbbt6P72L20UqAkXXuLOj79LfEanQ+YQFNpLA9ySo= @@ -430,20 +459,22 @@ github.com/ulikunitz/xz v0.5.11 h1:kpFauv27b6ynzBNT/Xy+1k+fK4WswhN/6PN5WhFAGw8= github.com/ulikunitz/xz v0.5.11/go.mod h1:nbz6k7qbPmH4IRqmfOplQw/tblSgqTqBwxkY0oWt/14= github.com/urfave/negroni/v3 v3.1.0 h1:lzmuxGSpnJCT/ujgIAjkU3+LW3NX8alCglO/L6KjIGQ= github.com/urfave/negroni/v3 v3.1.0/go.mod h1:jWvnX03kcSjDBl/ShB0iHvx5uOs7mAzZXW+JvJ5XYAs= -github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605 h1:bRt3KvPapqnO3s9XenyU4COpU9X7cNW3BMELyHRxuSs= -github.com/warrensbox/terraform-switcher v0.1.1-0.20230206012955-d7dfd1b44605/go.mod h1:saryXNaL624mlulV138FP+HhVw7IpvETUXLS3nTvH1g= -github.com/xanzy/go-gitlab v0.99.0 h1:0W5dmFQejPlqnScZoGRXNPmx+evOxBMk50P40cxlnWU= -github.com/xanzy/go-gitlab v0.99.0/go.mod h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI= +github.com/xanzy/go-gitlab v0.102.0 h1:ExHuJ1OTQ2yt25zBMMj0G96ChBirGYv8U7HyUiYkZ+4= +github.com/xanzy/go-gitlab v0.102.0/go.mod h1:ETg8tcj4OhrB84UEgeE8dSuV/0h4BBL1uOV/qK0vlyI= +github.com/xanzy/ssh-agent v0.3.3 h1:+/15pJfg/RsTxqYcX6fHqOXZwwMP+2VyYWJeWM2qQFM= +github.com/xanzy/ssh-agent v0.3.3/go.mod h1:6dzNDKs0J9rVPHPhaGCukekBHKqfl+L3KghI1Bc68Uw= github.com/yuin/goldmark v1.1.25/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.1.27/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark 
v1.1.32/go.mod h1:3hX8gzYuyVAZsxl0MRgGTJEmQBFcNTphYh9decYSb74= github.com/yuin/goldmark v1.4.13/go.mod h1:6yULJ656Px+3vBD8DxQVa3kxgyrAnzto9xy5taEt/CY= -github.com/yuin/gopher-lua v1.1.0 h1:BojcDhfyDWgU2f2TOzYK/g5p2gxMrku8oupLDqlnSqE= -github.com/yuin/gopher-lua v1.1.0/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= -github.com/zclconf/go-cty v1.13.2 h1:4GvrUxe/QUDYuJKAav4EYqdM47/kZa672LwmXFmEKT0= -github.com/zclconf/go-cty v1.13.2/go.mod h1:YKQzy/7pZ7iq2jNFzy5go57xdxdWoLLpaEp4u238AE0= -go.etcd.io/bbolt v1.3.9 h1:8x7aARPEXiXbHmtUwAIv7eV2fQFHrLLavdiJ3uzJXoI= -go.etcd.io/bbolt v1.3.9/go.mod h1:zaO32+Ti0PK1ivdPtgMESzuzL2VPoIG1PCQNvOdo/dE= +github.com/yuin/gopher-lua v1.1.1 h1:kYKnWBjvbNP4XLT3+bPEwAXJx262OhaHDWDVOPjL46M= +github.com/yuin/gopher-lua v1.1.1/go.mod h1:GBR0iDaNXjAgGg9zfCvksxSRnQx76gclCIb7kdAd1Pw= +github.com/zclconf/go-cty v1.14.4 h1:uXXczd9QDGsgu0i/QFR/hzI5NYCHLf6NQw/atrbnhq8= +github.com/zclconf/go-cty v1.14.4/go.mod h1:VvMs5i0vgZdhYawQNq5kePSpLAoz8u1xvZgrPIxfnZE= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b h1:FosyBZYxY34Wul7O/MSKey3txpPYyCqVO5ZyceuQJEI= +github.com/zclconf/go-cty-debug v0.0.0-20191215020915-b22d67c1ba0b/go.mod h1:ZRKQfBXbGkpdV6QMzT3rU1kSTAnfu1dO8dPKjYprgj8= +go.etcd.io/bbolt v1.3.10 h1:+BqfJTcCzTItrop8mq/lbzL8wSGtj94UO/3U31shqG0= +go.etcd.io/bbolt v1.3.10/go.mod h1:bK3UQLPJZly7IlNmV7uVHJDxfe5aK9Ll93e/74Y9oEQ= go.opencensus.io v0.21.0/go.mod h1:mSImk1erAIZhrmZN+AvHh14ztQfjbGwt4TtuofqLduU= go.opencensus.io v0.22.0/go.mod h1:+kGneAE2xo2IficOXnaByMWTGM9T73dGwxeWcUqIpI8= go.opencensus.io v0.22.2/go.mod h1:yxeiOL68Rb0Xd1ddK5vPZ/oVn4vY4Ynel7k9FzqtOIw= @@ -466,10 +497,11 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI= golang.org/x/crypto v0.0.0-20200403201458-baeed622b8d8/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto= +golang.org/x/crypto v0.0.0-20210513164829-c07d793c2f9a/go.mod h1:P+XmwS30IXTQdn5tA2iutPOUgjI07+tq3H3K9MVA1s8= golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc= golang.org/x/crypto v0.3.0/go.mod h1:hebNnKkNXi2UzZN1eVRvBB7co0a+JxK6XbPiWVs/3J4= -golang.org/x/crypto v0.19.0 h1:ENy+Az/9Y1vSrlrvBSyna3PITt4tiZLf7sgCjZBX7Wo= -golang.org/x/crypto v0.19.0/go.mod h1:Iy9bg/ha4yyC70EfRS8jz+B6ybOBKMaSxLj6P6oBDfU= +golang.org/x/crypto v0.23.0 h1:dIJU/v2J8Mdglj/8rJ6UUOM3Zc9zLZxVZwwxMooUSAI= +golang.org/x/crypto v0.23.0/go.mod h1:CKFgDieR+mRhux2Lsu27y0fO304Db0wZe70UKqHu0v8= golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA= golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8= @@ -480,8 +512,8 @@ golang.org/x/exp v0.0.0-20191227195350-da58074b4299/go.mod h1:2RIsYlXP63K8oxa1u0 golang.org/x/exp v0.0.0-20200119233911-0405dc783f0a/go.mod h1:2RIsYlXP63K8oxa1u096TMicItID8zy7Y6sNkU49FU4= golang.org/x/exp v0.0.0-20200207192155-f17229e696bd/go.mod h1:J/WKrq2StrnmMY6+EHIKF9dgMWnmCNThgcyBT1FY9mM= golang.org/x/exp v0.0.0-20200224162631-6cc2880d07d6/go.mod h1:3jZMyOhIsHpP37uCMkUooju7aAi5cS1Q23tOzKc+0MU= -golang.org/x/exp v0.0.0-20230905200255-921286631fa9 h1:GoHiUyI/Tp2nVkLI2mCxVkOjsbSXD66ic0XW0js0R9g= -golang.org/x/exp 
v0.0.0-20230905200255-921286631fa9/go.mod h1:S2oDrQGGwySpoQPVqRShND87VCbxmc6bL1Yd2oYrm6k= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d h1:jtJma62tbqLibJ5sFQz8bKtEM8rJBtfilJ2qTU199MI= +golang.org/x/exp v0.0.0-20231006140011-7918f672742d/go.mod h1:ldy0pHrwJyGW56pPQzzkH36rKxoZW1tw7ZJpeKx+hdo= golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js= golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0= golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE= @@ -503,8 +535,8 @@ golang.org/x/mod v0.1.1-0.20191107180719-034126e5016b/go.mod h1:QqPTAvyqsEbceGzB golang.org/x/mod v0.2.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.3.0/go.mod h1:s0Qsj1ACt9ePp/hMypM3fl4fZqREWJwdYDEqhRiZZUA= golang.org/x/mod v0.6.0-dev.0.20220419223038-86c51ed26bb4/go.mod h1:jJ57K6gSWd91VN4djpZkiMVwK6gcyfeH4XE8wZrZaV4= -golang.org/x/mod v0.12.0 h1:rmsUpXtvNzj340zd98LZ4KntptpfRHwpFOHG188oHXc= -golang.org/x/mod v0.12.0/go.mod h1:iBbtSCu2XBx23ZKBPSOrRkjjQPZFPuis4dIYUhu/chs= +golang.org/x/mod v0.18.0 h1:5+9lSbEzPSdWkH32vYPBwEpX8KwDbM52Ud9xBUvNlb0= +golang.org/x/mod v0.18.0/go.mod h1:hTbmBsO62+eylJbnUtE2MGJUyE7QWk4xUqPFrRgJ+7c= golang.org/x/net v0.0.0-20180724234803-3673e40ba225/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20180826012351-8a410e7b638d/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= golang.org/x/net v0.0.0-20181114220301-adae6a3d119a/go.mod h1:mL1N/T3taQHkDXs73rZJwtUhF3w3ftmwwsq0BUmARs4= @@ -539,8 +571,8 @@ golang.org/x/net v0.0.0-20220127200216-cd36cc0744dd/go.mod h1:CfG3xpIq0wQ8r1q4Su golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su4UZFWDARRcnwPjda9FqA0JpMk= golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c= golang.org/x/net v0.2.0/go.mod h1:KqCZLdyyvdV855qA2rE3GC2aiw5xGR5TEjj8smXukLY= -golang.org/x/net v0.21.0 h1:AQyQV4dYCvJ7vGmJyKki9+PBdyvhkSd8EIx/qb0AYv4= -golang.org/x/net v0.21.0/go.mod h1:bIjVDfnllIU7BJ2DNgfnXvpSvtn8VRwhlsaeUTyUS44= +golang.org/x/net v0.25.0 h1:d/OCCoBEUq33pjydKrGQhw7IlUPI2Oylr+8qLx49kac= +golang.org/x/net v0.25.0/go.mod h1:JkAGAh7GEvH74S6FOH42FLoXpXbE/aqXSrIQjXgsiwM= golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U= golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= golang.org/x/oauth2 v0.0.0-20190402181905-9f3314589c9a/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw= @@ -561,8 +593,8 @@ golang.org/x/sync v0.0.0-20200317015054-43a5402ce75a/go.mod h1:RxMgew5VJxzue5/jJ golang.org/x/sync v0.0.0-20200625203802-6e8e738ad208/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20201207232520-09787c993a3a/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= golang.org/x/sync v0.0.0-20220722155255-886fb9371eb4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM= -golang.org/x/sync v0.5.0 h1:60k92dhOjHxJkrqnwsfl8KuaHbn/5dl0lUPUklKo3qE= -golang.org/x/sync v0.5.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= +golang.org/x/sync v0.7.0 h1:YsImfSBoP9QPYL0xyKJPq0gcaJdG3rInoqxTWbfQu9M= +golang.org/x/sync v0.7.0/go.mod h1:Czt+wKu1gCyEFDUtn0jG5QVvpJ6rzVqr5aXyt9drQfk= golang.org/x/sys v0.0.0-20180830151530-49385e6e1522/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys 
v0.0.0-20180905080454-ebe1bf3edb33/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= golang.org/x/sys v0.0.0-20181116152217-5ac8a444bdc5/go.mod h1:STP8DvDyc/dI5b8T5hshtkjS+E42TnysNCUPdjciGhY= @@ -607,13 +639,13 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg= -golang.org/x/sys v0.18.0 h1:DBdB3niSjOA/O0blCZBqDefyWNYveAYMNF1Wum0DYQ4= -golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= +golang.org/x/sys v0.20.0 h1:Od9JTbYCk261bKm4M/mw7AklTlFYIa0bIp9BgSm1S8Y= +golang.org/x/sys v0.20.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA= golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo= golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8= golang.org/x/term v0.2.0/go.mod h1:TVmDHMZPmdnySmBfhjOoOdhjzdE1h4u1VwSiw2l1Nuc= -golang.org/x/term v0.18.0 h1:FcHjZXDMxI8mM3nwhX9HlKop4C0YQvCVCdwYl2wOtE8= -golang.org/x/term v0.18.0/go.mod h1:ILwASektA3OnRv7amZ1xhE/KTR+u50pbXfZ03+6Nx58= +golang.org/x/term v0.20.0 h1:VnkxpohqXaOBYJtBmEppKUG6mXpi+4O6purfc2+sMhw= +golang.org/x/term v0.20.0/go.mod h1:8UkIAJTvZgivsXaD6/pH6U9ecQzZ45awqEOzuCvwpFY= golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ= @@ -622,8 +654,8 @@ golang.org/x/text v0.3.3/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.6/go.mod h1:5Zoc/QRtKVWzQhOtBMvqHzDpF6irO9z98xDceosuGiQ= golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ= golang.org/x/text v0.4.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8= -golang.org/x/text v0.14.0 h1:ScX5w1eTa3QqT8oi6+ziP7dTV1S2+ALU0bI+0zXKWiQ= -golang.org/x/text v0.14.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= +golang.org/x/text v0.15.0 h1:h1V/4gjBv8v9cjcR6+AR5+/cIYK5N/WAgiv4xlsEtAk= +golang.org/x/text v0.15.0/go.mod h1:18ZOQIKpY8NJVqYksKHtTdi31H5itFRjB5/qKTNYzSU= golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ= @@ -670,8 +702,8 @@ golang.org/x/tools v0.0.0-20200729194436-6467de6f59a7/go.mod h1:njjCfa9FT2d7l9Bc golang.org/x/tools v0.0.0-20200804011535-6c149bb5ef0d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.0.0-20200825202427-b303f430e36d/go.mod h1:njjCfa9FT2d7l9Bc6FUM5FLjQPp3cFF28FI3qnDFljA= golang.org/x/tools v0.1.12/go.mod h1:hNGJHUnrk76NpqgfD5Aqm5Crs+Hm0VOH/i9J2+nxYbc= -golang.org/x/tools v0.13.0 h1:Iey4qkscZuv0VvIt8E0neZjtPVQFSc870HQ448QgEmQ= -golang.org/x/tools v0.13.0/go.mod h1:HvlwmtVNQAhOuCjW7xxvovg8wbNq7LwfXh/k7wXUl58= +golang.org/x/tools v0.21.0 h1:qc0xYgIbsSDt9EyWz05J5wfa7LOVW0YTLOXrqdLAWIw= +golang.org/x/tools v0.21.0/go.mod h1:aiJjzUbINMkxbQROHiO6hDPo2LHcIPhhQsa9DLh0yGk= golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod 
h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191011141410-1b5146add898/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= golang.org/x/xerrors v0.0.0-20191204190536-9bdfabe68543/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0= @@ -753,8 +785,8 @@ google.golang.org/protobuf v1.24.0/go.mod h1:r/3tXBNzIEhYS9I1OUVjXDlt8tc493IdKGj google.golang.org/protobuf v1.25.0/go.mod h1:9JNX74DMeImyA3h4bdi1ymwjUzf21/xIlbajtzgsN7c= google.golang.org/protobuf v1.26.0-rc.1/go.mod h1:jlhhOSvTdKEhbULTjvd4ARK9grFBp09yW+WbY/TyQbw= google.golang.org/protobuf v1.26.0/go.mod h1:9q0QmTI4eRPtz6boOQmLYwt+qCgq0jsYwAQnmE0givc= -google.golang.org/protobuf v1.31.0 h1:g0LDEJHgrBl9N9r17Ru3sqWhkIx2NB67okBHPwC7hs8= -google.golang.org/protobuf v1.31.0/go.mod h1:HV8QOd/L58Z+nl8r43ehVNZIU/HEI6OcFqwMG9pJV4I= +google.golang.org/protobuf v1.33.0 h1:uNO2rsAINq/JlFpSdYEKIZ0uKD/R9cpdv0T+yoGwGmI= +google.golang.org/protobuf v1.33.0/go.mod h1:c6P6GXX6sHbq/GpV6MGZEdwhWPcYBgnhAHhKbcUYpos= gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw= gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0= @@ -764,6 +796,8 @@ gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI= gopkg.in/ini.v1 v1.67.0 h1:Dgnx+6+nfE+IfzjUEISNeydPJh9AXNNsWbGP9KzCsOA= gopkg.in/ini.v1 v1.67.0/go.mod h1:pNLf8WUiyNEtQjuu5G5vTm06TEv9tsIgeAvK8hOrP4k= gopkg.in/validator.v2 v2.0.0-20200605151824-2b28d334fa05/go.mod h1:o4V0GXN9/CAmCsvJ0oXYZvrZOe7syiDZSN1GWGZTGzc= +gopkg.in/warnings.v0 v0.1.2 h1:wFXVbFY8DY5/xOe1ECiWdKCzZlxgshcYVNkBHstARME= +gopkg.in/warnings.v0 v0.1.2/go.mod h1:jksf8JmL6Qr/oQM2OXTHunEvvTAsrWBLb6OOjuVWRNI= gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.2/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI= diff --git a/goss.yaml b/goss.yaml new file mode 100644 index 0000000000..efb4796f42 --- /dev/null +++ b/goss.yaml @@ -0,0 +1,37 @@ +# See: https://github.com/goss-org/goss/blob/master/docs/gossfile.md + +command: + # ensure atlantis is available + atlantis-available: + exec: "atlantis version" + exit-status: 0 + stdout: [] + stderr: [] + + # ensure conftest is available + conftest-available: + exec: "conftest -v" + exit-status: 0 + stdout: [] + stderr: [] + + # ensure git-lfs is available + git-lfs-available: + exec: "git-lfs -v" + exit-status: 0 + stdout: [] + stderr: [] + + # ensure terraform is available + terraform-available: + exec: "terraform version" + exit-status: 0 + stdout: [] + stderr: [] + + # ensure tofu binary is available + tofu-available: + exec: "tofu version" + exit-status: 0 + stdout: [] + stderr: [] diff --git a/netlify.toml b/netlify.toml new file mode 100644 index 0000000000..f32bbf2065 --- /dev/null +++ b/netlify.toml @@ -0,0 +1,32 @@ +# Netlify Config, https://www.netlify.com/docs/netlify-toml-reference/ +[build] +base = "/" +command = "npm install && npm run website:build" +publish = "runatlantis.io/.vitepress/dist/" + +[[redirects]] +force = true +from = "/guide/getting-started.html" +status = 301 +to = "/guide/" + +[[redirects]] +force = true +from = "/docs/atlantis-yaml-reference.html" +status = 301 +to = "/docs/repo-level-atlantis-yaml.html" + +[[headers]] +for = "/*" +[headers.values] +Cache-Control = "public, max-age=86400" 
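# Annotation (not part of the committed file): the headers below harden the
# docs site. Referrer-Policy stops Referer data leaking to other origins;
# Strict-Transport-Security pins browsers to HTTPS for a day (86400 s)
# across subdomains and carries the preload directive; X-Content-Type-Options
# disables MIME sniffing; X-Frame-Options refuses framing; X-XSS-Protection
# re-enables the legacy XSS filter in older browsers.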
+Referrer-Policy = "no-referrer" +Strict-Transport-Security = "max-age=86400; includeSubDomains; preload" +X-Content-Type-Options = "nosniff" +X-Frame-Options = "DENY" +X-XSS-Protection = "1; mode=block" + +[[headers]] +for = "*.html" +[headers.values] +Content-Type = "text/html; charset=UTF-8" diff --git a/package-lock.json b/package-lock.json new file mode 100644 index 0000000000..61570407ce --- /dev/null +++ b/package-lock.json @@ -0,0 +1,4114 @@ +{ + "name": "atlantis", + "lockfileVersion": 3, + "requires": true, + "packages": { + "": { + "license": "Apache-2.0", + "devDependencies": { + "@playwright/test": "^1.44.0", + "@types/node": "^20.12.12", + "@vueuse/core": "^10.9.0", + "markdown-it-footnote": "^4.0.0", + "markdownlint-cli": "^0.40.0", + "mermaid": "^10.9.1", + "sitemap-ts": "^1.7.3", + "vitepress": "^1.2.3", + "vitepress-plugin-mermaid": "^2.0.16", + "vue": "^3.4.27" + } + }, + "node_modules/@algolia/autocomplete-core": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-core/-/autocomplete-core-1.9.3.tgz", + "integrity": "sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw==", + "dev": true, + "dependencies": { + "@algolia/autocomplete-plugin-algolia-insights": "1.9.3", + "@algolia/autocomplete-shared": "1.9.3" + } + }, + "node_modules/@algolia/autocomplete-plugin-algolia-insights": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-plugin-algolia-insights/-/autocomplete-plugin-algolia-insights-1.9.3.tgz", + "integrity": "sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg==", + "dev": true, + "dependencies": { + "@algolia/autocomplete-shared": "1.9.3" + }, + "peerDependencies": { + "search-insights": ">= 1 < 3" + } + }, + "node_modules/@algolia/autocomplete-preset-algolia": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-preset-algolia/-/autocomplete-preset-algolia-1.9.3.tgz", + "integrity": "sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA==", + "dev": true, + "dependencies": { + "@algolia/autocomplete-shared": "1.9.3" + }, + "peerDependencies": { + "@algolia/client-search": ">= 4.9.1 < 6", + "algoliasearch": ">= 4.9.1 < 6" + } + }, + "node_modules/@algolia/autocomplete-shared": { + "version": "1.9.3", + "resolved": "https://registry.npmjs.org/@algolia/autocomplete-shared/-/autocomplete-shared-1.9.3.tgz", + "integrity": "sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ==", + "dev": true, + "peerDependencies": { + "@algolia/client-search": ">= 4.9.1 < 6", + "algoliasearch": ">= 4.9.1 < 6" + } + }, + "node_modules/@algolia/cache-browser-local-storage": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-browser-local-storage/-/cache-browser-local-storage-4.23.3.tgz", + "integrity": "sha512-vRHXYCpPlTDE7i6UOy2xE03zHF2C8MEFjPN2v7fRbqVpcOvAUQK81x3Kc21xyb5aSIpYCjWCZbYZuz8Glyzyyg==", + "dev": true, + "dependencies": { + "@algolia/cache-common": "4.23.3" + } + }, + "node_modules/@algolia/cache-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/cache-common/-/cache-common-4.23.3.tgz", + "integrity": "sha512-h9XcNI6lxYStaw32pHpB1TMm0RuxphF+Ik4o7tcQiodEdpKK+wKufY6QXtba7t3k8eseirEMVB83uFFF3Nu54A==", + "dev": true + }, + "node_modules/@algolia/cache-in-memory": { + "version": "4.23.3", + "resolved": 
"https://registry.npmjs.org/@algolia/cache-in-memory/-/cache-in-memory-4.23.3.tgz", + "integrity": "sha512-yvpbuUXg/+0rbcagxNT7un0eo3czx2Uf0y4eiR4z4SD7SiptwYTpbuS0IHxcLHG3lq22ukx1T6Kjtk/rT+mqNg==", + "dev": true, + "dependencies": { + "@algolia/cache-common": "4.23.3" + } + }, + "node_modules/@algolia/client-account": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-account/-/client-account-4.23.3.tgz", + "integrity": "sha512-hpa6S5d7iQmretHHF40QGq6hz0anWEHGlULcTIT9tbUssWUriN9AUXIFQ8Ei4w9azD0hc1rUok9/DeQQobhQMA==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-analytics": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-analytics/-/client-analytics-4.23.3.tgz", + "integrity": "sha512-LBsEARGS9cj8VkTAVEZphjxTjMVCci+zIIiRhpFun9jGDUlS1XmhCW7CTrnaWeIuCQS/2iPyRqSy1nXPjcBLRA==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-common/-/client-common-4.23.3.tgz", + "integrity": "sha512-l6EiPxdAlg8CYhroqS5ybfIczsGUIAC47slLPOMDeKSVXYG1n0qGiz4RjAHLw2aD0xzh2EXZ7aRguPfz7UKDKw==", + "dev": true, + "dependencies": { + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-personalization": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-personalization/-/client-personalization-4.23.3.tgz", + "integrity": "sha512-3E3yF3Ocr1tB/xOZiuC3doHQBQ2zu2MPTYZ0d4lpfWads2WTKG7ZzmGnsHmm63RflvDeLK/UVx7j2b3QuwKQ2g==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/client-search": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/client-search/-/client-search-4.23.3.tgz", + "integrity": "sha512-P4VAKFHqU0wx9O+q29Q8YVuaowaZ5EM77rxfmGnkHUJggh28useXQdopokgwMeYw2XUht49WX5RcTQ40rZIabw==", + "dev": true, + "dependencies": { + "@algolia/client-common": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/logger-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-common/-/logger-common-4.23.3.tgz", + "integrity": "sha512-y9kBtmJwiZ9ZZ+1Ek66P0M68mHQzKRxkW5kAAXYN/rdzgDN0d2COsViEFufxJ0pb45K4FRcfC7+33YB4BLrZ+g==", + "dev": true + }, + "node_modules/@algolia/logger-console": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/logger-console/-/logger-console-4.23.3.tgz", + "integrity": "sha512-8xoiseoWDKuCVnWP8jHthgaeobDLolh00KJAdMe9XPrWPuf1by732jSpgy2BlsLTaT9m32pHI8CRfrOqQzHv3A==", + "dev": true, + "dependencies": { + "@algolia/logger-common": "4.23.3" + } + }, + "node_modules/@algolia/recommend": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/recommend/-/recommend-4.23.3.tgz", + "integrity": "sha512-9fK4nXZF0bFkdcLBRDexsnGzVmu4TSYZqxdpgBW2tEyfuSSY54D4qSRkLmNkrrz4YFvdh2GM1gA8vSsnZPR73w==", + "dev": true, + "dependencies": { + "@algolia/cache-browser-local-storage": "4.23.3", + "@algolia/cache-common": "4.23.3", + "@algolia/cache-in-memory": "4.23.3", + 
"@algolia/client-common": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/logger-console": "4.23.3", + "@algolia/requester-browser-xhr": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/requester-node-http": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/@algolia/requester-browser-xhr": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-browser-xhr/-/requester-browser-xhr-4.23.3.tgz", + "integrity": "sha512-jDWGIQ96BhXbmONAQsasIpTYWslyjkiGu0Quydjlowe+ciqySpiDUrJHERIRfELE5+wFc7hc1Q5hqjGoV7yghw==", + "dev": true, + "dependencies": { + "@algolia/requester-common": "4.23.3" + } + }, + "node_modules/@algolia/requester-common": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-common/-/requester-common-4.23.3.tgz", + "integrity": "sha512-xloIdr/bedtYEGcXCiF2muajyvRhwop4cMZo+K2qzNht0CMzlRkm8YsDdj5IaBhshqfgmBb3rTg4sL4/PpvLYw==", + "dev": true + }, + "node_modules/@algolia/requester-node-http": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/requester-node-http/-/requester-node-http-4.23.3.tgz", + "integrity": "sha512-zgu++8Uj03IWDEJM3fuNl34s746JnZOWn1Uz5taV1dFyJhVM/kTNw9Ik7YJWiUNHJQXcaD8IXD1eCb0nq/aByA==", + "dev": true, + "dependencies": { + "@algolia/requester-common": "4.23.3" + } + }, + "node_modules/@algolia/transporter": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/@algolia/transporter/-/transporter-4.23.3.tgz", + "integrity": "sha512-Wjl5gttqnf/gQKJA+dafnD0Y6Yw97yvfY8R9h0dQltX1GXTgNs1zWgvtWW0tHl1EgMdhAyw189uWiZMnL3QebQ==", + "dev": true, + "dependencies": { + "@algolia/cache-common": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/requester-common": "4.23.3" + } + }, + "node_modules/@antfu/utils": { + "version": "0.7.8", + "resolved": "https://registry.npmjs.org/@antfu/utils/-/utils-0.7.8.tgz", + "integrity": "sha512-rWQkqXRESdjXtc+7NRfK9lASQjpXJu1ayp7qi1d23zZorY+wBHVLHHoVcMsEnkqEBWTFqbztO7/QdJFzyEcLTg==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@babel/parser": { + "version": "7.24.7", + "resolved": "https://registry.npmjs.org/@babel/parser/-/parser-7.24.7.tgz", + "integrity": "sha512-9uUYRm6OqQrCqQdG1iCBwBPZgN8ciDBro2nIOFaiRz1/BCxaI7CNvQbDHvsArAC7Tw9Hda/B3U+6ui9u4HWXPw==", + "dev": true, + "bin": { + "parser": "bin/babel-parser.js" + }, + "engines": { + "node": ">=6.0.0" + } + }, + "node_modules/@braintree/sanitize-url": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/@braintree/sanitize-url/-/sanitize-url-6.0.4.tgz", + "integrity": "sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==", + "dev": true + }, + "node_modules/@docsearch/css": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/css/-/css-3.6.0.tgz", + "integrity": "sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ==", + "dev": true + }, + "node_modules/@docsearch/js": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/js/-/js-3.6.0.tgz", + "integrity": "sha512-QujhqINEElrkIfKwyyyTfbsfMAYCkylInLYMRqHy7PHc8xTBQCow73tlo/Kc7oIwBrCLf0P3YhjlOeV4v8hevQ==", + "dev": true, + "dependencies": { + "@docsearch/react": "3.6.0", + "preact": "^10.0.0" + } + }, + "node_modules/@docsearch/react": { + "version": "3.6.0", + "resolved": "https://registry.npmjs.org/@docsearch/react/-/react-3.6.0.tgz", + "integrity": 
"sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==", + "dev": true, + "dependencies": { + "@algolia/autocomplete-core": "1.9.3", + "@algolia/autocomplete-preset-algolia": "1.9.3", + "@docsearch/css": "3.6.0", + "algoliasearch": "^4.19.1" + }, + "peerDependencies": { + "@types/react": ">= 16.8.0 < 19.0.0", + "react": ">= 16.8.0 < 19.0.0", + "react-dom": ">= 16.8.0 < 19.0.0", + "search-insights": ">= 1 < 3" + }, + "peerDependenciesMeta": { + "@types/react": { + "optional": true + }, + "react": { + "optional": true + }, + "react-dom": { + "optional": true + }, + "search-insights": { + "optional": true + } + } + }, + "node_modules/@esbuild/aix-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/aix-ppc64/-/aix-ppc64-0.20.2.tgz", + "integrity": "sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "aix" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm/-/android-arm-0.20.2.tgz", + "integrity": "sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-arm64/-/android-arm64-0.20.2.tgz", + "integrity": "sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/android-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/android-x64/-/android-x64-0.20.2.tgz", + "integrity": "sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-arm64/-/darwin-arm64-0.20.2.tgz", + "integrity": "sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/darwin-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/darwin-x64/-/darwin-x64-0.20.2.tgz", + "integrity": "sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/freebsd-arm64/-/freebsd-arm64-0.20.2.tgz", + "integrity": "sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/freebsd-x64": { + "version": "0.20.2", + "resolved": 
"https://registry.npmjs.org/@esbuild/freebsd-x64/-/freebsd-x64-0.20.2.tgz", + "integrity": "sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "freebsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm/-/linux-arm-0.20.2.tgz", + "integrity": "sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-arm64/-/linux-arm64-0.20.2.tgz", + "integrity": "sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ia32/-/linux-ia32-0.20.2.tgz", + "integrity": "sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-loong64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-loong64/-/linux-loong64-0.20.2.tgz", + "integrity": "sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==", + "cpu": [ + "loong64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-mips64el": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-mips64el/-/linux-mips64el-0.20.2.tgz", + "integrity": "sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==", + "cpu": [ + "mips64el" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-ppc64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-ppc64/-/linux-ppc64-0.20.2.tgz", + "integrity": "sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-riscv64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-riscv64/-/linux-riscv64-0.20.2.tgz", + "integrity": "sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-s390x": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/linux-s390x/-/linux-s390x-0.20.2.tgz", + "integrity": "sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/linux-x64": { + "version": "0.20.2", 
+ "resolved": "https://registry.npmjs.org/@esbuild/linux-x64/-/linux-x64-0.20.2.tgz", + "integrity": "sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/netbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/netbsd-x64/-/netbsd-x64-0.20.2.tgz", + "integrity": "sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "netbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/openbsd-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/openbsd-x64/-/openbsd-x64-0.20.2.tgz", + "integrity": "sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "openbsd" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/sunos-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/sunos-x64/-/sunos-x64-0.20.2.tgz", + "integrity": "sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "sunos" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-arm64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-arm64/-/win32-arm64-0.20.2.tgz", + "integrity": "sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-ia32": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-ia32/-/win32-ia32-0.20.2.tgz", + "integrity": "sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@esbuild/win32-x64": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/@esbuild/win32-x64/-/win32-x64-0.20.2.tgz", + "integrity": "sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ], + "engines": { + "node": ">=12" + } + }, + "node_modules/@isaacs/cliui": { + "version": "8.0.2", + "resolved": "https://registry.npmjs.org/@isaacs/cliui/-/cliui-8.0.2.tgz", + "integrity": "sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==", + "dev": true, + "dependencies": { + "string-width": "^5.1.2", + "string-width-cjs": "npm:string-width@^4.2.0", + "strip-ansi": "^7.0.1", + "strip-ansi-cjs": "npm:strip-ansi@^6.0.1", + "wrap-ansi": "^8.1.0", + "wrap-ansi-cjs": "npm:wrap-ansi@^7.0.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/@jridgewell/sourcemap-codec": { + "version": "1.4.15", + "resolved": "https://registry.npmjs.org/@jridgewell/sourcemap-codec/-/sourcemap-codec-1.4.15.tgz", + "integrity": "sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==", + "dev": true + }, + 
"node_modules/@mermaid-js/mermaid-mindmap": { + "version": "9.3.0", + "resolved": "https://registry.npmjs.org/@mermaid-js/mermaid-mindmap/-/mermaid-mindmap-9.3.0.tgz", + "integrity": "sha512-IhtYSVBBRYviH1Ehu8gk69pMDF8DSRqXBRDMWrEfHoaMruHeaP2DXA3PBnuwsMaCdPQhlUUcy/7DBLAEIXvCAw==", + "dev": true, + "optional": true, + "dependencies": { + "@braintree/sanitize-url": "^6.0.0", + "cytoscape": "^3.23.0", + "cytoscape-cose-bilkent": "^4.1.0", + "cytoscape-fcose": "^2.1.0", + "d3": "^7.0.0", + "khroma": "^2.0.0", + "non-layered-tidy-tree-layout": "^2.0.2" + } + }, + "node_modules/@nodelib/fs.scandir": { + "version": "2.1.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.scandir/-/fs.scandir-2.1.5.tgz", + "integrity": "sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "2.0.5", + "run-parallel": "^1.1.9" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.stat": { + "version": "2.0.5", + "resolved": "https://registry.npmjs.org/@nodelib/fs.stat/-/fs.stat-2.0.5.tgz", + "integrity": "sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@nodelib/fs.walk": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/@nodelib/fs.walk/-/fs.walk-1.2.8.tgz", + "integrity": "sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==", + "dev": true, + "dependencies": { + "@nodelib/fs.scandir": "2.1.5", + "fastq": "^1.6.0" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/@pkgjs/parseargs": { + "version": "0.11.0", + "resolved": "https://registry.npmjs.org/@pkgjs/parseargs/-/parseargs-0.11.0.tgz", + "integrity": "sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==", + "dev": true, + "optional": true, + "engines": { + "node": ">=14" + } + }, + "node_modules/@playwright/test": { + "version": "1.44.1", + "resolved": "https://registry.npmjs.org/@playwright/test/-/test-1.44.1.tgz", + "integrity": "sha512-1hZ4TNvD5z9VuhNJ/walIjvMVvYkZKf71axoF/uiAqpntQJXpG64dlXhoDXE3OczPuTuvjf/M5KWFg5VAVUS3Q==", + "dev": true, + "dependencies": { + "playwright": "1.44.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/@rollup/rollup-android-arm-eabi": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm-eabi/-/rollup-android-arm-eabi-4.18.0.tgz", + "integrity": "sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-android-arm64": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-android-arm64/-/rollup-android-arm64-4.18.0.tgz", + "integrity": "sha512-avCea0RAP03lTsDhEyfy+hpfr85KfyTctMADqHVhLAF3MlIkq83CP8UfAHUssgXTYd+6er6PaAhx/QGv4L1EiA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "android" + ] + }, + "node_modules/@rollup/rollup-darwin-arm64": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-arm64/-/rollup-darwin-arm64-4.18.0.tgz", + "integrity": "sha512-IWfdwU7KDSm07Ty0PuA/W2JYoZ4iTj3TUQjkVsO/6U+4I1jN5lcR71ZEvRh52sDOERdnNhhHU57UITXz5jC1/w==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + 
}, + "node_modules/@rollup/rollup-darwin-x64": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-darwin-x64/-/rollup-darwin-x64-4.18.0.tgz", + "integrity": "sha512-n2LMsUz7Ynu7DoQrSQkBf8iNrjOGyPLrdSg802vk6XT3FtsgX6JbE8IHRvposskFm9SNxzkLYGSq9QdpLYpRNA==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "darwin" + ] + }, + "node_modules/@rollup/rollup-linux-arm-gnueabihf": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-gnueabihf/-/rollup-linux-arm-gnueabihf-4.18.0.tgz", + "integrity": "sha512-C/zbRYRXFjWvz9Z4haRxcTdnkPt1BtCkz+7RtBSuNmKzMzp3ZxdM28Mpccn6pt28/UWUCTXa+b0Mx1k3g6NOMA==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm-musleabihf": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm-musleabihf/-/rollup-linux-arm-musleabihf-4.18.0.tgz", + "integrity": "sha512-l3m9ewPgjQSXrUMHg93vt0hYCGnrMOcUpTz6FLtbwljo2HluS4zTXFy2571YQbisTnfTKPZ01u/ukJdQTLGh9A==", + "cpu": [ + "arm" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-gnu/-/rollup-linux-arm64-gnu-4.18.0.tgz", + "integrity": "sha512-rJ5D47d8WD7J+7STKdCUAgmQk49xuFrRi9pZkWoRD1UeSMakbcepWXPF8ycChBoAqs1pb2wzvbY6Q33WmN2ftw==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-arm64-musl": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-arm64-musl/-/rollup-linux-arm64-musl-4.18.0.tgz", + "integrity": "sha512-be6Yx37b24ZwxQ+wOQXXLZqpq4jTckJhtGlWGZs68TgdKXJgw54lUUoFYrg6Zs/kjzAQwEwYbp8JxZVzZLRepQ==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-powerpc64le-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-powerpc64le-gnu/-/rollup-linux-powerpc64le-gnu-4.18.0.tgz", + "integrity": "sha512-hNVMQK+qrA9Todu9+wqrXOHxFiD5YmdEi3paj6vP02Kx1hjd2LLYR2eaN7DsEshg09+9uzWi2W18MJDlG0cxJA==", + "cpu": [ + "ppc64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-riscv64-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-riscv64-gnu/-/rollup-linux-riscv64-gnu-4.18.0.tgz", + "integrity": "sha512-ROCM7i+m1NfdrsmvwSzoxp9HFtmKGHEqu5NNDiZWQtXLA8S5HBCkVvKAxJ8U+CVctHwV2Gb5VUaK7UAkzhDjlg==", + "cpu": [ + "riscv64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-s390x-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-s390x-gnu/-/rollup-linux-s390x-gnu-4.18.0.tgz", + "integrity": "sha512-0UyyRHyDN42QL+NbqevXIIUnKA47A+45WyasO+y2bGJ1mhQrfrtXUpTxCOrfxCR4esV3/RLYyucGVPiUsO8xjg==", + "cpu": [ + "s390x" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-linux-x64-gnu": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-gnu/-/rollup-linux-x64-gnu-4.18.0.tgz", + "integrity": "sha512-xuglR2rBVHA5UsI8h8UbX4VJ470PtGCf5Vpswh7p2ukaqBGFTnsfzxUBetoWBWymHMxbIG0Cmx7Y9qDZzr648w==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + 
"node_modules/@rollup/rollup-linux-x64-musl": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-linux-x64-musl/-/rollup-linux-x64-musl-4.18.0.tgz", + "integrity": "sha512-LKaqQL9osY/ir2geuLVvRRs+utWUNilzdE90TpyoX0eNqPzWjRm14oMEE+YLve4k/NAqCdPkGYDaDF5Sw+xBfg==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "linux" + ] + }, + "node_modules/@rollup/rollup-win32-arm64-msvc": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-arm64-msvc/-/rollup-win32-arm64-msvc-4.18.0.tgz", + "integrity": "sha512-7J6TkZQFGo9qBKH0pk2cEVSRhJbL6MtfWxth7Y5YmZs57Pi+4x6c2dStAUvaQkHQLnEQv1jzBUW43GvZW8OFqA==", + "cpu": [ + "arm64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-ia32-msvc": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-ia32-msvc/-/rollup-win32-ia32-msvc-4.18.0.tgz", + "integrity": "sha512-Txjh+IxBPbkUB9+SXZMpv+b/vnTEtFyfWZgJ6iyCmt2tdx0OF5WhFowLmnh8ENGNpfUlUZkdI//4IEmhwPieNg==", + "cpu": [ + "ia32" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@rollup/rollup-win32-x64-msvc": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/@rollup/rollup-win32-x64-msvc/-/rollup-win32-x64-msvc-4.18.0.tgz", + "integrity": "sha512-UOo5FdvOL0+eIVTgS4tIdbW+TtnBLWg1YBCcU2KWM7nuNwRz9bksDX1bekJJCpu25N1DVWaCwnT39dVQxzqS8g==", + "cpu": [ + "x64" + ], + "dev": true, + "optional": true, + "os": [ + "win32" + ] + }, + "node_modules/@shikijs/core": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@shikijs/core/-/core-1.6.3.tgz", + "integrity": "sha512-QnJKHFUW95GnlJLJGP6QLx4M69HM0KlXk+R2Y8lr/x4nAx1Yb/lsuxq4XwybuUjTxbJk+BT0g/kvn0bcsjGGHg==", + "dev": true + }, + "node_modules/@shikijs/transformers": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/@shikijs/transformers/-/transformers-1.6.3.tgz", + "integrity": "sha512-ptBuP/IIeqCzK3zZO/knFICZWs58uZWzbv7ND+bKOewe5NcCjZfSiMyzFwOyl23ewPJ1APjRBwLi6Asrodmmxw==", + "dev": true, + "dependencies": { + "shiki": "1.6.3" + } + }, + "node_modules/@types/d3-scale": { + "version": "4.0.8", + "resolved": "https://registry.npmjs.org/@types/d3-scale/-/d3-scale-4.0.8.tgz", + "integrity": "sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==", + "dev": true, + "dependencies": { + "@types/d3-time": "*" + } + }, + "node_modules/@types/d3-scale-chromatic": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-scale-chromatic/-/d3-scale-chromatic-3.0.3.tgz", + "integrity": "sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==", + "dev": true + }, + "node_modules/@types/d3-time": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/@types/d3-time/-/d3-time-3.0.3.tgz", + "integrity": "sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==", + "dev": true + }, + "node_modules/@types/debug": { + "version": "4.1.12", + "resolved": "https://registry.npmjs.org/@types/debug/-/debug-4.1.12.tgz", + "integrity": "sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==", + "dev": true, + "dependencies": { + "@types/ms": "*" + } + }, + "node_modules/@types/estree": { + "version": "1.0.5", + "resolved": "https://registry.npmjs.org/@types/estree/-/estree-1.0.5.tgz", + "integrity": 
"sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==", + "dev": true + }, + "node_modules/@types/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/@types/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==", + "dev": true + }, + "node_modules/@types/markdown-it": { + "version": "14.1.1", + "resolved": "https://registry.npmjs.org/@types/markdown-it/-/markdown-it-14.1.1.tgz", + "integrity": "sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==", + "dev": true, + "dependencies": { + "@types/linkify-it": "^5", + "@types/mdurl": "^2" + } + }, + "node_modules/@types/mdast": { + "version": "3.0.15", + "resolved": "https://registry.npmjs.org/@types/mdast/-/mdast-3.0.15.tgz", + "integrity": "sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==", + "dev": true, + "dependencies": { + "@types/unist": "^2" + } + }, + "node_modules/@types/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/@types/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==", + "dev": true + }, + "node_modules/@types/ms": { + "version": "0.7.34", + "resolved": "https://registry.npmjs.org/@types/ms/-/ms-0.7.34.tgz", + "integrity": "sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==", + "dev": true + }, + "node_modules/@types/node": { + "version": "20.14.2", + "resolved": "https://registry.npmjs.org/@types/node/-/node-20.14.2.tgz", + "integrity": "sha512-xyu6WAMVwv6AKFLB+e/7ySZVr/0zLCzOa7rSpq6jNwpqOrUbcACDWC+53d4n2QHOnDou0fbIsg8wZu/sxrnI4Q==", + "dev": true, + "dependencies": { + "undici-types": "~5.26.4" + } + }, + "node_modules/@types/sax": { + "version": "1.2.7", + "resolved": "https://registry.npmjs.org/@types/sax/-/sax-1.2.7.tgz", + "integrity": "sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==", + "dev": true, + "dependencies": { + "@types/node": "*" + } + }, + "node_modules/@types/unist": { + "version": "2.0.10", + "resolved": "https://registry.npmjs.org/@types/unist/-/unist-2.0.10.tgz", + "integrity": "sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==", + "dev": true + }, + "node_modules/@types/web-bluetooth": { + "version": "0.0.20", + "resolved": "https://registry.npmjs.org/@types/web-bluetooth/-/web-bluetooth-0.0.20.tgz", + "integrity": "sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==", + "dev": true + }, + "node_modules/@vitejs/plugin-vue": { + "version": "5.0.5", + "resolved": "https://registry.npmjs.org/@vitejs/plugin-vue/-/plugin-vue-5.0.5.tgz", + "integrity": "sha512-LOjm7XeIimLBZyzinBQ6OSm3UBCNVCpLkxGC0oWmm2YPzVZoxMsdvNVimLTBzpAnR9hl/yn1SHGuRfe6/Td9rQ==", + "dev": true, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "peerDependencies": { + "vite": "^5.0.0", + "vue": "^3.2.25" + } + }, + "node_modules/@vue/compiler-core": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-core/-/compiler-core-3.4.27.tgz", + "integrity": "sha512-E+RyqY24KnyDXsCuQrI+mlcdW3ALND6U7Gqa/+bVwbcpcR3BRRIckFoz7Qyd4TTlnugtwuI7YgjbvsLmxb+yvg==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.24.4", + "@vue/shared": "3.4.27", + "entities": "^4.5.0", + 
"estree-walker": "^2.0.2", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-dom": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-dom/-/compiler-dom-3.4.27.tgz", + "integrity": "sha512-kUTvochG/oVgE1w5ViSr3KUBh9X7CWirebA3bezTbB5ZKBQZwR2Mwj9uoSKRMFcz4gSMzzLXBPD6KpCLb9nvWw==", + "dev": true, + "dependencies": { + "@vue/compiler-core": "3.4.27", + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/compiler-sfc": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-sfc/-/compiler-sfc-3.4.27.tgz", + "integrity": "sha512-nDwntUEADssW8e0rrmE0+OrONwmRlegDA1pD6QhVeXxjIytV03yDqTey9SBDiALsvAd5U4ZrEKbMyVXhX6mCGA==", + "dev": true, + "dependencies": { + "@babel/parser": "^7.24.4", + "@vue/compiler-core": "3.4.27", + "@vue/compiler-dom": "3.4.27", + "@vue/compiler-ssr": "3.4.27", + "@vue/shared": "3.4.27", + "estree-walker": "^2.0.2", + "magic-string": "^0.30.10", + "postcss": "^8.4.38", + "source-map-js": "^1.2.0" + } + }, + "node_modules/@vue/compiler-ssr": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/compiler-ssr/-/compiler-ssr-3.4.27.tgz", + "integrity": "sha512-CVRzSJIltzMG5FcidsW0jKNQnNRYC8bT21VegyMMtHmhW3UOI7knmUehzswXLrExDLE6lQCZdrhD4ogI7c+vuw==", + "dev": true, + "dependencies": { + "@vue/compiler-dom": "3.4.27", + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/devtools-api": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/@vue/devtools-api/-/devtools-api-7.2.1.tgz", + "integrity": "sha512-6oNCtyFOrNdqm6GUkFujsCgFlpbsHLnZqq7edeM/+cxAbMyCWvsaCsIMUaz7AiluKLccCGEM8fhOsjaKgBvb7g==", + "dev": true, + "dependencies": { + "@vue/devtools-kit": "^7.2.1" + } + }, + "node_modules/@vue/devtools-kit": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/@vue/devtools-kit/-/devtools-kit-7.2.1.tgz", + "integrity": "sha512-Wak/fin1X0Q8LLIfCAHBrdaaB+R6IdpSXsDByPHbQ3BmkCP0/cIo/oEGp9i0U2+gEqD4L3V9RDjNf1S34DTzQQ==", + "dev": true, + "dependencies": { + "@vue/devtools-shared": "^7.2.1", + "hookable": "^5.5.3", + "mitt": "^3.0.1", + "perfect-debounce": "^1.0.0", + "speakingurl": "^14.0.1" + }, + "peerDependencies": { + "vue": "^3.0.0" + } + }, + "node_modules/@vue/devtools-shared": { + "version": "7.2.1", + "resolved": "https://registry.npmjs.org/@vue/devtools-shared/-/devtools-shared-7.2.1.tgz", + "integrity": "sha512-PCJF4UknJmOal68+X9XHyVeQ+idv0LFujkTOIW30+GaMJqwFVN9LkQKX4gLqn61KkGMdJTzQ1bt7EJag3TI6AA==", + "dev": true, + "dependencies": { + "rfdc": "^1.3.1" + } + }, + "node_modules/@vue/reactivity": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/reactivity/-/reactivity-3.4.27.tgz", + "integrity": "sha512-kK0g4NknW6JX2yySLpsm2jlunZJl2/RJGZ0H9ddHdfBVHcNzxmQ0sS0b09ipmBoQpY8JM2KmUw+a6sO8Zo+zIA==", + "dev": true, + "dependencies": { + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/runtime-core": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/runtime-core/-/runtime-core-3.4.27.tgz", + "integrity": "sha512-7aYA9GEbOOdviqVvcuweTLe5Za4qBZkUY7SvET6vE8kyypxVgaT1ixHLg4urtOlrApdgcdgHoTZCUuTGap/5WA==", + "dev": true, + "dependencies": { + "@vue/reactivity": "3.4.27", + "@vue/shared": "3.4.27" + } + }, + "node_modules/@vue/runtime-dom": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/runtime-dom/-/runtime-dom-3.4.27.tgz", + "integrity": "sha512-ScOmP70/3NPM+TW9hvVAz6VWWtZJqkbdf7w6ySsws+EsqtHvkhxaWLecrTorFxsawelM5Ys9FnDEMt6BPBDS0Q==", + "dev": true, + "dependencies": { + "@vue/runtime-core": "3.4.27", + 
"@vue/shared": "3.4.27", + "csstype": "^3.1.3" + } + }, + "node_modules/@vue/server-renderer": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/server-renderer/-/server-renderer-3.4.27.tgz", + "integrity": "sha512-dlAMEuvmeA3rJsOMJ2J1kXU7o7pOxgsNHVr9K8hB3ImIkSuBrIdy0vF66h8gf8Tuinf1TK3mPAz2+2sqyf3KzA==", + "dev": true, + "dependencies": { + "@vue/compiler-ssr": "3.4.27", + "@vue/shared": "3.4.27" + }, + "peerDependencies": { + "vue": "3.4.27" + } + }, + "node_modules/@vue/shared": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/@vue/shared/-/shared-3.4.27.tgz", + "integrity": "sha512-DL3NmY2OFlqmYYrzp39yi3LDkKxa5vZVwxWdQ3rG0ekuWscHraeIbnI8t+aZK7qhYqEqWKTUdijadunb9pnrgA==", + "dev": true + }, + "node_modules/@vueuse/core": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/core/-/core-10.10.0.tgz", + "integrity": "sha512-vexJ/YXYs2S42B783rI95lMt3GzEwkxzC8Hb0Ndpd8rD+p+Lk/Za4bd797Ym7yq4jXqdSyj3JLChunF/vyYjUw==", + "dev": true, + "dependencies": { + "@types/web-bluetooth": "^0.0.20", + "@vueuse/metadata": "10.10.0", + "@vueuse/shared": "10.10.0", + "vue-demi": ">=0.14.7" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@vueuse/core/node_modules/vue-demi": { + "version": "0.14.8", + "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.8.tgz", + "integrity": "sha512-Uuqnk9YE9SsWeReYqK2alDI5YzciATE0r2SkA6iMAtuXvNTMNACJLJEXNXaEy94ECuBe4Sk6RzRU80kjdbIo1Q==", + "dev": true, + "hasInstallScript": true, + "bin": { + "vue-demi-fix": "bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/@vueuse/integrations": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/integrations/-/integrations-10.10.0.tgz", + "integrity": "sha512-vHGeK7X6mkdkpcm1eE9t3Cpm21pNVfZRwrjwwbrEs9XftnSgszF4831G2rei8Dt9cIYJIfFV+iyx/29muimJPQ==", + "dev": true, + "dependencies": { + "@vueuse/core": "10.10.0", + "@vueuse/shared": "10.10.0", + "vue-demi": ">=0.14.7" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "async-validator": "*", + "axios": "*", + "change-case": "*", + "drauu": "*", + "focus-trap": "*", + "fuse.js": "*", + "idb-keyval": "*", + "jwt-decode": "*", + "nprogress": "*", + "qrcode": "*", + "sortablejs": "*", + "universal-cookie": "*" + }, + "peerDependenciesMeta": { + "async-validator": { + "optional": true + }, + "axios": { + "optional": true + }, + "change-case": { + "optional": true + }, + "drauu": { + "optional": true + }, + "focus-trap": { + "optional": true + }, + "fuse.js": { + "optional": true + }, + "idb-keyval": { + "optional": true + }, + "jwt-decode": { + "optional": true + }, + "nprogress": { + "optional": true + }, + "qrcode": { + "optional": true + }, + "sortablejs": { + "optional": true + }, + "universal-cookie": { + "optional": true + } + } + }, + "node_modules/@vueuse/integrations/node_modules/vue-demi": { + "version": "0.14.8", + "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.8.tgz", + "integrity": "sha512-Uuqnk9YE9SsWeReYqK2alDI5YzciATE0r2SkA6iMAtuXvNTMNACJLJEXNXaEy94ECuBe4Sk6RzRU80kjdbIo1Q==", + "dev": true, + "hasInstallScript": true, + "bin": { + "vue-demi-fix": 
"bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/@vueuse/metadata": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/metadata/-/metadata-10.10.0.tgz", + "integrity": "sha512-UNAo2sTCAW5ge6OErPEHb5z7NEAg3XcO9Cj7OK45aZXfLLH1QkexDcZD77HBi5zvEiLOm1An+p/4b5K3Worpug==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@vueuse/shared": { + "version": "10.10.0", + "resolved": "https://registry.npmjs.org/@vueuse/shared/-/shared-10.10.0.tgz", + "integrity": "sha512-2aW33Ac0Uk0U+9yo3Ypg9s5KcR42cuehRWl7vnUHadQyFvCktseyxxEPBi1Eiq4D2yBGACOnqLZpx1eMc7g5Og==", + "dev": true, + "dependencies": { + "vue-demi": ">=0.14.7" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + } + }, + "node_modules/@vueuse/shared/node_modules/vue-demi": { + "version": "0.14.8", + "resolved": "https://registry.npmjs.org/vue-demi/-/vue-demi-0.14.8.tgz", + "integrity": "sha512-Uuqnk9YE9SsWeReYqK2alDI5YzciATE0r2SkA6iMAtuXvNTMNACJLJEXNXaEy94ECuBe4Sk6RzRU80kjdbIo1Q==", + "dev": true, + "hasInstallScript": true, + "bin": { + "vue-demi-fix": "bin/vue-demi-fix.js", + "vue-demi-switch": "bin/vue-demi-switch.js" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/antfu" + }, + "peerDependencies": { + "@vue/composition-api": "^1.0.0-rc.1", + "vue": "^3.0.0-0 || ^2.6.0" + }, + "peerDependenciesMeta": { + "@vue/composition-api": { + "optional": true + } + } + }, + "node_modules/algoliasearch": { + "version": "4.23.3", + "resolved": "https://registry.npmjs.org/algoliasearch/-/algoliasearch-4.23.3.tgz", + "integrity": "sha512-Le/3YgNvjW9zxIQMRhUHuhiUjAlKY/zsdZpfq4dlLqg6mEm0nL6yk+7f2hDOtLpxsgE4jSzDmvHL7nXdBp5feg==", + "dev": true, + "dependencies": { + "@algolia/cache-browser-local-storage": "4.23.3", + "@algolia/cache-common": "4.23.3", + "@algolia/cache-in-memory": "4.23.3", + "@algolia/client-account": "4.23.3", + "@algolia/client-analytics": "4.23.3", + "@algolia/client-common": "4.23.3", + "@algolia/client-personalization": "4.23.3", + "@algolia/client-search": "4.23.3", + "@algolia/logger-common": "4.23.3", + "@algolia/logger-console": "4.23.3", + "@algolia/recommend": "4.23.3", + "@algolia/requester-browser-xhr": "4.23.3", + "@algolia/requester-common": "4.23.3", + "@algolia/requester-node-http": "4.23.3", + "@algolia/transporter": "4.23.3" + } + }, + "node_modules/ansi-regex": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-6.0.1.tgz", + "integrity": "sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-regex?sponsor=1" + } + }, + "node_modules/ansi-styles": { + "version": "6.2.1", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-6.2.1.tgz", + "integrity": "sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/arg": { + "version": "5.0.2", + "resolved": 
"https://registry.npmjs.org/arg/-/arg-5.0.2.tgz", + "integrity": "sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==", + "dev": true + }, + "node_modules/argparse": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/argparse/-/argparse-2.0.1.tgz", + "integrity": "sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==", + "dev": true + }, + "node_modules/balanced-match": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/balanced-match/-/balanced-match-1.0.2.tgz", + "integrity": "sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==", + "dev": true + }, + "node_modules/brace-expansion": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/brace-expansion/-/brace-expansion-2.0.1.tgz", + "integrity": "sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==", + "dev": true, + "dependencies": { + "balanced-match": "^1.0.0" + } + }, + "node_modules/braces": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/braces/-/braces-3.0.3.tgz", + "integrity": "sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==", + "dev": true, + "dependencies": { + "fill-range": "^7.1.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/character-entities": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/character-entities/-/character-entities-2.0.2.tgz", + "integrity": "sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==", + "dev": true, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/color-convert": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/color-convert/-/color-convert-2.0.1.tgz", + "integrity": "sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==", + "dev": true, + "dependencies": { + "color-name": "~1.1.4" + }, + "engines": { + "node": ">=7.0.0" + } + }, + "node_modules/color-name": { + "version": "1.1.4", + "resolved": "https://registry.npmjs.org/color-name/-/color-name-1.1.4.tgz", + "integrity": "sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==", + "dev": true + }, + "node_modules/commander": { + "version": "12.0.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-12.0.0.tgz", + "integrity": "sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==", + "dev": true, + "engines": { + "node": ">=18" + } + }, + "node_modules/cose-base": { + "version": "1.0.3", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-1.0.3.tgz", + "integrity": "sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==", + "dev": true, + "dependencies": { + "layout-base": "^1.0.0" + } + }, + "node_modules/cross-spawn": { + "version": "7.0.3", + "resolved": "https://registry.npmjs.org/cross-spawn/-/cross-spawn-7.0.3.tgz", + "integrity": "sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==", + "dev": true, + "dependencies": { + "path-key": "^3.1.0", + "shebang-command": "^2.0.0", + "which": "^2.0.1" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/csstype": { + "version": "3.1.3", + "resolved": "https://registry.npmjs.org/csstype/-/csstype-3.1.3.tgz", + "integrity": 
"sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==", + "dev": true + }, + "node_modules/cytoscape": { + "version": "3.29.2", + "resolved": "https://registry.npmjs.org/cytoscape/-/cytoscape-3.29.2.tgz", + "integrity": "sha512-2G1ycU28Nh7OHT9rkXRLpCDP30MKH1dXJORZuBhtEhEW7pKwgPi77ImqlCWinouyE1PNepIOGZBOrE84DG7LyQ==", + "dev": true, + "engines": { + "node": ">=0.10" + } + }, + "node_modules/cytoscape-cose-bilkent": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/cytoscape-cose-bilkent/-/cytoscape-cose-bilkent-4.1.0.tgz", + "integrity": "sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==", + "dev": true, + "dependencies": { + "cose-base": "^1.0.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cytoscape-fcose/-/cytoscape-fcose-2.2.0.tgz", + "integrity": "sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==", + "dev": true, + "optional": true, + "dependencies": { + "cose-base": "^2.2.0" + }, + "peerDependencies": { + "cytoscape": "^3.2.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/cose-base": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/cose-base/-/cose-base-2.2.0.tgz", + "integrity": "sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==", + "dev": true, + "optional": true, + "dependencies": { + "layout-base": "^2.0.0" + } + }, + "node_modules/cytoscape-fcose/node_modules/layout-base": { + "version": "2.0.1", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-2.0.1.tgz", + "integrity": "sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==", + "dev": true, + "optional": true + }, + "node_modules/d3": { + "version": "7.9.0", + "resolved": "https://registry.npmjs.org/d3/-/d3-7.9.0.tgz", + "integrity": "sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==", + "dev": true, + "dependencies": { + "d3-array": "3", + "d3-axis": "3", + "d3-brush": "3", + "d3-chord": "3", + "d3-color": "3", + "d3-contour": "4", + "d3-delaunay": "6", + "d3-dispatch": "3", + "d3-drag": "3", + "d3-dsv": "3", + "d3-ease": "3", + "d3-fetch": "3", + "d3-force": "3", + "d3-format": "3", + "d3-geo": "3", + "d3-hierarchy": "3", + "d3-interpolate": "3", + "d3-path": "3", + "d3-polygon": "3", + "d3-quadtree": "3", + "d3-random": "3", + "d3-scale": "4", + "d3-scale-chromatic": "3", + "d3-selection": "3", + "d3-shape": "3", + "d3-time": "3", + "d3-time-format": "4", + "d3-timer": "3", + "d3-transition": "3", + "d3-zoom": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-array": { + "version": "3.2.4", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-3.2.4.tgz", + "integrity": "sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==", + "dev": true, + "dependencies": { + "internmap": "1 - 2" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-axis": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-axis/-/d3-axis-3.0.0.tgz", + "integrity": "sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-brush": { + "version": "3.0.0", + "resolved": 
"https://registry.npmjs.org/d3-brush/-/d3-brush-3.0.0.tgz", + "integrity": "sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "3", + "d3-transition": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-chord": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-chord/-/d3-chord-3.0.1.tgz", + "integrity": "sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==", + "dev": true, + "dependencies": { + "d3-path": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-color": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-color/-/d3-color-3.1.0.tgz", + "integrity": "sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-contour": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-contour/-/d3-contour-4.0.2.tgz", + "integrity": "sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==", + "dev": true, + "dependencies": { + "d3-array": "^3.2.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-delaunay": { + "version": "6.0.4", + "resolved": "https://registry.npmjs.org/d3-delaunay/-/d3-delaunay-6.0.4.tgz", + "integrity": "sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==", + "dev": true, + "dependencies": { + "delaunator": "5" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dispatch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dispatch/-/d3-dispatch-3.0.1.tgz", + "integrity": "sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-drag": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-drag/-/d3-drag-3.0.0.tgz", + "integrity": "sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-selection": "3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-dsv/-/d3-dsv-3.0.1.tgz", + "integrity": "sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==", + "dev": true, + "dependencies": { + "commander": "7", + "iconv-lite": "0.6", + "rw": "1" + }, + "bin": { + "csv2json": "bin/dsv2json.js", + "csv2tsv": "bin/dsv2dsv.js", + "dsv2dsv": "bin/dsv2dsv.js", + "dsv2json": "bin/dsv2json.js", + "json2csv": "bin/json2dsv.js", + "json2dsv": "bin/json2dsv.js", + "json2tsv": "bin/json2dsv.js", + "tsv2csv": "bin/dsv2dsv.js", + "tsv2json": "bin/dsv2json.js" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-dsv/node_modules/commander": { + "version": "7.2.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-7.2.0.tgz", + "integrity": "sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==", + "dev": true, + "engines": { + "node": ">= 10" + } + }, + "node_modules/d3-ease": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-ease/-/d3-ease-3.0.1.tgz", + "integrity": 
"sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-fetch": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-fetch/-/d3-fetch-3.0.1.tgz", + "integrity": "sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==", + "dev": true, + "dependencies": { + "d3-dsv": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-force": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-force/-/d3-force-3.0.0.tgz", + "integrity": "sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-quadtree": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-format": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-format/-/d3-format-3.1.0.tgz", + "integrity": "sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-geo": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/d3-geo/-/d3-geo-3.1.1.tgz", + "integrity": "sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==", + "dev": true, + "dependencies": { + "d3-array": "2.5.0 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-hierarchy": { + "version": "3.1.2", + "resolved": "https://registry.npmjs.org/d3-hierarchy/-/d3-hierarchy-3.1.2.tgz", + "integrity": "sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-interpolate": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-interpolate/-/d3-interpolate-3.0.1.tgz", + "integrity": "sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==", + "dev": true, + "dependencies": { + "d3-color": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-path": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-3.1.0.tgz", + "integrity": "sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-polygon": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-polygon/-/d3-polygon-3.0.1.tgz", + "integrity": "sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-quadtree": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-quadtree/-/d3-quadtree-3.0.1.tgz", + "integrity": "sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-random": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-random/-/d3-random-3.0.1.tgz", + "integrity": "sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-sankey": { + "version": "0.12.3", + "resolved": "https://registry.npmjs.org/d3-sankey/-/d3-sankey-0.12.3.tgz", + "integrity": 
"sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==", + "dev": true, + "dependencies": { + "d3-array": "1 - 2", + "d3-shape": "^1.2.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-array": { + "version": "2.12.1", + "resolved": "https://registry.npmjs.org/d3-array/-/d3-array-2.12.1.tgz", + "integrity": "sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==", + "dev": true, + "dependencies": { + "internmap": "^1.0.0" + } + }, + "node_modules/d3-sankey/node_modules/d3-path": { + "version": "1.0.9", + "resolved": "https://registry.npmjs.org/d3-path/-/d3-path-1.0.9.tgz", + "integrity": "sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==", + "dev": true + }, + "node_modules/d3-sankey/node_modules/d3-shape": { + "version": "1.3.7", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-1.3.7.tgz", + "integrity": "sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==", + "dev": true, + "dependencies": { + "d3-path": "1" + } + }, + "node_modules/d3-sankey/node_modules/internmap": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-1.0.1.tgz", + "integrity": "sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==", + "dev": true + }, + "node_modules/d3-scale": { + "version": "4.0.2", + "resolved": "https://registry.npmjs.org/d3-scale/-/d3-scale-4.0.2.tgz", + "integrity": "sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==", + "dev": true, + "dependencies": { + "d3-array": "2.10.0 - 3", + "d3-format": "1 - 3", + "d3-interpolate": "1.2.0 - 3", + "d3-time": "2.1.1 - 3", + "d3-time-format": "2 - 4" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-scale-chromatic": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-scale-chromatic/-/d3-scale-chromatic-3.1.0.tgz", + "integrity": "sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==", + "dev": true, + "dependencies": { + "d3-color": "1 - 3", + "d3-interpolate": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-selection": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-selection/-/d3-selection-3.0.0.tgz", + "integrity": "sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-shape": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/d3-shape/-/d3-shape-3.2.0.tgz", + "integrity": "sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==", + "dev": true, + "dependencies": { + "d3-path": "^3.1.0" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time": { + "version": "3.1.0", + "resolved": "https://registry.npmjs.org/d3-time/-/d3-time-3.1.0.tgz", + "integrity": "sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==", + "dev": true, + "dependencies": { + "d3-array": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-time-format": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/d3-time-format/-/d3-time-format-4.1.0.tgz", + "integrity": "sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==", + "dev": true, + "dependencies": { + 
"d3-time": "1 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-timer": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-timer/-/d3-timer-3.0.1.tgz", + "integrity": "sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/d3-transition": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/d3-transition/-/d3-transition-3.0.1.tgz", + "integrity": "sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==", + "dev": true, + "dependencies": { + "d3-color": "1 - 3", + "d3-dispatch": "1 - 3", + "d3-ease": "1 - 3", + "d3-interpolate": "1 - 3", + "d3-timer": "1 - 3" + }, + "engines": { + "node": ">=12" + }, + "peerDependencies": { + "d3-selection": "2 - 3" + } + }, + "node_modules/d3-zoom": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/d3-zoom/-/d3-zoom-3.0.0.tgz", + "integrity": "sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==", + "dev": true, + "dependencies": { + "d3-dispatch": "1 - 3", + "d3-drag": "2 - 3", + "d3-interpolate": "1 - 3", + "d3-selection": "2 - 3", + "d3-transition": "2 - 3" + }, + "engines": { + "node": ">=12" + } + }, + "node_modules/dagre-d3-es": { + "version": "7.0.10", + "resolved": "https://registry.npmjs.org/dagre-d3-es/-/dagre-d3-es-7.0.10.tgz", + "integrity": "sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==", + "dev": true, + "dependencies": { + "d3": "^7.8.2", + "lodash-es": "^4.17.21" + } + }, + "node_modules/dayjs": { + "version": "1.11.11", + "resolved": "https://registry.npmjs.org/dayjs/-/dayjs-1.11.11.tgz", + "integrity": "sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==", + "dev": true + }, + "node_modules/debug": { + "version": "4.3.5", + "resolved": "https://registry.npmjs.org/debug/-/debug-4.3.5.tgz", + "integrity": "sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==", + "dev": true, + "dependencies": { + "ms": "2.1.2" + }, + "engines": { + "node": ">=6.0" + }, + "peerDependenciesMeta": { + "supports-color": { + "optional": true + } + } + }, + "node_modules/decode-named-character-reference": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/decode-named-character-reference/-/decode-named-character-reference-1.0.2.tgz", + "integrity": "sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==", + "dev": true, + "dependencies": { + "character-entities": "^2.0.0" + }, + "funding": { + "type": "github", + "url": "https://github.com/sponsors/wooorm" + } + }, + "node_modules/deep-extend": { + "version": "0.6.0", + "resolved": "https://registry.npmjs.org/deep-extend/-/deep-extend-0.6.0.tgz", + "integrity": "sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==", + "dev": true, + "engines": { + "node": ">=4.0.0" + } + }, + "node_modules/delaunator": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/delaunator/-/delaunator-5.0.1.tgz", + "integrity": "sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==", + "dev": true, + "dependencies": { + "robust-predicates": "^3.0.2" + } + }, + "node_modules/dequal": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/dequal/-/dequal-2.0.3.tgz", + "integrity": 
"sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/diff": { + "version": "5.2.0", + "resolved": "https://registry.npmjs.org/diff/-/diff-5.2.0.tgz", + "integrity": "sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==", + "dev": true, + "engines": { + "node": ">=0.3.1" + } + }, + "node_modules/dompurify": { + "version": "3.1.5", + "resolved": "https://registry.npmjs.org/dompurify/-/dompurify-3.1.5.tgz", + "integrity": "sha512-lwG+n5h8QNpxtyrJW/gJWckL+1/DQiYMX8f7t8Z2AZTPw1esVrqjI63i7Zc2Gz0aKzLVMYC1V1PL/ky+aY/NgA==", + "dev": true + }, + "node_modules/eastasianwidth": { + "version": "0.2.0", + "resolved": "https://registry.npmjs.org/eastasianwidth/-/eastasianwidth-0.2.0.tgz", + "integrity": "sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==", + "dev": true + }, + "node_modules/elkjs": { + "version": "0.9.3", + "resolved": "https://registry.npmjs.org/elkjs/-/elkjs-0.9.3.tgz", + "integrity": "sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==", + "dev": true + }, + "node_modules/emoji-regex": { + "version": "9.2.2", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-9.2.2.tgz", + "integrity": "sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==", + "dev": true + }, + "node_modules/entities": { + "version": "4.5.0", + "resolved": "https://registry.npmjs.org/entities/-/entities-4.5.0.tgz", + "integrity": "sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==", + "dev": true, + "engines": { + "node": ">=0.12" + }, + "funding": { + "url": "https://github.com/fb55/entities?sponsor=1" + } + }, + "node_modules/esbuild": { + "version": "0.20.2", + "resolved": "https://registry.npmjs.org/esbuild/-/esbuild-0.20.2.tgz", + "integrity": "sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==", + "dev": true, + "hasInstallScript": true, + "bin": { + "esbuild": "bin/esbuild" + }, + "engines": { + "node": ">=12" + }, + "optionalDependencies": { + "@esbuild/aix-ppc64": "0.20.2", + "@esbuild/android-arm": "0.20.2", + "@esbuild/android-arm64": "0.20.2", + "@esbuild/android-x64": "0.20.2", + "@esbuild/darwin-arm64": "0.20.2", + "@esbuild/darwin-x64": "0.20.2", + "@esbuild/freebsd-arm64": "0.20.2", + "@esbuild/freebsd-x64": "0.20.2", + "@esbuild/linux-arm": "0.20.2", + "@esbuild/linux-arm64": "0.20.2", + "@esbuild/linux-ia32": "0.20.2", + "@esbuild/linux-loong64": "0.20.2", + "@esbuild/linux-mips64el": "0.20.2", + "@esbuild/linux-ppc64": "0.20.2", + "@esbuild/linux-riscv64": "0.20.2", + "@esbuild/linux-s390x": "0.20.2", + "@esbuild/linux-x64": "0.20.2", + "@esbuild/netbsd-x64": "0.20.2", + "@esbuild/openbsd-x64": "0.20.2", + "@esbuild/sunos-x64": "0.20.2", + "@esbuild/win32-arm64": "0.20.2", + "@esbuild/win32-ia32": "0.20.2", + "@esbuild/win32-x64": "0.20.2" + } + }, + "node_modules/estree-walker": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/estree-walker/-/estree-walker-2.0.2.tgz", + "integrity": "sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==", + "dev": true + }, + "node_modules/fast-glob": { + "version": "3.3.2", + "resolved": "https://registry.npmjs.org/fast-glob/-/fast-glob-3.3.2.tgz", + "integrity": 
"sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==", + "dev": true, + "dependencies": { + "@nodelib/fs.stat": "^2.0.2", + "@nodelib/fs.walk": "^1.2.3", + "glob-parent": "^5.1.2", + "merge2": "^1.3.0", + "micromatch": "^4.0.4" + }, + "engines": { + "node": ">=8.6.0" + } + }, + "node_modules/fastq": { + "version": "1.17.1", + "resolved": "https://registry.npmjs.org/fastq/-/fastq-1.17.1.tgz", + "integrity": "sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==", + "dev": true, + "dependencies": { + "reusify": "^1.0.4" + } + }, + "node_modules/fill-range": { + "version": "7.1.1", + "resolved": "https://registry.npmjs.org/fill-range/-/fill-range-7.1.1.tgz", + "integrity": "sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==", + "dev": true, + "dependencies": { + "to-regex-range": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/focus-trap": { + "version": "7.5.4", + "resolved": "https://registry.npmjs.org/focus-trap/-/focus-trap-7.5.4.tgz", + "integrity": "sha512-N7kHdlgsO/v+iD/dMoJKtsSqs5Dz/dXZVebRgJw23LDk+jMi/974zyiOYDziY2JPp8xivq9BmUGwIJMiuSBi7w==", + "dev": true, + "dependencies": { + "tabbable": "^6.2.0" + } + }, + "node_modules/foreground-child": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/foreground-child/-/foreground-child-3.1.1.tgz", + "integrity": "sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==", + "dev": true, + "dependencies": { + "cross-spawn": "^7.0.0", + "signal-exit": "^4.0.1" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/fsevents": { + "version": "2.3.2", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.2.tgz", + "integrity": "sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==", + "dev": true, + "hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/get-stdin": { + "version": "9.0.0", + "resolved": "https://registry.npmjs.org/get-stdin/-/get-stdin-9.0.0.tgz", + "integrity": "sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==", + "dev": true, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/glob": { + "version": "10.3.16", + "resolved": "https://registry.npmjs.org/glob/-/glob-10.3.16.tgz", + "integrity": "sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==", + "dev": true, + "dependencies": { + "foreground-child": "^3.1.0", + "jackspeak": "^3.1.2", + "minimatch": "^9.0.1", + "minipass": "^7.0.4", + "path-scurry": "^1.11.0" + }, + "bin": { + "glob": "dist/esm/bin.mjs" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/glob-parent": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/glob-parent/-/glob-parent-5.1.2.tgz", + "integrity": "sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==", + "dev": true, + "dependencies": { + "is-glob": "^4.0.1" + }, + "engines": { + "node": ">= 6" + } + }, + "node_modules/hookable": { + "version": "5.5.3", + "resolved": 
"https://registry.npmjs.org/hookable/-/hookable-5.5.3.tgz", + "integrity": "sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==", + "dev": true + }, + "node_modules/iconv-lite": { + "version": "0.6.3", + "resolved": "https://registry.npmjs.org/iconv-lite/-/iconv-lite-0.6.3.tgz", + "integrity": "sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==", + "dev": true, + "dependencies": { + "safer-buffer": ">= 2.1.2 < 3.0.0" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/ignore": { + "version": "5.3.1", + "resolved": "https://registry.npmjs.org/ignore/-/ignore-5.3.1.tgz", + "integrity": "sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==", + "dev": true, + "engines": { + "node": ">= 4" + } + }, + "node_modules/ini": { + "version": "4.1.3", + "resolved": "https://registry.npmjs.org/ini/-/ini-4.1.3.tgz", + "integrity": "sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==", + "dev": true, + "engines": { + "node": "^14.17.0 || ^16.13.0 || >=18.0.0" + } + }, + "node_modules/internmap": { + "version": "2.0.3", + "resolved": "https://registry.npmjs.org/internmap/-/internmap-2.0.3.tgz", + "integrity": "sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==", + "dev": true, + "engines": { + "node": ">=12" + } + }, + "node_modules/is-extglob": { + "version": "2.1.1", + "resolved": "https://registry.npmjs.org/is-extglob/-/is-extglob-2.1.1.tgz", + "integrity": "sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-fullwidth-code-point": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/is-fullwidth-code-point/-/is-fullwidth-code-point-3.0.0.tgz", + "integrity": "sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/is-glob": { + "version": "4.0.3", + "resolved": "https://registry.npmjs.org/is-glob/-/is-glob-4.0.3.tgz", + "integrity": "sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==", + "dev": true, + "dependencies": { + "is-extglob": "^2.1.1" + }, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/is-number": { + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/is-number/-/is-number-7.0.0.tgz", + "integrity": "sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==", + "dev": true, + "engines": { + "node": ">=0.12.0" + } + }, + "node_modules/isexe": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/isexe/-/isexe-2.0.0.tgz", + "integrity": "sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==", + "dev": true + }, + "node_modules/jackspeak": { + "version": "3.4.0", + "resolved": "https://registry.npmjs.org/jackspeak/-/jackspeak-3.4.0.tgz", + "integrity": "sha512-JVYhQnN59LVPFCEcVa2C3CrEKYacvjRfqIQl+h8oi91aLYQVWRYbxjPcv1bUiUy/kLmQaANrYfNMCO3kuEDHfw==", + "dev": true, + "dependencies": { + "@isaacs/cliui": "^8.0.2" + }, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + }, + "optionalDependencies": { + "@pkgjs/parseargs": "^0.11.0" + } + }, + "node_modules/js-yaml": { + "version": "4.1.0", + "resolved": 
"https://registry.npmjs.org/js-yaml/-/js-yaml-4.1.0.tgz", + "integrity": "sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1" + }, + "bin": { + "js-yaml": "bin/js-yaml.js" + } + }, + "node_modules/jsonc-parser": { + "version": "3.2.1", + "resolved": "https://registry.npmjs.org/jsonc-parser/-/jsonc-parser-3.2.1.tgz", + "integrity": "sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==", + "dev": true + }, + "node_modules/jsonpointer": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/jsonpointer/-/jsonpointer-5.0.1.tgz", + "integrity": "sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/katex": { + "version": "0.16.10", + "resolved": "https://registry.npmjs.org/katex/-/katex-0.16.10.tgz", + "integrity": "sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==", + "dev": true, + "funding": [ + "https://opencollective.com/katex", + "https://github.com/sponsors/katex" + ], + "dependencies": { + "commander": "^8.3.0" + }, + "bin": { + "katex": "cli.js" + } + }, + "node_modules/katex/node_modules/commander": { + "version": "8.3.0", + "resolved": "https://registry.npmjs.org/commander/-/commander-8.3.0.tgz", + "integrity": "sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==", + "dev": true, + "engines": { + "node": ">= 12" + } + }, + "node_modules/khroma": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/khroma/-/khroma-2.1.0.tgz", + "integrity": "sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==", + "dev": true + }, + "node_modules/kleur": { + "version": "4.1.5", + "resolved": "https://registry.npmjs.org/kleur/-/kleur-4.1.5.tgz", + "integrity": "sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/layout-base": { + "version": "1.0.2", + "resolved": "https://registry.npmjs.org/layout-base/-/layout-base-1.0.2.tgz", + "integrity": "sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==", + "dev": true + }, + "node_modules/linkify-it": { + "version": "5.0.0", + "resolved": "https://registry.npmjs.org/linkify-it/-/linkify-it-5.0.0.tgz", + "integrity": "sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==", + "dev": true, + "dependencies": { + "uc.micro": "^2.0.0" + } + }, + "node_modules/lodash-es": { + "version": "4.17.21", + "resolved": "https://registry.npmjs.org/lodash-es/-/lodash-es-4.17.21.tgz", + "integrity": "sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==", + "dev": true + }, + "node_modules/lru-cache": { + "version": "10.2.2", + "resolved": "https://registry.npmjs.org/lru-cache/-/lru-cache-10.2.2.tgz", + "integrity": "sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==", + "dev": true, + "engines": { + "node": "14 || >=16.14" + } + }, + "node_modules/magic-string": { + "version": "0.30.10", + "resolved": "https://registry.npmjs.org/magic-string/-/magic-string-0.30.10.tgz", + "integrity": 
"sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==", + "dev": true, + "dependencies": { + "@jridgewell/sourcemap-codec": "^1.4.15" + } + }, + "node_modules/mark.js": { + "version": "8.11.1", + "resolved": "https://registry.npmjs.org/mark.js/-/mark.js-8.11.1.tgz", + "integrity": "sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==", + "dev": true + }, + "node_modules/markdown-it": { + "version": "14.1.0", + "resolved": "https://registry.npmjs.org/markdown-it/-/markdown-it-14.1.0.tgz", + "integrity": "sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==", + "dev": true, + "dependencies": { + "argparse": "^2.0.1", + "entities": "^4.4.0", + "linkify-it": "^5.0.0", + "mdurl": "^2.0.0", + "punycode.js": "^2.3.1", + "uc.micro": "^2.1.0" + }, + "bin": { + "markdown-it": "bin/markdown-it.mjs" + } + }, + "node_modules/markdown-it-footnote": { + "version": "4.0.0", + "resolved": "https://registry.npmjs.org/markdown-it-footnote/-/markdown-it-footnote-4.0.0.tgz", + "integrity": "sha512-WYJ7urf+khJYl3DqofQpYfEYkZKbmXmwxQV8c8mO/hGIhgZ1wOe7R4HLFNwqx7TjILbnC98fuyeSsin19JdFcQ==", + "dev": true + }, + "node_modules/markdownlint": { + "version": "0.34.0", + "resolved": "https://registry.npmjs.org/markdownlint/-/markdownlint-0.34.0.tgz", + "integrity": "sha512-qwGyuyKwjkEMOJ10XN6OTKNOVYvOIi35RNvDLNxTof5s8UmyGHlCdpngRHoRGNvQVGuxO3BJ7uNSgdeX166WXw==", + "dev": true, + "dependencies": { + "markdown-it": "14.1.0", + "markdownlint-micromark": "0.1.9" + }, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/DavidAnson" + } + }, + "node_modules/markdownlint-cli": { + "version": "0.40.0", + "resolved": "https://registry.npmjs.org/markdownlint-cli/-/markdownlint-cli-0.40.0.tgz", + "integrity": "sha512-JXhI3dRQcaqwiFYpPz6VJ7aKYheD53GmTz9y4D/d0F1MbZDGOp9pqKlbOfUX/pHP/iAoeiE4wYRmk8/kjLakxA==", + "dev": true, + "dependencies": { + "commander": "~12.0.0", + "get-stdin": "~9.0.0", + "glob": "~10.3.12", + "ignore": "~5.3.1", + "js-yaml": "^4.1.0", + "jsonc-parser": "~3.2.1", + "jsonpointer": "5.0.1", + "markdownlint": "~0.34.0", + "minimatch": "~9.0.4", + "run-con": "~1.3.2", + "toml": "~3.0.0" + }, + "bin": { + "markdownlint": "markdownlint.js" + }, + "engines": { + "node": ">=18" + } + }, + "node_modules/markdownlint-micromark": { + "version": "0.1.9", + "resolved": "https://registry.npmjs.org/markdownlint-micromark/-/markdownlint-micromark-0.1.9.tgz", + "integrity": "sha512-5hVs/DzAFa8XqYosbEAEg6ok6MF2smDj89ztn9pKkCtdKHVdPQuGMH7frFfYL9mLkvfFe4pTyAMffLbjf3/EyA==", + "dev": true, + "engines": { + "node": ">=18" + }, + "funding": { + "url": "https://github.com/sponsors/DavidAnson" + } + }, + "node_modules/mdast-util-from-markdown": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/mdast-util-from-markdown/-/mdast-util-from-markdown-1.3.1.tgz", + "integrity": "sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==", + "dev": true, + "dependencies": { + "@types/mdast": "^3.0.0", + "@types/unist": "^2.0.0", + "decode-named-character-reference": "^1.0.0", + "mdast-util-to-string": "^3.1.0", + "micromark": "^3.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-decode-string": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "unist-util-stringify-position": "^3.0.0", + "uvu": "^0.5.0" 
+ }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdast-util-to-string": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/mdast-util-to-string/-/mdast-util-to-string-3.2.0.tgz", + "integrity": "sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==", + "dev": true, + "dependencies": { + "@types/mdast": "^3.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/mdurl": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/mdurl/-/mdurl-2.0.0.tgz", + "integrity": "sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==", + "dev": true + }, + "node_modules/merge2": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/merge2/-/merge2-1.4.1.tgz", + "integrity": "sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==", + "dev": true, + "engines": { + "node": ">= 8" + } + }, + "node_modules/mermaid": { + "version": "10.9.1", + "resolved": "https://registry.npmjs.org/mermaid/-/mermaid-10.9.1.tgz", + "integrity": "sha512-Mx45Obds5W1UkW1nv/7dHRsbfMM1aOKA2+Pxs/IGHNonygDHwmng8xTHyS9z4KWVi0rbko8gjiBmuwwXQ7tiNA==", + "dev": true, + "dependencies": { + "@braintree/sanitize-url": "^6.0.1", + "@types/d3-scale": "^4.0.3", + "@types/d3-scale-chromatic": "^3.0.0", + "cytoscape": "^3.28.1", + "cytoscape-cose-bilkent": "^4.1.0", + "d3": "^7.4.0", + "d3-sankey": "^0.12.3", + "dagre-d3-es": "7.0.10", + "dayjs": "^1.11.7", + "dompurify": "^3.0.5", + "elkjs": "^0.9.0", + "katex": "^0.16.9", + "khroma": "^2.0.0", + "lodash-es": "^4.17.21", + "mdast-util-from-markdown": "^1.3.0", + "non-layered-tidy-tree-layout": "^2.0.2", + "stylis": "^4.1.3", + "ts-dedent": "^2.2.0", + "uuid": "^9.0.0", + "web-worker": "^1.2.0" + } + }, + "node_modules/micromark": { + "version": "3.2.0", + "resolved": "https://registry.npmjs.org/micromark/-/micromark-3.2.0.tgz", + "integrity": "sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "@types/debug": "^4.0.0", + "debug": "^4.0.0", + "decode-named-character-reference": "^1.0.0", + "micromark-core-commonmark": "^1.0.1", + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-combine-extensions": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-sanitize-uri": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-core-commonmark": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-core-commonmark/-/micromark-core-commonmark-1.1.0.tgz", + "integrity": "sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": 
"https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-factory-destination": "^1.0.0", + "micromark-factory-label": "^1.0.0", + "micromark-factory-space": "^1.0.0", + "micromark-factory-title": "^1.0.0", + "micromark-factory-whitespace": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-chunked": "^1.0.0", + "micromark-util-classify-character": "^1.0.0", + "micromark-util-html-tag-name": "^1.0.0", + "micromark-util-normalize-identifier": "^1.0.0", + "micromark-util-resolve-all": "^1.0.0", + "micromark-util-subtokenize": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.1", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-factory-destination": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-destination/-/micromark-factory-destination-1.1.0.tgz", + "integrity": "sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-label": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-label/-/micromark-factory-label-1.1.0.tgz", + "integrity": "sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-factory-space": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-space/-/micromark-factory-space-1.1.0.tgz", + "integrity": "sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-title": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-title/-/micromark-factory-title-1.1.0.tgz", + "integrity": "sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-factory-whitespace": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-factory-whitespace/-/micromark-factory-whitespace-1.1.0.tgz", + "integrity": 
"sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-factory-space": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-character": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-character/-/micromark-util-character-1.2.0.tgz", + "integrity": "sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-chunked": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-chunked/-/micromark-util-chunked-1.1.0.tgz", + "integrity": "sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-classify-character": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-classify-character/-/micromark-util-classify-character-1.1.0.tgz", + "integrity": "sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-combine-extensions": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-combine-extensions/-/micromark-util-combine-extensions-1.1.0.tgz", + "integrity": "sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-decode-numeric-character-reference": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-numeric-character-reference/-/micromark-util-decode-numeric-character-reference-1.1.0.tgz", + "integrity": "sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + 
"node_modules/micromark-util-decode-string": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-decode-string/-/micromark-util-decode-string-1.1.0.tgz", + "integrity": "sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "decode-named-character-reference": "^1.0.0", + "micromark-util-character": "^1.0.0", + "micromark-util-decode-numeric-character-reference": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-encode": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-encode/-/micromark-util-encode-1.1.0.tgz", + "integrity": "sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-html-tag-name": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-html-tag-name/-/micromark-util-html-tag-name-1.2.0.tgz", + "integrity": "sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-normalize-identifier": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-normalize-identifier/-/micromark-util-normalize-identifier-1.1.0.tgz", + "integrity": "sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-resolve-all": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-resolve-all/-/micromark-util-resolve-all-1.1.0.tgz", + "integrity": "sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-types": "^1.0.0" + } + }, + "node_modules/micromark-util-sanitize-uri": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/micromark-util-sanitize-uri/-/micromark-util-sanitize-uri-1.2.0.tgz", + "integrity": "sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-character": "^1.0.0", + "micromark-util-encode": "^1.0.0", + "micromark-util-symbol": "^1.0.0" + } + }, + "node_modules/micromark-util-subtokenize": { + 
"version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-subtokenize/-/micromark-util-subtokenize-1.1.0.tgz", + "integrity": "sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ], + "dependencies": { + "micromark-util-chunked": "^1.0.0", + "micromark-util-symbol": "^1.0.0", + "micromark-util-types": "^1.0.0", + "uvu": "^0.5.0" + } + }, + "node_modules/micromark-util-symbol": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-symbol/-/micromark-util-symbol-1.1.0.tgz", + "integrity": "sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromark-util-types": { + "version": "1.1.0", + "resolved": "https://registry.npmjs.org/micromark-util-types/-/micromark-util-types-1.1.0.tgz", + "integrity": "sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==", + "dev": true, + "funding": [ + { + "type": "GitHub Sponsors", + "url": "https://github.com/sponsors/unifiedjs" + }, + { + "type": "OpenCollective", + "url": "https://opencollective.com/unified" + } + ] + }, + "node_modules/micromatch": { + "version": "4.0.7", + "resolved": "https://registry.npmjs.org/micromatch/-/micromatch-4.0.7.tgz", + "integrity": "sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==", + "dev": true, + "dependencies": { + "braces": "^3.0.3", + "picomatch": "^2.3.1" + }, + "engines": { + "node": ">=8.6" + } + }, + "node_modules/minimatch": { + "version": "9.0.4", + "resolved": "https://registry.npmjs.org/minimatch/-/minimatch-9.0.4.tgz", + "integrity": "sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==", + "dev": true, + "dependencies": { + "brace-expansion": "^2.0.1" + }, + "engines": { + "node": ">=16 || 14 >=14.17" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/minimist": { + "version": "1.2.8", + "resolved": "https://registry.npmjs.org/minimist/-/minimist-1.2.8.tgz", + "integrity": "sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==", + "dev": true, + "funding": { + "url": "https://github.com/sponsors/ljharb" + } + }, + "node_modules/minipass": { + "version": "7.1.2", + "resolved": "https://registry.npmjs.org/minipass/-/minipass-7.1.2.tgz", + "integrity": "sha512-qOOzS1cBTWYF4BH8fVePDBOO9iptMnGUEZwNc/cMWnTV2nVLZ7VoNWEPHkYczZA0pdoA7dl6e7FL659nX9S2aw==", + "dev": true, + "engines": { + "node": ">=16 || 14 >=14.17" + } + }, + "node_modules/minisearch": { + "version": "6.3.0", + "resolved": "https://registry.npmjs.org/minisearch/-/minisearch-6.3.0.tgz", + "integrity": "sha512-ihFnidEeU8iXzcVHy74dhkxh/dn8Dc08ERl0xwoMMGqp4+LvRSCgicb+zGqWthVokQKvCSxITlh3P08OzdTYCQ==", + "dev": true + }, + "node_modules/mitt": { + "version": "3.0.1", + "resolved": "https://registry.npmjs.org/mitt/-/mitt-3.0.1.tgz", + "integrity": "sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==", + "dev": true + }, + "node_modules/mri": { + 
"version": "1.2.0", + "resolved": "https://registry.npmjs.org/mri/-/mri-1.2.0.tgz", + "integrity": "sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==", + "dev": true, + "engines": { + "node": ">=4" + } + }, + "node_modules/ms": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/ms/-/ms-2.1.2.tgz", + "integrity": "sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==", + "dev": true + }, + "node_modules/nanoid": { + "version": "3.3.7", + "resolved": "https://registry.npmjs.org/nanoid/-/nanoid-3.3.7.tgz", + "integrity": "sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "bin": { + "nanoid": "bin/nanoid.cjs" + }, + "engines": { + "node": "^10 || ^12 || ^13.7 || ^14 || >=15.0.1" + } + }, + "node_modules/non-layered-tidy-tree-layout": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/non-layered-tidy-tree-layout/-/non-layered-tidy-tree-layout-2.0.2.tgz", + "integrity": "sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==", + "dev": true + }, + "node_modules/path-key": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/path-key/-/path-key-3.1.1.tgz", + "integrity": "sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/path-scurry": { + "version": "1.11.1", + "resolved": "https://registry.npmjs.org/path-scurry/-/path-scurry-1.11.1.tgz", + "integrity": "sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==", + "dev": true, + "dependencies": { + "lru-cache": "^10.2.0", + "minipass": "^5.0.0 || ^6.0.2 || ^7.0.0" + }, + "engines": { + "node": ">=16 || 14 >=14.18" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/perfect-debounce": { + "version": "1.0.0", + "resolved": "https://registry.npmjs.org/perfect-debounce/-/perfect-debounce-1.0.0.tgz", + "integrity": "sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==", + "dev": true + }, + "node_modules/picocolors": { + "version": "1.0.1", + "resolved": "https://registry.npmjs.org/picocolors/-/picocolors-1.0.1.tgz", + "integrity": "sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==", + "dev": true + }, + "node_modules/picomatch": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/picomatch/-/picomatch-2.3.1.tgz", + "integrity": "sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==", + "dev": true, + "engines": { + "node": ">=8.6" + }, + "funding": { + "url": "https://github.com/sponsors/jonschlinkert" + } + }, + "node_modules/playwright": { + "version": "1.44.1", + "resolved": "https://registry.npmjs.org/playwright/-/playwright-1.44.1.tgz", + "integrity": "sha512-qr/0UJ5CFAtloI3avF95Y0L1xQo6r3LQArLIg/z/PoGJ6xa+EwzrwO5lpNr/09STxdHuUoP2mvuELJS+hLdtgg==", + "dev": true, + "dependencies": { + "playwright-core": "1.44.1" + }, + "bin": { + "playwright": "cli.js" + }, + "engines": { + "node": ">=16" + }, + "optionalDependencies": { + "fsevents": "2.3.2" + } + }, + "node_modules/playwright-core": { + "version": "1.44.1", + "resolved": 
"https://registry.npmjs.org/playwright-core/-/playwright-core-1.44.1.tgz", + "integrity": "sha512-wh0JWtYTrhv1+OSsLPgFzGzt67Y7BE/ZS3jEqgGBlp2ppp1ZDj8c+9IARNW4dwf1poq5MgHreEM2KV/GuR4cFA==", + "dev": true, + "bin": { + "playwright-core": "cli.js" + }, + "engines": { + "node": ">=16" + } + }, + "node_modules/postcss": { + "version": "8.4.38", + "resolved": "https://registry.npmjs.org/postcss/-/postcss-8.4.38.tgz", + "integrity": "sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==", + "dev": true, + "funding": [ + { + "type": "opencollective", + "url": "https://opencollective.com/postcss/" + }, + { + "type": "tidelift", + "url": "https://tidelift.com/funding/github/npm/postcss" + }, + { + "type": "github", + "url": "https://github.com/sponsors/ai" + } + ], + "dependencies": { + "nanoid": "^3.3.7", + "picocolors": "^1.0.0", + "source-map-js": "^1.2.0" + }, + "engines": { + "node": "^10 || ^12 || >=14" + } + }, + "node_modules/preact": { + "version": "10.22.0", + "resolved": "https://registry.npmjs.org/preact/-/preact-10.22.0.tgz", + "integrity": "sha512-RRurnSjJPj4rp5K6XoP45Ui33ncb7e4H7WiOHVpjbkvqvA3U+N8Z6Qbo0AE6leGYBV66n8EhEaFixvIu3SkxFw==", + "dev": true, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/preact" + } + }, + "node_modules/punycode.js": { + "version": "2.3.1", + "resolved": "https://registry.npmjs.org/punycode.js/-/punycode.js-2.3.1.tgz", + "integrity": "sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==", + "dev": true, + "engines": { + "node": ">=6" + } + }, + "node_modules/queue-microtask": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/queue-microtask/-/queue-microtask-1.2.3.tgz", + "integrity": "sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ] + }, + "node_modules/reusify": { + "version": "1.0.4", + "resolved": "https://registry.npmjs.org/reusify/-/reusify-1.0.4.tgz", + "integrity": "sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==", + "dev": true, + "engines": { + "iojs": ">=1.0.0", + "node": ">=0.10.0" + } + }, + "node_modules/rfdc": { + "version": "1.3.1", + "resolved": "https://registry.npmjs.org/rfdc/-/rfdc-1.3.1.tgz", + "integrity": "sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==", + "dev": true + }, + "node_modules/robust-predicates": { + "version": "3.0.2", + "resolved": "https://registry.npmjs.org/robust-predicates/-/robust-predicates-3.0.2.tgz", + "integrity": "sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==", + "dev": true + }, + "node_modules/rollup": { + "version": "4.18.0", + "resolved": "https://registry.npmjs.org/rollup/-/rollup-4.18.0.tgz", + "integrity": "sha512-QmJz14PX3rzbJCN1SG4Xe/bAAX2a6NpCP8ab2vfu2GiUr8AQcr2nCV/oEO3yneFarB67zk8ShlIyWb2LGTb3Sg==", + "dev": true, + "dependencies": { + "@types/estree": "1.0.5" + }, + "bin": { + "rollup": "dist/bin/rollup" + }, + "engines": { + "node": ">=18.0.0", + "npm": ">=8.0.0" + }, + "optionalDependencies": { + "@rollup/rollup-android-arm-eabi": "4.18.0", + "@rollup/rollup-android-arm64": "4.18.0", + "@rollup/rollup-darwin-arm64": "4.18.0", + 
"@rollup/rollup-darwin-x64": "4.18.0", + "@rollup/rollup-linux-arm-gnueabihf": "4.18.0", + "@rollup/rollup-linux-arm-musleabihf": "4.18.0", + "@rollup/rollup-linux-arm64-gnu": "4.18.0", + "@rollup/rollup-linux-arm64-musl": "4.18.0", + "@rollup/rollup-linux-powerpc64le-gnu": "4.18.0", + "@rollup/rollup-linux-riscv64-gnu": "4.18.0", + "@rollup/rollup-linux-s390x-gnu": "4.18.0", + "@rollup/rollup-linux-x64-gnu": "4.18.0", + "@rollup/rollup-linux-x64-musl": "4.18.0", + "@rollup/rollup-win32-arm64-msvc": "4.18.0", + "@rollup/rollup-win32-ia32-msvc": "4.18.0", + "@rollup/rollup-win32-x64-msvc": "4.18.0", + "fsevents": "~2.3.2" + } + }, + "node_modules/run-con": { + "version": "1.3.2", + "resolved": "https://registry.npmjs.org/run-con/-/run-con-1.3.2.tgz", + "integrity": "sha512-CcfE+mYiTcKEzg0IqS08+efdnH0oJ3zV0wSUFBNrMHMuxCtXvBCLzCJHatwuXDcu/RlhjTziTo/a1ruQik6/Yg==", + "dev": true, + "dependencies": { + "deep-extend": "^0.6.0", + "ini": "~4.1.0", + "minimist": "^1.2.8", + "strip-json-comments": "~3.1.1" + }, + "bin": { + "run-con": "cli.js" + } + }, + "node_modules/run-parallel": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/run-parallel/-/run-parallel-1.2.0.tgz", + "integrity": "sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==", + "dev": true, + "funding": [ + { + "type": "github", + "url": "https://github.com/sponsors/feross" + }, + { + "type": "patreon", + "url": "https://www.patreon.com/feross" + }, + { + "type": "consulting", + "url": "https://feross.org/support" + } + ], + "dependencies": { + "queue-microtask": "^1.2.2" + } + }, + "node_modules/rw": { + "version": "1.3.3", + "resolved": "https://registry.npmjs.org/rw/-/rw-1.3.3.tgz", + "integrity": "sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==", + "dev": true + }, + "node_modules/sade": { + "version": "1.8.1", + "resolved": "https://registry.npmjs.org/sade/-/sade-1.8.1.tgz", + "integrity": "sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==", + "dev": true, + "dependencies": { + "mri": "^1.1.0" + }, + "engines": { + "node": ">=6" + } + }, + "node_modules/safer-buffer": { + "version": "2.1.2", + "resolved": "https://registry.npmjs.org/safer-buffer/-/safer-buffer-2.1.2.tgz", + "integrity": "sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==", + "dev": true + }, + "node_modules/sax": { + "version": "1.4.1", + "resolved": "https://registry.npmjs.org/sax/-/sax-1.4.1.tgz", + "integrity": "sha512-+aWOz7yVScEGoKNd4PA10LZ8sk0A/z5+nXQG5giUO5rprX9jgYsTdov9qCchZiPIZezbZH+jRut8nPodFAX4Jg==", + "dev": true + }, + "node_modules/search-insights": { + "version": "2.14.0", + "resolved": "https://registry.npmjs.org/search-insights/-/search-insights-2.14.0.tgz", + "integrity": "sha512-OLN6MsPMCghDOqlCtsIsYgtsC0pnwVTyT9Mu6A3ewOj1DxvzZF6COrn2g86E/c05xbktB0XN04m/t1Z+n+fTGw==", + "dev": true, + "peer": true + }, + "node_modules/shebang-command": { + "version": "2.0.0", + "resolved": "https://registry.npmjs.org/shebang-command/-/shebang-command-2.0.0.tgz", + "integrity": "sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==", + "dev": true, + "dependencies": { + "shebang-regex": "^3.0.0" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/shebang-regex": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/shebang-regex/-/shebang-regex-3.0.0.tgz", + "integrity": 
"sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/shiki": { + "version": "1.6.3", + "resolved": "https://registry.npmjs.org/shiki/-/shiki-1.6.3.tgz", + "integrity": "sha512-lE1/YGlzFY0hQSyEfsZj18xGrTWxyhFQkaiILALqTBZPbJeYFWpbUhlmTGPOupYB/qC+H6sV4UznJzcEh3WMHQ==", + "dev": true, + "dependencies": { + "@shikijs/core": "1.6.3" + } + }, + "node_modules/signal-exit": { + "version": "4.1.0", + "resolved": "https://registry.npmjs.org/signal-exit/-/signal-exit-4.1.0.tgz", + "integrity": "sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==", + "dev": true, + "engines": { + "node": ">=14" + }, + "funding": { + "url": "https://github.com/sponsors/isaacs" + } + }, + "node_modules/sitemap": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/sitemap/-/sitemap-8.0.0.tgz", + "integrity": "sha512-+AbdxhM9kJsHtruUF39bwS/B0Fytw6Fr1o4ZAIAEqA6cke2xcoO2GleBw9Zw7nRzILVEgz7zBM5GiTJjie1G9A==", + "dev": true, + "dependencies": { + "@types/node": "^17.0.5", + "@types/sax": "^1.2.1", + "arg": "^5.0.0", + "sax": "^1.2.4" + }, + "bin": { + "sitemap": "dist/cli.js" + }, + "engines": { + "node": ">=14.0.0", + "npm": ">=6.0.0" + } + }, + "node_modules/sitemap-ts": { + "version": "1.7.3", + "resolved": "https://registry.npmjs.org/sitemap-ts/-/sitemap-ts-1.7.3.tgz", + "integrity": "sha512-xeCbwZvYQfZeKyaCGsPnBZgmLjKGa4BQfVY5O/JHhd0LEvoJM4PIvYBN8zyIl97q+gtxmDaNMclVRxT6FGxyHQ==", + "dev": true, + "dependencies": { + "@antfu/utils": "^0.7.8", + "fast-glob": "^3.3.2", + "sitemap": "^8.0.0", + "xml-formatter": "^3.6.2" + } + }, + "node_modules/sitemap/node_modules/@types/node": { + "version": "17.0.45", + "resolved": "https://registry.npmjs.org/@types/node/-/node-17.0.45.tgz", + "integrity": "sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==", + "dev": true + }, + "node_modules/source-map-js": { + "version": "1.2.0", + "resolved": "https://registry.npmjs.org/source-map-js/-/source-map-js-1.2.0.tgz", + "integrity": "sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/speakingurl": { + "version": "14.0.1", + "resolved": "https://registry.npmjs.org/speakingurl/-/speakingurl-14.0.1.tgz", + "integrity": "sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==", + "dev": true, + "engines": { + "node": ">=0.10.0" + } + }, + "node_modules/string-width": { + "version": "5.1.2", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-5.1.2.tgz", + "integrity": "sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==", + "dev": true, + "dependencies": { + "eastasianwidth": "^0.2.0", + "emoji-regex": "^9.2.2", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/string-width-cjs": { + "name": "string-width", + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + 
"node_modules/string-width-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/string-width-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/string-width-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi": { + "version": "7.1.0", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-7.1.0.tgz", + "integrity": "sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==", + "dev": true, + "dependencies": { + "ansi-regex": "^6.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/strip-ansi?sponsor=1" + } + }, + "node_modules/strip-ansi-cjs": { + "name": "strip-ansi", + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/strip-json-comments": { + "version": "3.1.1", + "resolved": "https://registry.npmjs.org/strip-json-comments/-/strip-json-comments-3.1.1.tgz", + "integrity": "sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==", + "dev": true, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/sponsors/sindresorhus" + } + }, + "node_modules/stylis": { + "version": "4.3.2", + "resolved": "https://registry.npmjs.org/stylis/-/stylis-4.3.2.tgz", + "integrity": "sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==", + "dev": true + }, + "node_modules/tabbable": { + "version": "6.2.0", + "resolved": "https://registry.npmjs.org/tabbable/-/tabbable-6.2.0.tgz", + "integrity": "sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==", + "dev": true + }, + "node_modules/to-regex-range": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/to-regex-range/-/to-regex-range-5.0.1.tgz", + "integrity": "sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==", + "dev": true, + "dependencies": { + "is-number": "^7.0.0" + }, + "engines": { + "node": ">=8.0" + } + }, + "node_modules/toml": { + "version": "3.0.0", + "resolved": "https://registry.npmjs.org/toml/-/toml-3.0.0.tgz", + "integrity": 
"sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==", + "dev": true + }, + "node_modules/ts-dedent": { + "version": "2.2.0", + "resolved": "https://registry.npmjs.org/ts-dedent/-/ts-dedent-2.2.0.tgz", + "integrity": "sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==", + "dev": true, + "engines": { + "node": ">=6.10" + } + }, + "node_modules/uc.micro": { + "version": "2.1.0", + "resolved": "https://registry.npmjs.org/uc.micro/-/uc.micro-2.1.0.tgz", + "integrity": "sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==", + "dev": true + }, + "node_modules/undici-types": { + "version": "5.26.5", + "resolved": "https://registry.npmjs.org/undici-types/-/undici-types-5.26.5.tgz", + "integrity": "sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==", + "dev": true + }, + "node_modules/unist-util-stringify-position": { + "version": "3.0.3", + "resolved": "https://registry.npmjs.org/unist-util-stringify-position/-/unist-util-stringify-position-3.0.3.tgz", + "integrity": "sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==", + "dev": true, + "dependencies": { + "@types/unist": "^2.0.0" + }, + "funding": { + "type": "opencollective", + "url": "https://opencollective.com/unified" + } + }, + "node_modules/uuid": { + "version": "9.0.1", + "resolved": "https://registry.npmjs.org/uuid/-/uuid-9.0.1.tgz", + "integrity": "sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==", + "dev": true, + "funding": [ + "https://github.com/sponsors/broofa", + "https://github.com/sponsors/ctavan" + ], + "bin": { + "uuid": "dist/bin/uuid" + } + }, + "node_modules/uvu": { + "version": "0.5.6", + "resolved": "https://registry.npmjs.org/uvu/-/uvu-0.5.6.tgz", + "integrity": "sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==", + "dev": true, + "dependencies": { + "dequal": "^2.0.0", + "diff": "^5.0.0", + "kleur": "^4.0.3", + "sade": "^1.7.3" + }, + "bin": { + "uvu": "bin.js" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/vite": { + "version": "5.2.13", + "resolved": "https://registry.npmjs.org/vite/-/vite-5.2.13.tgz", + "integrity": "sha512-SSq1noJfY9pR3I1TUENL3rQYDQCFqgD+lM6fTRAM8Nv6Lsg5hDLaXkjETVeBt+7vZBCMoibD+6IWnT2mJ+Zb/A==", + "dev": true, + "dependencies": { + "esbuild": "^0.20.1", + "postcss": "^8.4.38", + "rollup": "^4.13.0" + }, + "bin": { + "vite": "bin/vite.js" + }, + "engines": { + "node": "^18.0.0 || >=20.0.0" + }, + "funding": { + "url": "https://github.com/vitejs/vite?sponsor=1" + }, + "optionalDependencies": { + "fsevents": "~2.3.3" + }, + "peerDependencies": { + "@types/node": "^18.0.0 || >=20.0.0", + "less": "*", + "lightningcss": "^1.21.0", + "sass": "*", + "stylus": "*", + "sugarss": "*", + "terser": "^5.4.0" + }, + "peerDependenciesMeta": { + "@types/node": { + "optional": true + }, + "less": { + "optional": true + }, + "lightningcss": { + "optional": true + }, + "sass": { + "optional": true + }, + "stylus": { + "optional": true + }, + "sugarss": { + "optional": true + }, + "terser": { + "optional": true + } + } + }, + "node_modules/vite/node_modules/fsevents": { + "version": "2.3.3", + "resolved": "https://registry.npmjs.org/fsevents/-/fsevents-2.3.3.tgz", + "integrity": "sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==", + "dev": true, + 
"hasInstallScript": true, + "optional": true, + "os": [ + "darwin" + ], + "engines": { + "node": "^8.16.0 || ^10.6.0 || >=11.0.0" + } + }, + "node_modules/vitepress": { + "version": "1.2.3", + "resolved": "https://registry.npmjs.org/vitepress/-/vitepress-1.2.3.tgz", + "integrity": "sha512-GvEsrEeNLiDE1+fuwDAYJCYLNZDAna+EtnXlPajhv/MYeTjbNK6Bvyg6NoTdO1sbwuQJ0vuJR99bOlH53bo6lg==", + "dev": true, + "dependencies": { + "@docsearch/css": "^3.6.0", + "@docsearch/js": "^3.6.0", + "@shikijs/core": "^1.6.2", + "@shikijs/transformers": "^1.6.2", + "@types/markdown-it": "^14.1.1", + "@vitejs/plugin-vue": "^5.0.5", + "@vue/devtools-api": "^7.2.1", + "@vue/shared": "^3.4.27", + "@vueuse/core": "^10.10.0", + "@vueuse/integrations": "^10.10.0", + "focus-trap": "^7.5.4", + "mark.js": "8.11.1", + "minisearch": "^6.3.0", + "shiki": "^1.6.2", + "vite": "^5.2.12", + "vue": "^3.4.27" + }, + "bin": { + "vitepress": "bin/vitepress.js" + }, + "peerDependencies": { + "markdown-it-mathjax3": "^4", + "postcss": "^8" + }, + "peerDependenciesMeta": { + "markdown-it-mathjax3": { + "optional": true + }, + "postcss": { + "optional": true + } + } + }, + "node_modules/vitepress-plugin-mermaid": { + "version": "2.0.16", + "resolved": "https://registry.npmjs.org/vitepress-plugin-mermaid/-/vitepress-plugin-mermaid-2.0.16.tgz", + "integrity": "sha512-sW0Eu4+1EzRdwZBMGjzwKDsbQiuJIxCy8BlMw7Ur88p9fXalrFYKqZ3wYWLxsFTBipeooFIeanef/xw1P+v7vQ==", + "dev": true, + "optionalDependencies": { + "@mermaid-js/mermaid-mindmap": "^9.3.0" + }, + "peerDependencies": { + "mermaid": "10", + "vitepress": "^1.0.0 || ^1.0.0-alpha" + } + }, + "node_modules/vue": { + "version": "3.4.27", + "resolved": "https://registry.npmjs.org/vue/-/vue-3.4.27.tgz", + "integrity": "sha512-8s/56uK6r01r1icG/aEOHqyMVxd1bkYcSe9j8HcKtr/xTOFWvnzIVTehNW+5Yt89f+DLBe4A569pnZLS5HzAMA==", + "dev": true, + "dependencies": { + "@vue/compiler-dom": "3.4.27", + "@vue/compiler-sfc": "3.4.27", + "@vue/runtime-dom": "3.4.27", + "@vue/server-renderer": "3.4.27", + "@vue/shared": "3.4.27" + }, + "peerDependencies": { + "typescript": "*" + }, + "peerDependenciesMeta": { + "typescript": { + "optional": true + } + } + }, + "node_modules/web-worker": { + "version": "1.3.0", + "resolved": "https://registry.npmjs.org/web-worker/-/web-worker-1.3.0.tgz", + "integrity": "sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==", + "dev": true + }, + "node_modules/which": { + "version": "2.0.2", + "resolved": "https://registry.npmjs.org/which/-/which-2.0.2.tgz", + "integrity": "sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==", + "dev": true, + "dependencies": { + "isexe": "^2.0.0" + }, + "bin": { + "node-which": "bin/node-which" + }, + "engines": { + "node": ">= 8" + } + }, + "node_modules/wrap-ansi": { + "version": "8.1.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-8.1.0.tgz", + "integrity": "sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==", + "dev": true, + "dependencies": { + "ansi-styles": "^6.1.0", + "string-width": "^5.0.1", + "strip-ansi": "^7.0.1" + }, + "engines": { + "node": ">=12" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs": { + "name": "wrap-ansi", + "version": "7.0.0", + "resolved": "https://registry.npmjs.org/wrap-ansi/-/wrap-ansi-7.0.0.tgz", + "integrity": "sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==", + 
"dev": true, + "dependencies": { + "ansi-styles": "^4.0.0", + "string-width": "^4.1.0", + "strip-ansi": "^6.0.0" + }, + "engines": { + "node": ">=10" + }, + "funding": { + "url": "https://github.com/chalk/wrap-ansi?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-regex": { + "version": "5.0.1", + "resolved": "https://registry.npmjs.org/ansi-regex/-/ansi-regex-5.0.1.tgz", + "integrity": "sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==", + "dev": true, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/ansi-styles": { + "version": "4.3.0", + "resolved": "https://registry.npmjs.org/ansi-styles/-/ansi-styles-4.3.0.tgz", + "integrity": "sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==", + "dev": true, + "dependencies": { + "color-convert": "^2.0.1" + }, + "engines": { + "node": ">=8" + }, + "funding": { + "url": "https://github.com/chalk/ansi-styles?sponsor=1" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/emoji-regex": { + "version": "8.0.0", + "resolved": "https://registry.npmjs.org/emoji-regex/-/emoji-regex-8.0.0.tgz", + "integrity": "sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==", + "dev": true + }, + "node_modules/wrap-ansi-cjs/node_modules/string-width": { + "version": "4.2.3", + "resolved": "https://registry.npmjs.org/string-width/-/string-width-4.2.3.tgz", + "integrity": "sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==", + "dev": true, + "dependencies": { + "emoji-regex": "^8.0.0", + "is-fullwidth-code-point": "^3.0.0", + "strip-ansi": "^6.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/wrap-ansi-cjs/node_modules/strip-ansi": { + "version": "6.0.1", + "resolved": "https://registry.npmjs.org/strip-ansi/-/strip-ansi-6.0.1.tgz", + "integrity": "sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==", + "dev": true, + "dependencies": { + "ansi-regex": "^5.0.1" + }, + "engines": { + "node": ">=8" + } + }, + "node_modules/xml-formatter": { + "version": "3.6.2", + "resolved": "https://registry.npmjs.org/xml-formatter/-/xml-formatter-3.6.2.tgz", + "integrity": "sha512-enWhevZNOwffZFUhzl1WMcha8lFLZUgJ7NzFs5Ug4ZOFCoNheGYXz1J9Iz/e+cTn9rCkuT1GwTacz+YlmFHOGw==", + "dev": true, + "dependencies": { + "xml-parser-xo": "^4.1.0" + }, + "engines": { + "node": ">= 14" + } + }, + "node_modules/xml-parser-xo": { + "version": "4.1.1", + "resolved": "https://registry.npmjs.org/xml-parser-xo/-/xml-parser-xo-4.1.1.tgz", + "integrity": "sha512-Ggf2y90+Y6e9IK5hoPuembVHJ03PhDSdhldEmgzbihzu9k0XBo0sfcFxaSi4W1PlUSSI1ok+MJ0JCXUn+U4Ilw==", + "dev": true, + "engines": { + "node": ">= 14" + } + } + } +} diff --git a/package.json b/package.json index 6874b9f97f..bdd8fe208c 100644 --- a/package.json +++ b/package.json @@ -1,16 +1,23 @@ { "license": "Apache-2.0", + "type": "module", "devDependencies": { - "@vuepress/client": "2.0.0-rc.0", - "@vuepress/plugin-docsearch": "2.0.0-rc.0", - "@vuepress/plugin-google-analytics": "2.0.0-rc.15", - "@vuepress/utils": "2.0.0-rc.0", - "vue": "^3.3.11", - "vuepress": "2.0.0-rc.0", - "vuepress-plugin-sitemap2": "2.0.0-rc.4" + "@playwright/test": "^1.44.0", + "@types/node": "^20.12.12", + "@vueuse/core": "^10.9.0", + "markdown-it-footnote": "^4.0.0", + "markdownlint-cli": "^0.40.0", + "mermaid": "^10.9.1", + "sitemap-ts": "^1.7.3", + "vitepress": "^1.2.3", + "vitepress-plugin-mermaid": 
"^2.0.16", + "vue": "^3.4.27" }, "scripts": { - "website:dev": "vuepress dev runatlantis.io", - "website:build": "NODE_OPTIONS=--openssl-legacy-provider vuepress build runatlantis.io" + "website:dev": "vitepress dev --host localhost --port 8080 runatlantis.io", + "website:lint": "markdownlint runatlantis.io", + "website:lint-fix": "markdownlint --fix runatlantis.io", + "website:build": "vitepress build runatlantis.io", + "e2e": "playwright test" } } diff --git a/playwright.config.cjs b/playwright.config.cjs new file mode 100644 index 0000000000..e3411787bd --- /dev/null +++ b/playwright.config.cjs @@ -0,0 +1,3 @@ +module.exports = { + testDir: './runatlantis.io/e2e' +}; diff --git a/pnpm-lock.yaml b/pnpm-lock.yaml index 5a075817d2..ac3332a689 100644 --- a/pnpm-lock.yaml +++ b/pnpm-lock.yaml @@ -1,214 +1,142 @@ -lockfileVersion: '6.0' +lockfileVersion: '9.0' settings: autoInstallPeers: true excludeLinksFromLockfile: false -devDependencies: - '@vuepress/client': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0 - '@vuepress/plugin-docsearch': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@vuepress/plugin-google-analytics': - specifier: 2.0.0-rc.15 - version: 2.0.0-rc.15(vuepress@2.0.0-rc.0) - '@vuepress/utils': - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0 - vue: - specifier: ^3.3.11 - version: 3.3.11 - vuepress: - specifier: 2.0.0-rc.0 - version: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - vuepress-plugin-sitemap2: - specifier: 2.0.0-rc.4 - version: 2.0.0-rc.4(vuepress@2.0.0-rc.0) +importers: + + .: + devDependencies: + '@playwright/test': + specifier: ^1.44.0 + version: 1.44.0 + '@types/node': + specifier: ^20.12.12 + version: 20.12.12 + '@vueuse/core': + specifier: ^10.9.0 + version: 10.9.0(vue@3.4.27) + markdown-it-footnote: + specifier: ^4.0.0 + version: 4.0.0 + markdownlint-cli: + specifier: ^0.40.0 + version: 0.40.0 + mermaid: + specifier: ^10.9.1 + version: 10.9.1 + sitemap-ts: + specifier: ~1.6.1 + version: 1.6.1 + vitepress: + specifier: ~1.1.4 + version: 1.1.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.38)(sass@1.77.2)(search-insights@2.13.0) + vitepress-plugin-mermaid: + specifier: ^2.0.16 + version: 2.0.16(mermaid@10.9.1)(vitepress@1.1.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.38)(sass@1.77.2)(search-insights@2.13.0)) + vue: + specifier: ^3.4.27 + version: 3.4.27 packages: - /@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0): + '@algolia/autocomplete-core@1.9.3': resolution: {integrity: sha512-009HdfugtGCdC4JdXUbVJClA0q0zh24yyePn+KUGk3rP7j8FEe/m5Yo/z65gn6nP/cM39PxpzqKrL7A6fP6PPw==} - dependencies: - '@algolia/autocomplete-plugin-algolia-insights': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0) - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - transitivePeerDependencies: - - '@algolia/client-search' - - algoliasearch - - search-insights - dev: true - /@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0): + '@algolia/autocomplete-plugin-algolia-insights@1.9.3': resolution: {integrity: sha512-a/yTUkcO/Vyy+JffmAnTWbr4/90cLzw+CC3bRbhnULr/EM0fGNvM13oQQ14f2moLMcVDyAx/leczLlAOovhSZg==} peerDependencies: search-insights: '>= 1 < 3' - dependencies: - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - search-insights: 2.13.0 - 
transitivePeerDependencies: - - '@algolia/client-search' - - algoliasearch - dev: true - /@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1): + '@algolia/autocomplete-preset-algolia@1.9.3': resolution: {integrity: sha512-d4qlt6YmrLMYy95n5TB52wtNDr6EgAIPH81dvvvW8UmuWRgxEtY0NJiPwl/h95JtG2vmRM804M0DSwMCNZlzRA==} peerDependencies: '@algolia/client-search': '>= 4.9.1 < 6' algoliasearch: '>= 4.9.1 < 6' - dependencies: - '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - '@algolia/client-search': 4.21.1 - algoliasearch: 4.21.1 - dev: true - /@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1): + '@algolia/autocomplete-shared@1.9.3': resolution: {integrity: sha512-Wnm9E4Ye6Rl6sTTqjoymD+l8DjSTHsHboVRYrKgEt8Q7UHm9nYbqhN/i0fhUYA3OAEH7WA8x3jfpnmJm3rKvaQ==} peerDependencies: '@algolia/client-search': '>= 4.9.1 < 6' algoliasearch: '>= 4.9.1 < 6' - dependencies: - '@algolia/client-search': 4.21.1 - algoliasearch: 4.21.1 - dev: true - /@algolia/cache-browser-local-storage@4.21.1: - resolution: {integrity: sha512-vUkac/vgj8inyGR/IgunRjTOQ6IlBwl7afFkIfUZRqbqKKXBs+A/g5wgH+UnAlCSW8wjFRAIfCzuvSRb1/qjsQ==} - dependencies: - '@algolia/cache-common': 4.21.1 - dev: true + '@algolia/cache-browser-local-storage@4.23.3': + resolution: {integrity: sha512-vRHXYCpPlTDE7i6UOy2xE03zHF2C8MEFjPN2v7fRbqVpcOvAUQK81x3Kc21xyb5aSIpYCjWCZbYZuz8Glyzyyg==} - /@algolia/cache-common@4.21.1: - resolution: {integrity: sha512-HUo4fRk8KXFMyCASW0k+Kl8iXBoRPdqAjV9OVaFibTNg1dbwnpe6eIxbSTM6AJ2X82ic/8x3GuAO8zF/E515PA==} - dev: true + '@algolia/cache-common@4.23.3': + resolution: {integrity: sha512-h9XcNI6lxYStaw32pHpB1TMm0RuxphF+Ik4o7tcQiodEdpKK+wKufY6QXtba7t3k8eseirEMVB83uFFF3Nu54A==} - /@algolia/cache-in-memory@4.21.1: - resolution: {integrity: sha512-+l2pLg6yIwRaGNtv41pGF/f/e9Qk80FeYE41f4OXS9lb5vpyrxzqM5nUaffWk/ZSFrPDuw5J2E226c//tIIffA==} - dependencies: - '@algolia/cache-common': 4.21.1 - dev: true + '@algolia/cache-in-memory@4.23.3': + resolution: {integrity: sha512-yvpbuUXg/+0rbcagxNT7un0eo3czx2Uf0y4eiR4z4SD7SiptwYTpbuS0IHxcLHG3lq22ukx1T6Kjtk/rT+mqNg==} - /@algolia/client-account@4.21.1: - resolution: {integrity: sha512-AC6SjA9n38th73gAUqcjsuxNUChpwaflaAhPL0qO9cUICN67njpQrnYaoSVZ/yx0opG5zQFRKbpEcuPGj0XjhQ==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-account@4.23.3': + resolution: {integrity: sha512-hpa6S5d7iQmretHHF40QGq6hz0anWEHGlULcTIT9tbUssWUriN9AUXIFQ8Ei4w9azD0hc1rUok9/DeQQobhQMA==} - /@algolia/client-analytics@4.21.1: - resolution: {integrity: sha512-q6AxvAcBl4fNZXZsMwRRQXcsxUv0PK5eUAz/lHDvgkMWAg6cP7Fl+WIq0fHcG7cJA4EHf2sT5fV6Z+yUlf7NfA==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/client-search': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-analytics@4.23.3': + resolution: {integrity: sha512-LBsEARGS9cj8VkTAVEZphjxTjMVCci+zIIiRhpFun9jGDUlS1XmhCW7CTrnaWeIuCQS/2iPyRqSy1nXPjcBLRA==} - /@algolia/client-common@4.21.1: - resolution: {integrity: sha512-LOH7ncYwY/x7epOgxc/MIuV7m3qzl00wIjDG5/9rgImFpkV0X+D/ndJI9DmPsIx7yaTLd5xv/XYuKLcvrUR0eQ==} - dependencies: - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-common@4.23.3': + resolution: {integrity: sha512-l6EiPxdAlg8CYhroqS5ybfIczsGUIAC47slLPOMDeKSVXYG1n0qGiz4RjAHLw2aD0xzh2EXZ7aRguPfz7UKDKw==} - 
/@algolia/client-personalization@4.21.1: - resolution: {integrity: sha512-u2CyQjHbyVwPqM5eSXd/o+rh1Pk949P/MO6s+OxyEGg6/R2YpYvmsafVZl9Q+xqT8pFaf5QygfcqlSdMUDHV5Q==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-personalization@4.23.3': + resolution: {integrity: sha512-3E3yF3Ocr1tB/xOZiuC3doHQBQ2zu2MPTYZ0d4lpfWads2WTKG7ZzmGnsHmm63RflvDeLK/UVx7j2b3QuwKQ2g==} - /@algolia/client-search@4.21.1: - resolution: {integrity: sha512-3KqSmMkQmF+ACY/Ms5TdcvrcK8iqgQP/N0EPnNUUP4LMUzAACpLLTdzA+AtCuc6oaz5ITtGJBVdPUljj5Jf/Lg==} - dependencies: - '@algolia/client-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true + '@algolia/client-search@4.23.3': + resolution: {integrity: sha512-P4VAKFHqU0wx9O+q29Q8YVuaowaZ5EM77rxfmGnkHUJggh28useXQdopokgwMeYw2XUht49WX5RcTQ40rZIabw==} - /@algolia/logger-common@4.21.1: - resolution: {integrity: sha512-9AyYpR2OO9vPkkDlpTtW2/6nX+RmMd7LUwzJiAF3uN+BYUiQqgXEp+oGaH8UC0dgetmK7wJO6hw4b39cnTdEpw==} - dev: true + '@algolia/logger-common@4.23.3': + resolution: {integrity: sha512-y9kBtmJwiZ9ZZ+1Ek66P0M68mHQzKRxkW5kAAXYN/rdzgDN0d2COsViEFufxJ0pb45K4FRcfC7+33YB4BLrZ+g==} - /@algolia/logger-console@4.21.1: - resolution: {integrity: sha512-9wizQiQ8kL4DiBmT82i403UwacNuv+0hpfsfaWYZQrGjpzG+yvXETWM4AgwFZLj007esuKQiGfOPUoYFZNkGGA==} - dependencies: - '@algolia/logger-common': 4.21.1 - dev: true + '@algolia/logger-console@4.23.3': + resolution: {integrity: sha512-8xoiseoWDKuCVnWP8jHthgaeobDLolh00KJAdMe9XPrWPuf1by732jSpgy2BlsLTaT9m32pHI8CRfrOqQzHv3A==} - /@algolia/requester-browser-xhr@4.21.1: - resolution: {integrity: sha512-9NudesJLuXtRHV+JD8fTkrsdVj/oAPQbtLnxBbSQeMduzV6+a7W+G9VuWo5fwFymCdXR8/Hb6jy8D1owQIq5Gw==} - dependencies: - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/recommend@4.23.3': + resolution: {integrity: sha512-9fK4nXZF0bFkdcLBRDexsnGzVmu4TSYZqxdpgBW2tEyfuSSY54D4qSRkLmNkrrz4YFvdh2GM1gA8vSsnZPR73w==} - /@algolia/requester-common@4.21.1: - resolution: {integrity: sha512-KtX2Ep3C43XxoN3xKw755cdf9enE6gPgzh6ufZQRJBl4rYCOoXbiREU6noDYX/Nq+Q+sl03V37WAp0YgtIlh9g==} - dev: true + '@algolia/requester-browser-xhr@4.23.3': + resolution: {integrity: sha512-jDWGIQ96BhXbmONAQsasIpTYWslyjkiGu0Quydjlowe+ciqySpiDUrJHERIRfELE5+wFc7hc1Q5hqjGoV7yghw==} - /@algolia/requester-node-http@4.21.1: - resolution: {integrity: sha512-EcD8cY6Bh2iMySpqXglTKU9+pt+km1ws3xF0V7CGMIUzW1HmN/ZVhi4apCBY4tEMytbyARv0XRTPsolSC4gSSw==} - dependencies: - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/requester-common@4.23.3': + resolution: {integrity: sha512-xloIdr/bedtYEGcXCiF2muajyvRhwop4cMZo+K2qzNht0CMzlRkm8YsDdj5IaBhshqfgmBb3rTg4sL4/PpvLYw==} - /@algolia/transporter@4.21.1: - resolution: {integrity: sha512-KGLFKz8krzOWRwcbR4FT49Grh1dES/mG8dHABEojbvrfUb6kUFxkAee/aezp2GIxuNx+gpQjRn1IzOsqbUZL0A==} - dependencies: - '@algolia/cache-common': 4.21.1 - '@algolia/logger-common': 4.21.1 - '@algolia/requester-common': 4.21.1 - dev: true + '@algolia/requester-node-http@4.23.3': + resolution: {integrity: sha512-zgu++8Uj03IWDEJM3fuNl34s746JnZOWn1Uz5taV1dFyJhVM/kTNw9Ik7YJWiUNHJQXcaD8IXD1eCb0nq/aByA==} + + '@algolia/transporter@4.23.3': + resolution: {integrity: sha512-Wjl5gttqnf/gQKJA+dafnD0Y6Yw97yvfY8R9h0dQltX1GXTgNs1zWgvtWW0tHl1EgMdhAyw189uWiZMnL3QebQ==} - /@babel/helper-string-parser@7.23.4: - resolution: {integrity: sha512-803gmbQdqwdf4olxrX4AJyFBV/RTr3rSmOj0rKwesmzlfhYNDEs+/iOcznzpNWlJlIlTJC2QfPFcHB6DlzdVLQ==} + '@antfu/utils@0.7.6': + resolution: 
{integrity: sha512-pvFiLP2BeOKA/ZOS6jxx4XhKzdVLHDhGlFEaZ2flWWYf2xOqVniqpk38I04DFRyz+L0ASggl7SkItTc+ZLju4w==} + + '@babel/helper-string-parser@7.24.1': + resolution: {integrity: sha512-2ofRCjnnA9y+wk8b9IAREroeUP02KHp431N2mhKniy2yKIDKpbrHv9eXwm8cBeWQYcJmzv5qKCu65P47eCF7CQ==} engines: {node: '>=6.9.0'} - dev: true - /@babel/helper-validator-identifier@7.22.20: - resolution: {integrity: sha512-Y4OZ+ytlatR8AI+8KZfKuL5urKp7qey08ha31L8b3BwewJAoJamTzyvxPR/5D+KkdJCGPq/+8TukHBlY10FX9A==} + '@babel/helper-validator-identifier@7.24.5': + resolution: {integrity: sha512-3q93SSKX2TWCG30M2G2kwaKeTYgEUp5Snjuj8qm729SObL6nbtUldAi37qbxkD5gg3xnBio+f9nqpSepGZMvxA==} engines: {node: '>=6.9.0'} - dev: true - /@babel/parser@7.23.6: - resolution: {integrity: sha512-Z2uID7YJ7oNvAI20O9X0bblw7Qqs8Q2hFy0R9tAfnfLkp5MW0UH9eUvnDSnFwKZ0AvgS1ucqR4KzvVHgnke1VQ==} + '@babel/parser@7.24.5': + resolution: {integrity: sha512-EOv5IK8arwh3LI47dz1b0tKUb/1uhHAnHJOrjgtQMIpu1uXd9mlFrJg9IUgGUgZ41Ch0K8REPTYpO7B76b4vJg==} engines: {node: '>=6.0.0'} hasBin: true - dependencies: - '@babel/types': 7.23.6 - dev: true - /@babel/types@7.23.6: - resolution: {integrity: sha512-+uarb83brBzPKN38NX1MkB6vb6+mwvR6amUulqAE7ccQw1pEl+bCia9TbdG1lsnFP7lZySvUn37CHyXQdfTwzg==} + '@babel/types@7.24.5': + resolution: {integrity: sha512-6mQNsaLeXTw0nxYUYu+NSa4Hx4BlF1x1x8/PMFbiR+GBSr+2DkECc69b8hgy2frEodNcvPffeH8YfWd3LI6jhQ==} engines: {node: '>=6.9.0'} - dependencies: - '@babel/helper-string-parser': 7.23.4 - '@babel/helper-validator-identifier': 7.22.20 - to-fast-properties: 2.0.0 - dev: true - /@docsearch/css@3.5.2: - resolution: {integrity: sha512-SPiDHaWKQZpwR2siD0KQUwlStvIAnEyK6tAE2h2Wuoq8ue9skzhlyVQ1ddzOxX6khULnAALDiR/isSF3bnuciA==} - dev: true + '@braintree/sanitize-url@6.0.4': + resolution: {integrity: sha512-s3jaWicZd0pkP0jf5ysyHUI/RE7MHos6qlToFcGWXVp+ykHOy77OUMrfbgJ9it2C5bow7OIQwYYaHjk9XlBQ2A==} - /@docsearch/js@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-p1YFTCDflk8ieHgFJYfmyHBki1D61+U9idwrLh+GQQMrBSP3DLGKpy0XUJtPjAOPltcVbqsTjiPFfH7JImjUNg==} - dependencies: - '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - preact: 10.19.3 - transitivePeerDependencies: - - '@algolia/client-search' - - '@types/react' - - react - - react-dom - - search-insights - dev: true + '@docsearch/css@3.6.0': + resolution: {integrity: sha512-+sbxb71sWre+PwDK7X2T8+bhS6clcVMLwBPznX45Qu6opJcgRjAp7gYSDzVFp187J+feSj5dNBN1mJoi6ckkUQ==} + + '@docsearch/js@3.6.0': + resolution: {integrity: sha512-QujhqINEElrkIfKwyyyTfbsfMAYCkylInLYMRqHy7PHc8xTBQCow73tlo/Kc7oIwBrCLf0P3YhjlOeV4v8hevQ==} - /@docsearch/react@3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-9Ahcrs5z2jq/DcAvYtvlqEBHImbm4YJI8M9y0x6Tqg598P40HTEkX7hsMcIuThI+hTFxRGZ9hll0Wygm2yEjng==} + '@docsearch/react@3.6.0': + resolution: {integrity: sha512-HUFut4ztcVNmqy9gp/wxNbC7pTOHhgVVkHVGCACTuLhUKUhKAF9KYHJtMiLUJxEqiFLQiuri1fWF8zqwM/cu1w==} peerDependencies: '@types/react': '>= 16.8.0 < 19.0.0' react: '>= 16.8.0 < 19.0.0' @@ -223,1079 +151,1762 @@ packages: optional: true search-insights: optional: true - dependencies: - '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1)(search-insights@2.13.0) - '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.21.1)(algoliasearch@4.21.1) - '@docsearch/css': 3.5.2 - algoliasearch: 4.21.1 - search-insights: 2.13.0 - transitivePeerDependencies: - - '@algolia/client-search' - dev: true - /@esbuild/android-arm64@0.19.9: - 
resolution: {integrity: sha512-q4cR+6ZD0938R19MyEW3jEsMzbb/1rulLXiNAJQADD/XYp7pT+rOS5JGxvpRW8dFDEfjW4wLgC/3FXIw4zYglQ==} + '@esbuild/aix-ppc64@0.20.2': + resolution: {integrity: sha512-D+EBOJHXdNZcLJRBkhENNG8Wji2kgc9AZ9KiPr1JuZjsNtyHzrsfLRrY0tk2H2aoFu6RANO1y1iPPUCDYWkb5g==} + engines: {node: '>=12'} + cpu: [ppc64] + os: [aix] + + '@esbuild/android-arm64@0.20.2': + resolution: {integrity: sha512-mRzjLacRtl/tWU0SvD8lUEwb61yP9cqQo6noDZP/O8VkwafSYwZ4yWy24kan8jE/IMERpYncRt2dw438LP3Xmg==} engines: {node: '>=12'} cpu: [arm64] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/android-arm@0.19.9: - resolution: {integrity: sha512-jkYjjq7SdsWuNI6b5quymW0oC83NN5FdRPuCbs9HZ02mfVdAP8B8eeqLSYU3gb6OJEaY5CQabtTFbqBf26H3GA==} + '@esbuild/android-arm@0.20.2': + resolution: {integrity: sha512-t98Ra6pw2VaDhqNWO2Oph2LXbz/EJcnLmKLGBJwEwXX/JAN83Fym1rU8l0JUWK6HkIbWONCSSatf4sf2NBRx/w==} engines: {node: '>=12'} cpu: [arm] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/android-x64@0.19.9: - resolution: {integrity: sha512-KOqoPntWAH6ZxDwx1D6mRntIgZh9KodzgNOy5Ebt9ghzffOk9X2c1sPwtM9P+0eXbefnDhqYfkh5PLP5ULtWFA==} + '@esbuild/android-x64@0.20.2': + resolution: {integrity: sha512-btzExgV+/lMGDDa194CcUQm53ncxzeBrWJcncOBxuC6ndBkKxnHdFJn86mCIgTELsooUmwUm9FkhSp5HYu00Rg==} engines: {node: '>=12'} cpu: [x64] os: [android] - requiresBuild: true - dev: true - optional: true - /@esbuild/darwin-arm64@0.19.9: - resolution: {integrity: sha512-KBJ9S0AFyLVx2E5D8W0vExqRW01WqRtczUZ8NRu+Pi+87opZn5tL4Y0xT0mA4FtHctd0ZgwNoN639fUUGlNIWw==} + '@esbuild/darwin-arm64@0.20.2': + resolution: {integrity: sha512-4J6IRT+10J3aJH3l1yzEg9y3wkTDgDk7TSDFX+wKFiWjqWp/iCfLIYzGyasx9l0SAFPT1HwSCR+0w/h1ES/MjA==} engines: {node: '>=12'} cpu: [arm64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@esbuild/darwin-x64@0.19.9: - resolution: {integrity: sha512-vE0VotmNTQaTdX0Q9dOHmMTao6ObjyPm58CHZr1UK7qpNleQyxlFlNCaHsHx6Uqv86VgPmR4o2wdNq3dP1qyDQ==} + '@esbuild/darwin-x64@0.20.2': + resolution: {integrity: sha512-tBcXp9KNphnNH0dfhv8KYkZhjc+H3XBkF5DKtswJblV7KlT9EI2+jeA8DgBjp908WEuYll6pF+UStUCfEpdysA==} engines: {node: '>=12'} cpu: [x64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@esbuild/freebsd-arm64@0.19.9: - resolution: {integrity: sha512-uFQyd/o1IjiEk3rUHSwUKkqZwqdvuD8GevWF065eqgYfexcVkxh+IJgwTaGZVu59XczZGcN/YMh9uF1fWD8j1g==} + '@esbuild/freebsd-arm64@0.20.2': + resolution: {integrity: sha512-d3qI41G4SuLiCGCFGUrKsSeTXyWG6yem1KcGZVS+3FYlYhtNoNgYrWcvkOoaqMhwXSMrZRl69ArHsGJ9mYdbbw==} engines: {node: '>=12'} cpu: [arm64] os: [freebsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/freebsd-x64@0.19.9: - resolution: {integrity: sha512-WMLgWAtkdTbTu1AWacY7uoj/YtHthgqrqhf1OaEWnZb7PQgpt8eaA/F3LkV0E6K/Lc0cUr/uaVP/49iE4M4asA==} + '@esbuild/freebsd-x64@0.20.2': + resolution: {integrity: sha512-d+DipyvHRuqEeM5zDivKV1KuXn9WeRX6vqSqIDgwIfPQtwMP4jaDsQsDncjTDDsExT4lR/91OLjRo8bmC1e+Cw==} engines: {node: '>=12'} cpu: [x64] os: [freebsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-arm64@0.19.9: - resolution: {integrity: sha512-PiPblfe1BjK7WDAKR1Cr9O7VVPqVNpwFcPWgfn4xu0eMemzRp442hXyzF/fSwgrufI66FpHOEJk0yYdPInsmyQ==} + '@esbuild/linux-arm64@0.20.2': + resolution: {integrity: sha512-9pb6rBjGvTFNira2FLIWqDk/uaf42sSyLE8j1rnUpuzsODBq7FvpwHYZxQ/It/8b+QOS1RYfqgGFNLRI+qlq2A==} engines: {node: '>=12'} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-arm@0.19.9: - resolution: {integrity: 
sha512-C/ChPohUYoyUaqn1h17m/6yt6OB14hbXvT8EgM1ZWaiiTYz7nWZR0SYmMnB5BzQA4GXl3BgBO1l8MYqL/He3qw==} + '@esbuild/linux-arm@0.20.2': + resolution: {integrity: sha512-VhLPeR8HTMPccbuWWcEUD1Az68TqaTYyj6nfE4QByZIQEQVWBB8vup8PpR7y1QHL3CpcF6xd5WVBU/+SBEvGTg==} engines: {node: '>=12'} cpu: [arm] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-ia32@0.19.9: - resolution: {integrity: sha512-f37i/0zE0MjDxijkPSQw1CO/7C27Eojqb+r3BbHVxMLkj8GCa78TrBZzvPyA/FNLUMzP3eyHCVkAopkKVja+6Q==} + '@esbuild/linux-ia32@0.20.2': + resolution: {integrity: sha512-o10utieEkNPFDZFQm9CoP7Tvb33UutoJqg3qKf1PWVeeJhJw0Q347PxMvBgVVFgouYLGIhFYG0UGdBumROyiig==} engines: {node: '>=12'} cpu: [ia32] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-loong64@0.19.9: - resolution: {integrity: sha512-t6mN147pUIf3t6wUt3FeumoOTPfmv9Cc6DQlsVBpB7eCpLOqQDyWBP1ymXn1lDw4fNUSb/gBcKAmvTP49oIkaA==} + '@esbuild/linux-loong64@0.20.2': + resolution: {integrity: sha512-PR7sp6R/UC4CFVomVINKJ80pMFlfDfMQMYynX7t1tNTeivQ6XdX5r2XovMmha/VjR1YN/HgHWsVcTRIMkymrgQ==} engines: {node: '>=12'} cpu: [loong64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-mips64el@0.19.9: - resolution: {integrity: sha512-jg9fujJTNTQBuDXdmAg1eeJUL4Jds7BklOTkkH80ZgQIoCTdQrDaHYgbFZyeTq8zbY+axgptncko3v9p5hLZtw==} + '@esbuild/linux-mips64el@0.20.2': + resolution: {integrity: sha512-4BlTqeutE/KnOiTG5Y6Sb/Hw6hsBOZapOVF6njAESHInhlQAghVVZL1ZpIctBOoTFbQyGW+LsVYZ8lSSB3wkjA==} engines: {node: '>=12'} cpu: [mips64el] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-ppc64@0.19.9: - resolution: {integrity: sha512-tkV0xUX0pUUgY4ha7z5BbDS85uI7ABw3V1d0RNTii7E9lbmV8Z37Pup2tsLV46SQWzjOeyDi1Q7Wx2+QM8WaCQ==} + '@esbuild/linux-ppc64@0.20.2': + resolution: {integrity: sha512-rD3KsaDprDcfajSKdn25ooz5J5/fWBylaaXkuotBDGnMnDP1Uv5DLAN/45qfnf3JDYyJv/ytGHQaziHUdyzaAg==} engines: {node: '>=12'} cpu: [ppc64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-riscv64@0.19.9: - resolution: {integrity: sha512-DfLp8dj91cufgPZDXr9p3FoR++m3ZJ6uIXsXrIvJdOjXVREtXuQCjfMfvmc3LScAVmLjcfloyVtpn43D56JFHg==} + '@esbuild/linux-riscv64@0.20.2': + resolution: {integrity: sha512-snwmBKacKmwTMmhLlz/3aH1Q9T8v45bKYGE3j26TsaOVtjIag4wLfWSiZykXzXuE1kbCE+zJRmwp+ZbIHinnVg==} engines: {node: '>=12'} cpu: [riscv64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-s390x@0.19.9: - resolution: {integrity: sha512-zHbglfEdC88KMgCWpOl/zc6dDYJvWGLiUtmPRsr1OgCViu3z5GncvNVdf+6/56O2Ca8jUU+t1BW261V6kp8qdw==} + '@esbuild/linux-s390x@0.20.2': + resolution: {integrity: sha512-wcWISOobRWNm3cezm5HOZcYz1sKoHLd8VL1dl309DiixxVFoFe/o8HnwuIwn6sXre88Nwj+VwZUvJf4AFxkyrQ==} engines: {node: '>=12'} cpu: [s390x] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/linux-x64@0.19.9: - resolution: {integrity: sha512-JUjpystGFFmNrEHQnIVG8hKwvA2DN5o7RqiO1CVX8EN/F/gkCjkUMgVn6hzScpwnJtl2mPR6I9XV1oW8k9O+0A==} + '@esbuild/linux-x64@0.20.2': + resolution: {integrity: sha512-1MdwI6OOTsfQfek8sLwgyjOXAu+wKhLEoaOLTjbijk6E2WONYpH9ZU2mNtR+lZ2B4uwr+usqGuVfFT9tMtGvGw==} engines: {node: '>=12'} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@esbuild/netbsd-x64@0.19.9: - resolution: {integrity: sha512-GThgZPAwOBOsheA2RUlW5UeroRfESwMq/guy8uEe3wJlAOjpOXuSevLRd70NZ37ZrpO6RHGHgEHvPg1h3S1Jug==} + '@esbuild/netbsd-x64@0.20.2': + resolution: {integrity: sha512-K8/DhBxcVQkzYc43yJXDSyjlFeHQJBiowJ0uVL6Tor3jGQfSGHNNJcWxNbOI8v5k82prYqzPuwkzHt3J1T1iZQ==} 
engines: {node: '>=12'} cpu: [x64] os: [netbsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/openbsd-x64@0.19.9: - resolution: {integrity: sha512-Ki6PlzppaFVbLnD8PtlVQfsYw4S9n3eQl87cqgeIw+O3sRr9IghpfSKY62mggdt1yCSZ8QWvTZ9jo9fjDSg9uw==} + '@esbuild/openbsd-x64@0.20.2': + resolution: {integrity: sha512-eMpKlV0SThJmmJgiVyN9jTPJ2VBPquf6Kt/nAoo6DgHAoN57K15ZghiHaMvqjCye/uU4X5u3YSMgVBI1h3vKrQ==} engines: {node: '>=12'} cpu: [x64] os: [openbsd] - requiresBuild: true - dev: true - optional: true - /@esbuild/sunos-x64@0.19.9: - resolution: {integrity: sha512-MLHj7k9hWh4y1ddkBpvRj2b9NCBhfgBt3VpWbHQnXRedVun/hC7sIyTGDGTfsGuXo4ebik2+3ShjcPbhtFwWDw==} + '@esbuild/sunos-x64@0.20.2': + resolution: {integrity: sha512-2UyFtRC6cXLyejf/YEld4Hajo7UHILetzE1vsRcGL3earZEW77JxrFjH4Ez2qaTiEfMgAXxfAZCm1fvM/G/o8w==} engines: {node: '>=12'} cpu: [x64] os: [sunos] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-arm64@0.19.9: - resolution: {integrity: sha512-GQoa6OrQ8G08guMFgeXPH7yE/8Dt0IfOGWJSfSH4uafwdC7rWwrfE6P9N8AtPGIjUzdo2+7bN8Xo3qC578olhg==} + '@esbuild/win32-arm64@0.20.2': + resolution: {integrity: sha512-GRibxoawM9ZCnDxnP3usoUDO9vUkpAxIIZ6GQI+IlVmr5kP3zUq+l17xELTHMWTWzjxa2guPNyrpq1GWmPvcGQ==} engines: {node: '>=12'} cpu: [arm64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-ia32@0.19.9: - resolution: {integrity: sha512-UOozV7Ntykvr5tSOlGCrqU3NBr3d8JqPes0QWN2WOXfvkWVGRajC+Ym0/Wj88fUgecUCLDdJPDF0Nna2UK3Qtg==} + '@esbuild/win32-ia32@0.20.2': + resolution: {integrity: sha512-HfLOfn9YWmkSKRQqovpnITazdtquEW8/SoHW7pWpuEeguaZI4QnCRW6b+oZTztdBnZOS2hqJ6im/D5cPzBTTlQ==} engines: {node: '>=12'} cpu: [ia32] os: [win32] - requiresBuild: true - dev: true - optional: true - /@esbuild/win32-x64@0.19.9: - resolution: {integrity: sha512-oxoQgglOP7RH6iasDrhY+R/3cHrfwIDvRlT4CGChflq6twk8iENeVvMJjmvBb94Ik1Z+93iGO27err7w6l54GQ==} + '@esbuild/win32-x64@0.20.2': + resolution: {integrity: sha512-N49X4lJX27+l9jbLKSqZ6bKNjzQvHaT8IIFUy+YIqmXQdjYCToGWwOItDrfby14c78aDd5NHQl29xingXfCdLQ==} engines: {node: '>=12'} cpu: [x64] os: [win32] - requiresBuild: true - dev: true - optional: true - - /@jridgewell/sourcemap-codec@1.4.15: - resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - dev: true - - /@mdit-vue/plugin-component@1.0.0: - resolution: {integrity: sha512-ZXsJwxkG5yyTHARIYbR74cT4AZ0SfMokFFjiHYCbypHIeYWgJhso4+CZ8+3V9EWFG3EHlGoKNGqKp9chHnqntQ==} - dependencies: - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-frontmatter@1.0.0: - resolution: {integrity: sha512-MMA7Ny+YPZA7eDOY1t4E+rKuEWO39mzDdP/M68fKdXJU6VfcGkPr7gnpnJfW2QBJ5qIvMrK/3lDAA2JBy5TfpA==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - gray-matter: 4.0.3 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-headers@1.0.0: - resolution: {integrity: sha512-0rK/iKy6x13d/Pp5XxdLBshTD0+YjZvtHIaIV+JO+/H2WnOv7oaRgs48G5d44z3XJVUE2u6fNnTlI169fef0/A==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-sfc@1.0.0: - resolution: {integrity: sha512-agMUe0fY4YHxsZivSvplBwRwrFvsIf/JNUJCAYq1+2Sg9+2hviTBZwjZDxYqHDHOVLtiNr+wuo68tE24mAx3AQ==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - - /@mdit-vue/plugin-title@1.0.0: - resolution: {integrity: 
sha512-8yC60fCZ95xcJ/cvJH4Lv43Rs4k+33UGyKrRWj5J8TNyMwUyGcwur0XyPM+ffJH4/Bzq4myZLsj/TTFSkXRxvw==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true - /@mdit-vue/plugin-toc@1.0.0: - resolution: {integrity: sha512-WN8blfX0X/5Nolic0ClDWP7eVo9IB+U4g0jbycX3lolIZX5Bai1UpsD3QYZr5VVsPbQJMKMGvTrCEtCNTGvyWQ==} - dependencies: - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + '@isaacs/cliui@8.0.2': + resolution: {integrity: sha512-O8jcjabXaleOG9DQ0+ARXWZBTfnP4WNAqzuiJK7ll44AmxGKv/J2M4TPjxjY3znBCfvBXFzucm1twdyFybFqEA==} + engines: {node: '>=12'} - /@mdit-vue/shared@1.0.0: - resolution: {integrity: sha512-nbYBfmEi+pR2Lm0Z6TMVX2/iBjfr/kGEsHW8CC0rQw+3+sG5dY6VG094HuFAkiAmmvZx9DZZb+7ZMWp9vkwCRw==} - dependencies: - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + '@jridgewell/sourcemap-codec@1.4.15': + resolution: {integrity: sha512-eF2rxCRulEKXHTRiDrDy6erMYWqNw4LPdQ8UQA4huuxaQsVeRPFl2oM8oDGxMFhJUWZf9McpLtJasDDZb/Bpeg==} - /@mdit-vue/types@1.0.0: - resolution: {integrity: sha512-xeF5+sHLzRNF7plbksywKCph4qli20l72of2fMlZQQ7RECvXYrRkE9+bjRFQCyULC7B8ydUYbpbkux5xJlVWyw==} - dev: true + '@mermaid-js/mermaid-mindmap@9.3.0': + resolution: {integrity: sha512-IhtYSVBBRYviH1Ehu8gk69pMDF8DSRqXBRDMWrEfHoaMruHeaP2DXA3PBnuwsMaCdPQhlUUcy/7DBLAEIXvCAw==} - /@nodelib/fs.scandir@2.1.5: + '@nodelib/fs.scandir@2.1.5': resolution: {integrity: sha512-vq24Bq3ym5HEQm2NKCr3yXDwjc7vTsEThRDnkp2DK9p1uqLR+DHurm/NOTo0KG7HYHU7eppKZj3MyqYuMBf62g==} engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.stat': 2.0.5 - run-parallel: 1.2.0 - dev: true - /@nodelib/fs.stat@2.0.5: + '@nodelib/fs.stat@2.0.5': resolution: {integrity: sha512-RkhPPp2zrqDAQA/2jNhnztcPAlv64XdhIp7a7454A5ovI7Bukxgt7MX7udwAu3zg1DcpPU0rz3VV1SeaqvY4+A==} engines: {node: '>= 8'} - dev: true - /@nodelib/fs.walk@1.2.8: + '@nodelib/fs.walk@1.2.8': resolution: {integrity: sha512-oGB+UxlgWcgQkgwo8GcEGwemoTFt3FIO9ababBmaGwXIoBKZ+GTy0pP185beGg7Llih/NSHSV2XAs1lnznocSg==} engines: {node: '>= 8'} - dependencies: - '@nodelib/fs.scandir': 2.1.5 - fastq: 1.15.0 - dev: true - /@rollup/rollup-android-arm-eabi@4.8.0: - resolution: {integrity: sha512-zdTObFRoNENrdPpnTNnhOljYIcOX7aI7+7wyrSpPFFIOf/nRdedE6IYsjaBE7tjukphh1tMTojgJ7p3lKY8x6Q==} + '@pkgjs/parseargs@0.11.0': + resolution: {integrity: sha512-+1VkjdD0QBLPodGrJUeqarH8VAIvQODIbwh9XpP5Syisf7YoQgsJKPNFoqqLQlu+VQ/tVSshMR6loPMn8U+dPg==} + engines: {node: '>=14'} + + '@playwright/test@1.44.0': + resolution: {integrity: sha512-rNX5lbNidamSUorBhB4XZ9SQTjAqfe5M+p37Z8ic0jPFBMo5iCtQz1kRWkEMg+rYOKSlVycpQmpqjSFq7LXOfg==} + engines: {node: '>=16'} + hasBin: true + + '@rollup/rollup-android-arm-eabi@4.18.0': + resolution: {integrity: sha512-Tya6xypR10giZV1XzxmH5wr25VcZSncG0pZIjfePT0OVBvqNEurzValetGNarVrGiq66EBVAFn15iYX4w6FKgQ==} cpu: [arm] os: [android] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-android-arm64@4.8.0: - resolution: {integrity: sha512-aiItwP48BiGpMFS9Znjo/xCNQVwTQVcRKkFKsO81m8exrGjHkCBDvm9PHay2kpa8RPnZzzKcD1iQ9KaLY4fPQQ==} + '@rollup/rollup-android-arm64@4.18.0': + resolution: {integrity: sha512-avCea0RAP03lTsDhEyfy+hpfr85KfyTctMADqHVhLAF3MlIkq83CP8UfAHUssgXTYd+6er6PaAhx/QGv4L1EiA==} cpu: [arm64] os: [android] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-darwin-arm64@4.8.0: - resolution: {integrity: 
sha512-zhNIS+L4ZYkYQUjIQUR6Zl0RXhbbA0huvNIWjmPc2SL0cB1h5Djkcy+RZ3/Bwszfb6vgwUvcVJYD6e6Zkpsi8g==} + '@rollup/rollup-darwin-arm64@4.18.0': + resolution: {integrity: sha512-IWfdwU7KDSm07Ty0PuA/W2JYoZ4iTj3TUQjkVsO/6U+4I1jN5lcR71ZEvRh52sDOERdnNhhHU57UITXz5jC1/w==} cpu: [arm64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-darwin-x64@4.8.0: - resolution: {integrity: sha512-A/FAHFRNQYrELrb/JHncRWzTTXB2ticiRFztP4ggIUAfa9Up1qfW8aG2w/mN9jNiZ+HB0t0u0jpJgFXG6BfRTA==} + '@rollup/rollup-darwin-x64@4.18.0': + resolution: {integrity: sha512-n2LMsUz7Ynu7DoQrSQkBf8iNrjOGyPLrdSg802vk6XT3FtsgX6JbE8IHRvposskFm9SNxzkLYGSq9QdpLYpRNA==} cpu: [x64] os: [darwin] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm-gnueabihf@4.8.0: - resolution: {integrity: sha512-JsidBnh3p2IJJA4/2xOF2puAYqbaczB3elZDT0qHxn362EIoIkq7hrR43Xa8RisgI6/WPfvb2umbGsuvf7E37A==} + '@rollup/rollup-linux-arm-gnueabihf@4.18.0': + resolution: {integrity: sha512-C/zbRYRXFjWvz9Z4haRxcTdnkPt1BtCkz+7RtBSuNmKzMzp3ZxdM28Mpccn6pt28/UWUCTXa+b0Mx1k3g6NOMA==} + cpu: [arm] + os: [linux] + + '@rollup/rollup-linux-arm-musleabihf@4.18.0': + resolution: {integrity: sha512-l3m9ewPgjQSXrUMHg93vt0hYCGnrMOcUpTz6FLtbwljo2HluS4zTXFy2571YQbisTnfTKPZ01u/ukJdQTLGh9A==} cpu: [arm] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm64-gnu@4.8.0: - resolution: {integrity: sha512-hBNCnqw3EVCkaPB0Oqd24bv8SklETptQWcJz06kb9OtiShn9jK1VuTgi7o4zPSt6rNGWQOTDEAccbk0OqJmS+g==} + '@rollup/rollup-linux-arm64-gnu@4.18.0': + resolution: {integrity: sha512-rJ5D47d8WD7J+7STKdCUAgmQk49xuFrRi9pZkWoRD1UeSMakbcepWXPF8ycChBoAqs1pb2wzvbY6Q33WmN2ftw==} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-arm64-musl@4.8.0: - resolution: {integrity: sha512-Fw9ChYfJPdltvi9ALJ9wzdCdxGw4wtq4t1qY028b2O7GwB5qLNSGtqMsAel1lfWTZvf4b6/+4HKp0GlSYg0ahA==} + '@rollup/rollup-linux-arm64-musl@4.18.0': + resolution: {integrity: sha512-be6Yx37b24ZwxQ+wOQXXLZqpq4jTckJhtGlWGZs68TgdKXJgw54lUUoFYrg6Zs/kjzAQwEwYbp8JxZVzZLRepQ==} cpu: [arm64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-riscv64-gnu@4.8.0: - resolution: {integrity: sha512-BH5xIh7tOzS9yBi8dFrCTG8Z6iNIGWGltd3IpTSKp6+pNWWO6qy8eKoRxOtwFbMrid5NZaidLYN6rHh9aB8bEw==} + '@rollup/rollup-linux-powerpc64le-gnu@4.18.0': + resolution: {integrity: sha512-hNVMQK+qrA9Todu9+wqrXOHxFiD5YmdEi3paj6vP02Kx1hjd2LLYR2eaN7DsEshg09+9uzWi2W18MJDlG0cxJA==} + cpu: [ppc64] + os: [linux] + + '@rollup/rollup-linux-riscv64-gnu@4.18.0': + resolution: {integrity: sha512-ROCM7i+m1NfdrsmvwSzoxp9HFtmKGHEqu5NNDiZWQtXLA8S5HBCkVvKAxJ8U+CVctHwV2Gb5VUaK7UAkzhDjlg==} cpu: [riscv64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-x64-gnu@4.8.0: - resolution: {integrity: sha512-PmvAj8k6EuWiyLbkNpd6BLv5XeYFpqWuRvRNRl80xVfpGXK/z6KYXmAgbI4ogz7uFiJxCnYcqyvZVD0dgFog7Q==} + '@rollup/rollup-linux-s390x-gnu@4.18.0': + resolution: {integrity: sha512-0UyyRHyDN42QL+NbqevXIIUnKA47A+45WyasO+y2bGJ1mhQrfrtXUpTxCOrfxCR4esV3/RLYyucGVPiUsO8xjg==} + cpu: [s390x] + os: [linux] + + '@rollup/rollup-linux-x64-gnu@4.18.0': + resolution: {integrity: sha512-xuglR2rBVHA5UsI8h8UbX4VJ470PtGCf5Vpswh7p2ukaqBGFTnsfzxUBetoWBWymHMxbIG0Cmx7Y9qDZzr648w==} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-linux-x64-musl@4.8.0: - resolution: {integrity: sha512-mdxnlW2QUzXwY+95TuxZ+CurrhgrPAMveDWI97EQlA9bfhR8tw3Pt7SUlc/eSlCNxlWktpmT//EAA8UfCHOyXg==} + 
'@rollup/rollup-linux-x64-musl@4.18.0': + resolution: {integrity: sha512-LKaqQL9osY/ir2geuLVvRRs+utWUNilzdE90TpyoX0eNqPzWjRm14oMEE+YLve4k/NAqCdPkGYDaDF5Sw+xBfg==} cpu: [x64] os: [linux] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-arm64-msvc@4.8.0: - resolution: {integrity: sha512-ge7saUz38aesM4MA7Cad8CHo0Fyd1+qTaqoIo+Jtk+ipBi4ATSrHWov9/S4u5pbEQmLjgUjB7BJt+MiKG2kzmA==} + '@rollup/rollup-win32-arm64-msvc@4.18.0': + resolution: {integrity: sha512-7J6TkZQFGo9qBKH0pk2cEVSRhJbL6MtfWxth7Y5YmZs57Pi+4x6c2dStAUvaQkHQLnEQv1jzBUW43GvZW8OFqA==} cpu: [arm64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-ia32-msvc@4.8.0: - resolution: {integrity: sha512-p9E3PZlzurhlsN5h9g7zIP1DnqKXJe8ZUkFwAazqSvHuWfihlIISPxG9hCHCoA+dOOspL/c7ty1eeEVFTE0UTw==} + '@rollup/rollup-win32-ia32-msvc@4.18.0': + resolution: {integrity: sha512-Txjh+IxBPbkUB9+SXZMpv+b/vnTEtFyfWZgJ6iyCmt2tdx0OF5WhFowLmnh8ENGNpfUlUZkdI//4IEmhwPieNg==} cpu: [ia32] os: [win32] - requiresBuild: true - dev: true - optional: true - /@rollup/rollup-win32-x64-msvc@4.8.0: - resolution: {integrity: sha512-kb4/auKXkYKqlUYTE8s40FcJIj5soOyRLHKd4ugR0dCq0G2EfcF54eYcfQiGkHzjidZ40daB4ulsFdtqNKZtBg==} + '@rollup/rollup-win32-x64-msvc@4.18.0': + resolution: {integrity: sha512-UOo5FdvOL0+eIVTgS4tIdbW+TtnBLWg1YBCcU2KWM7nuNwRz9bksDX1bekJJCpu25N1DVWaCwnT39dVQxzqS8g==} cpu: [x64] os: [win32] - requiresBuild: true - dev: true - optional: true - /@sindresorhus/merge-streams@1.0.0: - resolution: {integrity: sha512-rUV5WyJrJLoloD4NDN1V1+LDMDWOa4OTsT4yYJwQNpTU6FWxkxHpL7eu4w+DmiH8x/EAM1otkPE1+LaspIbplw==} - engines: {node: '>=18'} - dev: true + '@shikijs/core@1.6.0': + resolution: {integrity: sha512-NIEAi5U5R7BLkbW1pG/ZKu3eb1lzc3/+jD0lFsuxMT7zjaf9bbNwdNyMr7zh/Zl8EXQtQ+MYBAt5G+JLu+5DlA==} - /@types/debug@4.1.12: - resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} - dependencies: - '@types/ms': 0.7.34 - dev: true + '@shikijs/transformers@1.6.0': + resolution: {integrity: sha512-qGfHe1ECiqfE2STPWvfogIj/9Q0SK+MCRJdoITkW7AmFuB7DmbFnBT2US84+zklJOB51MzNO8RUXZiauWssJlQ==} - /@types/fs-extra@11.0.4: - resolution: {integrity: sha512-yTbItCNreRooED33qjunPthRcSjERP1r4MqCZc7wv0u2sUkzTFp45tgUfS5+r7FrZPdmCCNflLhVSP/o+SemsQ==} - dependencies: - '@types/jsonfile': 6.1.4 - '@types/node': 20.10.4 - dev: true + '@types/d3-scale-chromatic@3.0.3': + resolution: {integrity: sha512-laXM4+1o5ImZv3RpFAsTRn3TEkzqkytiOY0Dz0sq5cnd1dtNlk6sHLon4OvqaiJb28T0S/TdsBI3Sjsy+keJrw==} - /@types/hash-sum@1.0.2: - resolution: {integrity: sha512-UP28RddqY8xcU0SCEp9YKutQICXpaAq9N8U2klqF5hegGha7KzTOL8EdhIIV3bOSGBzjEpN9bU/d+nNZBdJYVw==} - dev: true + '@types/d3-scale@4.0.8': + resolution: {integrity: sha512-gkK1VVTr5iNiYJ7vWDI+yUFFlszhNMtVeneJ6lUTKPjprsvLLI9/tgEGiXJOnlINJA8FyA88gfnQsHbybVZrYQ==} - /@types/jsonfile@6.1.4: - resolution: {integrity: sha512-D5qGUYwjvnNNextdU59/+fI+spnwtTFmyQP0h+PfIOSkNfpU6AOICUOkm4i0OnSk+NyjdPJrxCDro0sJsWlRpQ==} - dependencies: - '@types/node': 20.10.4 - dev: true + '@types/d3-time@3.0.3': + resolution: {integrity: sha512-2p6olUZ4w3s+07q3Tm2dbiMZy5pCDfYwtLXXHUnVzXgQlZ/OyPtUz6OL382BkOuGlLXqfT+wqv8Fw2v8/0geBw==} - /@types/linkify-it@3.0.5: - resolution: {integrity: sha512-yg6E+u0/+Zjva+buc3EIb+29XEg4wltq7cSmd4Uc2EE/1nUVmxyzpX6gUXD0V8jIrG0r7YeOGVIbYRkxeooCtw==} - dev: true + '@types/debug@4.1.12': + resolution: {integrity: sha512-vIChWdVG3LG1SMxEvI/AK+FWJthlrqlTu7fbrlywTkkaONwk/UAGaULXRlf8vkzFBLVm0zkMdCquhL5aOjhXPQ==} - /@types/markdown-it-emoji@2.0.4: - 
resolution: {integrity: sha512-H6ulk/ZmbDxOayPwI/leJzrmoW1YKX1Z+MVSCHXuYhvqckV4I/c+hPTf6UiqJyn2avWugfj30XroheEb6/Ekqg==} - dependencies: - '@types/markdown-it': 13.0.7 - dev: true + '@types/estree@1.0.5': + resolution: {integrity: sha512-/kYRxGDLWzHOB7q+wtSUQlFrtcdUccpfy+X+9iMBpHK8QLLhx2wIPYuS5DYtR9Wa/YlZAbIovy7qVdB1Aq6Lyw==} - /@types/markdown-it@13.0.7: - resolution: {integrity: sha512-U/CBi2YUUcTHBt5tjO2r5QV/x0Po6nsYwQU4Y04fBS6vfoImaiZ6f8bi3CjTCxBPQSO1LMyUqkByzi8AidyxfA==} - dependencies: - '@types/linkify-it': 3.0.5 - '@types/mdurl': 1.0.5 - dev: true + '@types/linkify-it@5.0.0': + resolution: {integrity: sha512-sVDA58zAw4eWAffKOaQH5/5j3XeayukzDk+ewSsnv3p4yJEZHCCzMDiZM8e0OUrRvmpGZ85jf4yDHkHsgBNr9Q==} - /@types/mdurl@1.0.5: - resolution: {integrity: sha512-6L6VymKTzYSrEf4Nev4Xa1LCHKrlTlYCBMTlQKFuddo1CvQcE52I0mwfOJayueUC7MJuXOeHTcIU683lzd0cUA==} - dev: true + '@types/markdown-it@14.1.1': + resolution: {integrity: sha512-4NpsnpYl2Gt1ljyBGrKMxFYAYvpqbnnkgP/i/g+NLpjEUa3obn1XJCur9YbEXKDAkaXqsR1LbDnGEJ0MmKFxfg==} - /@types/ms@0.7.34: + '@types/mdast@3.0.15': + resolution: {integrity: sha512-LnwD+mUEfxWMa1QpDraczIn6k0Ee3SMicuYSSzS6ZYl2gKS09EClnJYGd8Du6rfc5r/GZEk5o1mRb8TaTj03sQ==} + + '@types/mdurl@2.0.0': + resolution: {integrity: sha512-RGdgjQUZba5p6QEFAVx2OGb8rQDL/cPRG7GiedRzMcJ1tYnUANBncjbSB1NRGwbvjcPeikRABz2nshyPk1bhWg==} + + '@types/ms@0.7.34': resolution: {integrity: sha512-nG96G3Wp6acyAgJqGasjODb+acrI7KltPiRxzHPXnP3NgI28bpQDRv53olbqGXbfcgF5aiiHmO3xpwEpS5Ld9g==} - dev: true - /@types/node@17.0.45: + '@types/node@17.0.45': resolution: {integrity: sha512-w+tIMs3rq2afQdsPJlODhoUEKzFP1ayaoyl1CcnwtIlsVe7K7bA1NGm4s3PraqTLlXnbIN84zuBlxBWo1u9BLw==} - dev: true - /@types/node@20.10.4: - resolution: {integrity: sha512-D08YG6rr8X90YB56tSIuBaddy/UXAA9RKJoFvrsnogAum/0pmjkgi4+2nx96A330FmioegBWmEYQ+syqCFaveg==} - dependencies: - undici-types: 5.26.5 - dev: true + '@types/node@20.12.12': + resolution: {integrity: sha512-eWLDGF/FOSPtAvEqeRAQ4C8LSA7M1I7i0ky1I8U7kD1J5ITyW3AsRhQrKVoWf5pFKZ2kILsEGJhsI9r93PYnOw==} - /@types/sax@1.2.7: + '@types/sax@1.2.7': resolution: {integrity: sha512-rO73L89PJxeYM3s3pPPjiPgVVcymqU490g0YO5n5By0k2Erzj6tay/4lr1CHAAU4JyOWd1rpQ8bCf6cZfHU96A==} - dependencies: - '@types/node': 20.10.4 - dev: true - /@types/web-bluetooth@0.0.20: + '@types/unist@2.0.10': + resolution: {integrity: sha512-IfYcSBWE3hLpBg8+X2SEa8LVkJdJEkT2Ese2aaLs3ptGdVtABxndrMaxuFlQ1qdFf9Q5rDvDpxI3WwgvKFAsQA==} + + '@types/web-bluetooth@0.0.20': resolution: {integrity: sha512-g9gZnnXVq7gM7v3tJCWV/qw7w+KeOlSHAhgF9RytFyifW6AF61hdT2ucrYhPq9hLs5JIryeupHV3qGk95dH9ow==} - dev: true - /@vitejs/plugin-vue@4.5.2(vite@5.0.12)(vue@3.3.11): - resolution: {integrity: sha512-UGR3DlzLi/SaVBPX0cnSyE37vqxU3O6chn8l0HJNzQzDia6/Au2A4xKv+iIJW8w2daf80G7TYHhi1pAUjdZ0bQ==} - engines: {node: ^14.18.0 || >=16.0.0} + '@vitejs/plugin-vue@5.0.4': + resolution: {integrity: sha512-WS3hevEszI6CEVEx28F8RjTX97k3KsrcY6kvTg7+Whm5y3oYvcqzVeGCU3hxSAn4uY2CLCkeokkGKpoctccilQ==} + engines: {node: ^18.0.0 || >=20.0.0} peerDependencies: - vite: ^4.0.0 || ^5.0.0 + vite: ^5.0.0 vue: ^3.2.25 - dependencies: - vite: 5.0.12 - vue: 3.3.11 - dev: true - /@vue/compiler-core@3.3.11: - resolution: {integrity: sha512-h97/TGWBilnLuRaj58sxNrsUU66fwdRKLOLQ9N/5iNDfp+DZhYH9Obhe0bXxhedl8fjAgpRANpiZfbgWyruQ0w==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - source-map-js: 1.0.2 - dev: true + '@vue/compiler-core@3.4.27': + resolution: {integrity: 
sha512-E+RyqY24KnyDXsCuQrI+mlcdW3ALND6U7Gqa/+bVwbcpcR3BRRIckFoz7Qyd4TTlnugtwuI7YgjbvsLmxb+yvg==} - /@vue/compiler-dom@3.3.11: - resolution: {integrity: sha512-zoAiUIqSKqAJ81WhfPXYmFGwDRuO+loqLxvXmfUdR5fOitPoUiIeFI9cTTyv9MU5O1+ZZglJVTusWzy+wfk5hw==} - dependencies: - '@vue/compiler-core': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-dom@3.4.27': + resolution: {integrity: sha512-kUTvochG/oVgE1w5ViSr3KUBh9X7CWirebA3bezTbB5ZKBQZwR2Mwj9uoSKRMFcz4gSMzzLXBPD6KpCLb9nvWw==} - /@vue/compiler-sfc@3.3.11: - resolution: {integrity: sha512-U4iqPlHO0KQeK1mrsxCN0vZzw43/lL8POxgpzcJweopmqtoYy9nljJzWDIQS3EfjiYhfdtdk9Gtgz7MRXnz3GA==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/compiler-core': 3.3.11 - '@vue/compiler-dom': 3.3.11 - '@vue/compiler-ssr': 3.3.11 - '@vue/reactivity-transform': 3.3.11 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - magic-string: 0.30.5 - postcss: 8.4.32 - source-map-js: 1.0.2 - dev: true + '@vue/compiler-sfc@3.4.27': + resolution: {integrity: sha512-nDwntUEADssW8e0rrmE0+OrONwmRlegDA1pD6QhVeXxjIytV03yDqTey9SBDiALsvAd5U4ZrEKbMyVXhX6mCGA==} - /@vue/compiler-ssr@3.3.11: - resolution: {integrity: sha512-Zd66ZwMvndxRTgVPdo+muV4Rv9n9DwQ4SSgWWKWkPFebHQfVYRrVjeygmmDmPewsHyznCNvJ2P2d6iOOhdv8Qg==} - dependencies: - '@vue/compiler-dom': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/compiler-ssr@3.4.27': + resolution: {integrity: sha512-CVRzSJIltzMG5FcidsW0jKNQnNRYC8bT21VegyMMtHmhW3UOI7knmUehzswXLrExDLE6lQCZdrhD4ogI7c+vuw==} - /@vue/devtools-api@6.5.1: - resolution: {integrity: sha512-+KpckaAQyfbvshdDW5xQylLni1asvNSGme1JFs8I1+/H5pHEhqUKMEQD/qn3Nx5+/nycBq11qAEi8lk+LXI2dA==} - dev: true + '@vue/devtools-api@7.2.1': + resolution: {integrity: sha512-6oNCtyFOrNdqm6GUkFujsCgFlpbsHLnZqq7edeM/+cxAbMyCWvsaCsIMUaz7AiluKLccCGEM8fhOsjaKgBvb7g==} - /@vue/reactivity-transform@3.3.11: - resolution: {integrity: sha512-fPGjH0wqJo68A0wQ1k158utDq/cRyZNlFoxGwNScE28aUFOKFEnCBsvyD8jHn+0kd0UKVpuGuaZEQ6r9FJRqCg==} - dependencies: - '@babel/parser': 7.23.6 - '@vue/compiler-core': 3.3.11 - '@vue/shared': 3.3.11 - estree-walker: 2.0.2 - magic-string: 0.30.5 - dev: true + '@vue/devtools-kit@7.2.1': + resolution: {integrity: sha512-Wak/fin1X0Q8LLIfCAHBrdaaB+R6IdpSXsDByPHbQ3BmkCP0/cIo/oEGp9i0U2+gEqD4L3V9RDjNf1S34DTzQQ==} + peerDependencies: + vue: ^3.0.0 - /@vue/reactivity@3.3.11: - resolution: {integrity: sha512-D5tcw091f0nuu+hXq5XANofD0OXnBmaRqMYl5B3fCR+mX+cXJIGNw/VNawBqkjLNWETrFW0i+xH9NvDbTPVh7g==} - dependencies: - '@vue/shared': 3.3.11 - dev: true + '@vue/devtools-shared@7.2.1': + resolution: {integrity: sha512-PCJF4UknJmOal68+X9XHyVeQ+idv0LFujkTOIW30+GaMJqwFVN9LkQKX4gLqn61KkGMdJTzQ1bt7EJag3TI6AA==} - /@vue/runtime-core@3.3.11: - resolution: {integrity: sha512-g9ztHGwEbS5RyWaOpXuyIVFTschclnwhqEbdy5AwGhYOgc7m/q3NFwr50MirZwTTzX55JY8pSkeib9BX04NIpw==} - dependencies: - '@vue/reactivity': 3.3.11 - '@vue/shared': 3.3.11 - dev: true + '@vue/reactivity@3.4.27': + resolution: {integrity: sha512-kK0g4NknW6JX2yySLpsm2jlunZJl2/RJGZ0H9ddHdfBVHcNzxmQ0sS0b09ipmBoQpY8JM2KmUw+a6sO8Zo+zIA==} - /@vue/runtime-dom@3.3.11: - resolution: {integrity: sha512-OlhtV1PVpbgk+I2zl+Y5rQtDNcCDs12rsRg71XwaA2/Rbllw6mBLMi57VOn8G0AjOJ4Mdb4k56V37+g8ukShpQ==} - dependencies: - '@vue/runtime-core': 3.3.11 - '@vue/shared': 3.3.11 - csstype: 3.1.3 - dev: true + '@vue/runtime-core@3.4.27': + resolution: {integrity: sha512-7aYA9GEbOOdviqVvcuweTLe5Za4qBZkUY7SvET6vE8kyypxVgaT1ixHLg4urtOlrApdgcdgHoTZCUuTGap/5WA==} + + '@vue/runtime-dom@3.4.27': + resolution: {integrity: 
sha512-ScOmP70/3NPM+TW9hvVAz6VWWtZJqkbdf7w6ySsws+EsqtHvkhxaWLecrTorFxsawelM5Ys9FnDEMt6BPBDS0Q==} - /@vue/server-renderer@3.3.11(vue@3.3.11): - resolution: {integrity: sha512-AIWk0VwwxCAm4wqtJyxBylRTXSy1wCLOKbWxHaHiu14wjsNYtiRCSgVuqEPVuDpErOlRdNnuRgipQfXRLjLN5A==} + '@vue/server-renderer@3.4.27': + resolution: {integrity: sha512-dlAMEuvmeA3rJsOMJ2J1kXU7o7pOxgsNHVr9K8hB3ImIkSuBrIdy0vF66h8gf8Tuinf1TK3mPAz2+2sqyf3KzA==} peerDependencies: - vue: 3.3.11 - dependencies: - '@vue/compiler-ssr': 3.3.11 - '@vue/shared': 3.3.11 - vue: 3.3.11 - dev: true - - /@vue/shared@3.3.11: - resolution: {integrity: sha512-u2G8ZQ9IhMWTMXaWqZycnK4UthG1fA238CD+DP4Dm4WJi5hdUKKLg0RMRaRpDPNMdkTwIDkp7WtD0Rd9BH9fLw==} - dev: true - - /@vuepress/bundler-vite@2.0.0-rc.0: - resolution: {integrity: sha512-rX8S8IYpqqlJfNPstS/joorpxXx/4WuE7+gDM31i2HUrxOKGZVzq8ZsRRRU2UdoTwHZSd3LpUS4sMtxE5xLK1A==} - dependencies: - '@vitejs/plugin-vue': 4.5.2(vite@5.0.12)(vue@3.3.11) - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - autoprefixer: 10.4.16(postcss@8.4.32) - connect-history-api-fallback: 2.0.0 - postcss: 8.4.32 - postcss-load-config: 4.0.2(postcss@8.4.32) - rollup: 4.8.0 - vite: 5.0.12 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@types/node' - - '@vue/composition-api' - - less - - lightningcss - - sass - - stylus - - sugarss - - supports-color - - terser - - ts-node - - typescript - dev: true + vue: 3.4.27 - /@vuepress/cli@2.0.0-rc.0: - resolution: {integrity: sha512-XWSIFO9iOR7N4O2lXIwS5vZuLjU9WU/aGAtmhMWEMxrdMx7TQaJbgrfpTUEbHMf+cPI1DXBbUbtmkqIvtfOV0w==} - hasBin: true - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - cac: 6.7.14 - chokidar: 3.5.3 - envinfo: 7.11.0 - esbuild: 0.19.9 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vue/shared@3.4.27': + resolution: {integrity: sha512-DL3NmY2OFlqmYYrzp39yi3LDkKxa5vZVwxWdQ3rG0ekuWscHraeIbnI8t+aZK7qhYqEqWKTUdijadunb9pnrgA==} - /@vuepress/client@2.0.0-rc.0: - resolution: {integrity: sha512-TwQx8hJgYONYxX+QltZ2aw9O5Ym6SKelfiUduuIRb555B1gece/jSVap3H/ZwyBhpgJMtG4+/Mrmf8nlDSHjvw==} - dependencies: - '@vue/devtools-api': 6.5.1 - '@vuepress/shared': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - typescript - dev: true + '@vueuse/core@10.9.0': + resolution: {integrity: sha512-/1vjTol8SXnx6xewDEKfS0Ra//ncg4Hb0DaZiwKf7drgfMsKFExQ+FnnENcN6efPen+1kIzhLQoGSy0eDUVOMg==} - /@vuepress/core@2.0.0-rc.0: - resolution: {integrity: sha512-uoOaZP1MdxZYJIAJcRcmYKKeCIVnxZeOuLMOOB9CPuAKSalT1RvJ1lztw6RX3q9SPnlqtSZPQXDncPAZivw4pA==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/markdown@2.0.0-rc.0: - resolution: {integrity: sha512-USmqdKKMT6ZFHYRztTjKUlO8qgGfnEygMAAq4AzC/uYXiEfrbMBLAWJhteyGS56P3rGLj0OPAhksE681bX/wOg==} - dependencies: - '@mdit-vue/plugin-component': 1.0.0 - '@mdit-vue/plugin-frontmatter': 1.0.0 - '@mdit-vue/plugin-headers': 1.0.0 - '@mdit-vue/plugin-sfc': 1.0.0 - '@mdit-vue/plugin-title': 1.0.0 - '@mdit-vue/plugin-toc': 1.0.0 - '@mdit-vue/shared': 1.0.0 - '@mdit-vue/types': 1.0.0 - '@types/markdown-it': 13.0.7 - 
'@types/markdown-it-emoji': 2.0.4 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - markdown-it: 13.0.2 - markdown-it-anchor: 8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2) - markdown-it-emoji: 2.0.2 - mdurl: 1.0.1 - transitivePeerDependencies: - - supports-color - dev: true + '@vueuse/integrations@10.9.0': + resolution: {integrity: sha512-acK+A01AYdWSvL4BZmCoJAcyHJ6EqhmkQEXbQLwev1MY7NBnS+hcEMx/BzVoR9zKI+UqEPMD9u6PsyAuiTRT4Q==} + peerDependencies: + async-validator: '*' + axios: '*' + change-case: '*' + drauu: '*' + focus-trap: '*' + fuse.js: '*' + idb-keyval: '*' + jwt-decode: '*' + nprogress: '*' + qrcode: '*' + sortablejs: '*' + universal-cookie: '*' + peerDependenciesMeta: + async-validator: + optional: true + axios: + optional: true + change-case: + optional: true + drauu: + optional: true + focus-trap: + optional: true + fuse.js: + optional: true + idb-keyval: + optional: true + jwt-decode: + optional: true + nprogress: + optional: true + qrcode: + optional: true + sortablejs: + optional: true + universal-cookie: + optional: true - /@vuepress/plugin-active-header-links@2.0.0-rc.0: - resolution: {integrity: sha512-UJdXLYNGL5Wjy5YGY8M2QgqT75bZ95EHebbqGi8twBdIJE9O+bM+dPJyYtAk2PIVqFORiw3Hj+PchsNSxdn9+g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - ts-debounce: 4.0.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@vueuse/metadata@10.9.0': + resolution: {integrity: sha512-iddNbg3yZM0X7qFY2sAotomgdHK7YJ6sKUvQqbvwnf7TmaVPxS4EJydcNsVejNdS8iWCtDk+fYXr7E32nyTnGA==} - /@vuepress/plugin-back-to-top@2.0.0-rc.0: - resolution: {integrity: sha512-6GPfuzV5lkAnR00BxRUhqMXwMWt741alkq2R6bln4N8BneSOwEpX/7vi19MGf232aKdS/Va4pF5p0/nJ8Sed/g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - ts-debounce: 4.0.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/plugin-container@2.0.0-rc.0: - resolution: {integrity: sha512-b7vrLN11YE7qiUDPfA3N9P7Z8fupe9Wbcr9KAE/bmfZ9VT4d6kzpVyoU7XHi99XngitsmnkaXP4aBvBF1c2AnA==} - dependencies: - '@types/markdown-it': 13.0.7 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - markdown-it: 13.0.2 - markdown-it-container: 3.0.0 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /@vuepress/plugin-docsearch@2.0.0-rc.0(@algolia/client-search@4.21.1)(search-insights@2.13.0): - resolution: {integrity: sha512-bFbb+RxNyoLVbojv3Fh3UNfMmx9tszdae5ni9nG2xa05giCRwGKT0wFG3Q6n0a9kIQ6V7z3PjCj9x1k4SALPEA==} - dependencies: - '@docsearch/css': 3.5.2 - '@docsearch/js': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@docsearch/react': 3.5.2(@algolia/client-search@4.21.1)(search-insights@2.13.0) - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - ts-debounce: 4.0.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@algolia/client-search' - - '@types/react' - - '@vue/composition-api' - - react - - react-dom - - search-insights - - supports-color - - typescript - dev: true + '@vueuse/shared@10.9.0': + resolution: {integrity: 
sha512-Uud2IWncmAfJvRaFYzv5OHDli+FbOzxiVEQdLCKQKLyhz94PIyFC3CHcH7EDMwIn8NPtD06+PNbC/PiO0LGLtw==} - /@vuepress/plugin-external-link-icon@2.0.0-rc.0: - resolution: {integrity: sha512-o8bk0oIlj/BkKc02mq91XLDloq1VOz/8iNcRwKAeqBE6svXzdYiyoTGet0J/4iPuAetsCn75S57W6RioDJHMnQ==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/markdown': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + algoliasearch@4.23.3: + resolution: {integrity: sha512-Le/3YgNvjW9zxIQMRhUHuhiUjAlKY/zsdZpfq4dlLqg6mEm0nL6yk+7f2hDOtLpxsgE4jSzDmvHL7nXdBp5feg==} - /@vuepress/plugin-git@2.0.0-rc.0: - resolution: {integrity: sha512-r7UF77vZxaYeJQLygzodKv+15z3/dTLuGp4VcYO21W6BlJZvd4u9zqgiV7A//bZQvK4+3Hprylr0G3KgXqMewA==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - execa: 8.0.1 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + ansi-regex@5.0.1: + resolution: {integrity: sha512-quJQXlTSUGL2LH9SUXo8VwsY4soanhgo6LNSm84E1LBcE8s3O0wpdiRzyR9z/ZZJMlMWv37qOOb9pdJlMUEKFQ==} + engines: {node: '>=8'} - /@vuepress/plugin-google-analytics@2.0.0-rc.15(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-ovMpOYz0fFoVcRVgyv+7qnU7LPnovocbtYPk+oPspd9hMedYXjAMeyxOYYnN/MiC6+DSKshDqStTfjVpW3x9DQ==} - peerDependencies: - vuepress: 2.0.0-rc.8 - dependencies: - vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - dev: true + ansi-regex@6.0.1: + resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} + engines: {node: '>=12'} - /@vuepress/plugin-medium-zoom@2.0.0-rc.0: - resolution: {integrity: sha512-peU1lYKsmKikIe/0pkJuHzD/k6xW2TuqdvKVhV4I//aOE1WxsREKJ4ACcldmoIsnysoDydAUqKT6xDPGyDsH2g==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - medium-zoom: 1.1.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + ansi-styles@4.3.0: + resolution: {integrity: sha512-zbB9rCJAT1rbjiVDb2hqKFHNYLxgtk8NURxZ3IZwD3F6NtxbXZQCnnSi1Lkx+IDohdPlFp222wVALIheZJQSEg==} + engines: {node: '>=8'} - /@vuepress/plugin-nprogress@2.0.0-rc.0: - resolution: {integrity: sha512-rI+eK0Pg1KiZE+7hGmDUeSbgdWCid8Vnw0hFKNmjinDzGVmx4m03M6qfvclsI0SryH+lR7itZGLaR4gbTlrz/w==} - dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + ansi-styles@6.2.1: + resolution: {integrity: sha512-bN798gFfQX+viw3R7yrGWRqnrN2oRkEkUjjl4JNn4E8GxxbjtG3FbrEIIY3l8/hrwUwIeCZvi4QuOTP4MErVug==} + engines: {node: '>=12'} - /@vuepress/plugin-palette@2.0.0-rc.0: - resolution: {integrity: sha512-wW70SCp3/K7s1lln5YQsBGTog2WXaQv5piva5zhXcQ47YGf4aAJpThDa5C/ot4HhkPOKn8Iz5s0ckxXZzW8DIg==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - chokidar: 3.5.3 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + anymatch@3.1.3: + resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} + engines: {node: '>= 8'} - /@vuepress/plugin-prismjs@2.0.0-rc.0: - resolution: {integrity: 
sha512-c5WRI7+FhVjdbymOKQ8F2KY/Bnv7aQtWScVk8vCMUimNi7v7Wff/A/i3KSFNz/tge3LxiAeH/Dc2WS/OnQXwCg==} - dependencies: - '@vuepress/core': 2.0.0-rc.0 - prismjs: 1.29.0 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + arg@5.0.2: + resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} - /@vuepress/plugin-theme-data@2.0.0-rc.0: - resolution: {integrity: sha512-FXY3/Ml+rM6gNKvwdBF6vKAcwnSvtXCzKgQwJAw3ppQTKUkLcbOxqM+h4d8bzHWAAvdnEvQFug5uEZgWllBQbA==} - dependencies: - '@vue/devtools-api': 6.5.1 - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - vue: 3.3.11 - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + argparse@2.0.1: + resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} + + balanced-match@1.0.2: + resolution: {integrity: sha512-3oSeUO0TMV67hN1AmbXsK4yaqU7tjiHlbxRDZOpH0KW9+CeX4bRAaX0Anxt0tx2MrpRpWwQaPwIlISEJhYU5Pw==} + + binary-extensions@2.3.0: + resolution: {integrity: sha512-Ceh+7ox5qe7LJuLHoY0feh3pHuUDHAcRUeyL2VYghZwfpkNIy/+8Ocg0a3UuSoYzavmylwuLWQOf3hl0jjMMIw==} + engines: {node: '>=8'} + + brace-expansion@2.0.1: + resolution: {integrity: sha512-XnAIvQ8eM+kC6aULx6wuQiwVsnzsi9d3WxzV3FpWTGA19F621kwdbsAcFKXgKUHZWsy+mY6iL1sHTxWEFCytDA==} + + braces@3.0.3: + resolution: {integrity: sha512-yQbXgO/OSZVD2IsiLlro+7Hf6Q18EJrKSEsdoMzKePKXct3gvD8oLcOQdIzGupr5Fj+EDe8gO/lxc1BzfMpxvA==} + engines: {node: '>=8'} + + character-entities@2.0.2: + resolution: {integrity: sha512-shx7oQ0Awen/BRIdkjkvz54PnEEI/EjwXDSIZp86/KKdbafHh1Df/RYGBhn4hbe2+uKC9FnT5UCEdyPz3ai9hQ==} + + chokidar@3.6.0: + resolution: {integrity: sha512-7VT13fmjotKpGipCW9JEQAusEPE+Ei8nl6/g4FBAmIm0GOOLMua9NDDo/DWp0ZAxCr3cPq5ZpBqmPAQgDda2Pw==} + engines: {node: '>= 8.10.0'} + + color-convert@2.0.1: + resolution: {integrity: sha512-RRECPsj7iu/xb5oKYcsFHSppFNnsj/52OVTRKb4zP5onXwVF3zVmmToNcOfGC+CRDpfK/U584fMg38ZHCaElKQ==} + engines: {node: '>=7.0.0'} + + color-name@1.1.4: + resolution: {integrity: sha512-dOy+3AuW3a2wNbZHIuMZpTcgjGuLU/uBL/ubcZF9OXbDo8ff4O8yVp5Bf0efS8uEoYo5q4Fx7dY9OgQGXgAsQA==} + + commander@12.0.0: + resolution: {integrity: sha512-MwVNWlYjDTtOjX5PiD7o5pK0UrFU/OYgcJfjjK4RaHZETNtjJqrZa9Y9ds88+A+f+d5lv+561eZ+yCKoS3gbAA==} + engines: {node: '>=18'} + + commander@7.2.0: + resolution: {integrity: sha512-QrWXB+ZQSVPmIWIhtEO9H+gwHaMGYiF5ChvoJ+K9ZGHG/sVsa6yiesAD1GC/x46sET00Xlwo1u49RVVVzvcSkw==} + engines: {node: '>= 10'} + + commander@8.3.0: + resolution: {integrity: sha512-OkTL9umf+He2DZkUq8f8J9of7yL6RJKI24dVITBmNfZBmri9zYZQrKkuXiKhyfPSu8tUhnVBB1iKXevvnlR4Ww==} + engines: {node: '>= 12'} + + cose-base@1.0.3: + resolution: {integrity: sha512-s9whTXInMSgAp/NVXVNuVxVKzGH2qck3aQlVHxDCdAEPgtMKwc4Wq6/QKhgdEdgbLSi9rBTAcPoRa6JpiG4ksg==} + + cose-base@2.2.0: + resolution: {integrity: sha512-AzlgcsCbUMymkADOJtQm3wO9S3ltPfYOFD5033keQn9NJzIbtnZj+UdBJe7DYml/8TdbtHJW3j58SOnKhWY/5g==} + + cross-spawn@7.0.3: + resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} + engines: {node: '>= 8'} + + csstype@3.1.3: + resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} + + cytoscape-cose-bilkent@4.1.0: + resolution: {integrity: 
sha512-wgQlVIUJF13Quxiv5e1gstZ08rnZj2XaLHGoFMYXz7SkNfCDOOteKBE6SYRfA9WxxI/iBc3ajfDoc6hb/MRAHQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape-fcose@2.2.0: + resolution: {integrity: sha512-ki1/VuRIHFCzxWNrsshHYPs6L7TvLu3DL+TyIGEsRcvVERmxokbf5Gdk7mFxZnTdiGtnA4cfSmjZJMviqSuZrQ==} + peerDependencies: + cytoscape: ^3.2.0 + + cytoscape@3.29.2: + resolution: {integrity: sha512-2G1ycU28Nh7OHT9rkXRLpCDP30MKH1dXJORZuBhtEhEW7pKwgPi77ImqlCWinouyE1PNepIOGZBOrE84DG7LyQ==} + engines: {node: '>=0.10'} + + d3-array@2.12.1: + resolution: {integrity: sha512-B0ErZK/66mHtEsR1TkPEEkwdy+WDesimkM5gpZr5Dsg54BiTA5RXtYW5qTLIAcekaS9xfZrzBLF/OAkB3Qn1YQ==} + + d3-array@3.2.4: + resolution: {integrity: sha512-tdQAmyA18i4J7wprpYq8ClcxZy3SC31QMeByyCFyRt7BVHdREQZ5lpzoe5mFEYZUWe+oq8HBvk9JjpibyEV4Jg==} + engines: {node: '>=12'} + + d3-axis@3.0.0: + resolution: {integrity: sha512-IH5tgjV4jE/GhHkRV0HiVYPDtvfjHQlQfJHs0usq7M30XcSBvOotpmH1IgkcXsO/5gEQZD43B//fc7SRT5S+xw==} + engines: {node: '>=12'} + + d3-brush@3.0.0: + resolution: {integrity: sha512-ALnjWlVYkXsVIGlOsuWH1+3udkYFI48Ljihfnh8FZPF2QS9o+PzGLBslO0PjzVoHLZ2KCVgAM8NVkXPJB2aNnQ==} + engines: {node: '>=12'} + + d3-chord@3.0.1: + resolution: {integrity: sha512-VE5S6TNa+j8msksl7HwjxMHDM2yNK3XCkusIlpX5kwauBfXuyLAtNg9jCp/iHH61tgI4sb6R/EIMWCqEIdjT/g==} + engines: {node: '>=12'} + + d3-color@3.1.0: + resolution: {integrity: sha512-zg/chbXyeBtMQ1LbD/WSoW2DpC3I0mpmPdW+ynRTj/x2DAWYrIY7qeZIHidozwV24m4iavr15lNwIwLxRmOxhA==} + engines: {node: '>=12'} + + d3-contour@4.0.2: + resolution: {integrity: sha512-4EzFTRIikzs47RGmdxbeUvLWtGedDUNkTcmzoeyg4sP/dvCexO47AaQL7VKy/gul85TOxw+IBgA8US2xwbToNA==} + engines: {node: '>=12'} + + d3-delaunay@6.0.4: + resolution: {integrity: sha512-mdjtIZ1XLAM8bm/hx3WwjfHt6Sggek7qH043O8KEjDXN40xi3vx/6pYSVTwLjEgiXQTbvaouWKynLBiUZ6SK6A==} + engines: {node: '>=12'} + + d3-dispatch@3.0.1: + resolution: {integrity: sha512-rzUyPU/S7rwUflMyLc1ETDeBj0NRuHKKAcvukozwhshr6g6c5d8zh4c2gQjY2bZ0dXeGLWc1PF174P2tVvKhfg==} + engines: {node: '>=12'} + + d3-drag@3.0.0: + resolution: {integrity: sha512-pWbUJLdETVA8lQNJecMxoXfH6x+mO2UQo8rSmZ+QqxcbyA3hfeprFgIT//HW2nlHChWeIIMwS2Fq+gEARkhTkg==} + engines: {node: '>=12'} + + d3-dsv@3.0.1: + resolution: {integrity: sha512-UG6OvdI5afDIFP9w4G0mNq50dSOsXHJaRE8arAS5o9ApWnIElp8GZw1Dun8vP8OyHOZ/QJUKUJwxiiCCnUwm+Q==} + engines: {node: '>=12'} + hasBin: true + + d3-ease@3.0.1: + resolution: {integrity: sha512-wR/XK3D3XcLIZwpbvQwQ5fK+8Ykds1ip7A2Txe0yxncXSdq1L9skcG7blcedkOX+ZcgxGAmLX1FrRGbADwzi0w==} + engines: {node: '>=12'} + + d3-fetch@3.0.1: + resolution: {integrity: sha512-kpkQIM20n3oLVBKGg6oHrUchHM3xODkTzjMoj7aWQFq5QEM+R6E4WkzT5+tojDY7yjez8KgCBRoj4aEr99Fdqw==} + engines: {node: '>=12'} + + d3-force@3.0.0: + resolution: {integrity: sha512-zxV/SsA+U4yte8051P4ECydjD/S+qeYtnaIyAs9tgHCqfguma/aAQDjo85A9Z6EKhBirHRJHXIgJUlffT4wdLg==} + engines: {node: '>=12'} + + d3-format@3.1.0: + resolution: {integrity: sha512-YyUI6AEuY/Wpt8KWLgZHsIU86atmikuoOmCfommt0LYHiQSPjvX2AcFc38PX0CBpr2RCyZhjex+NS/LPOv6YqA==} + engines: {node: '>=12'} + + d3-geo@3.1.1: + resolution: {integrity: sha512-637ln3gXKXOwhalDzinUgY83KzNWZRKbYubaG+fGVuc/dxO64RRljtCTnf5ecMyE1RIdtqpkVcq0IbtU2S8j2Q==} + engines: {node: '>=12'} + + d3-hierarchy@3.1.2: + resolution: {integrity: sha512-FX/9frcub54beBdugHjDCdikxThEqjnR93Qt7PvQTOHxyiNCAlvMrHhclk3cD5VeAaq9fxmfRp+CnWw9rEMBuA==} + engines: {node: '>=12'} + + d3-interpolate@3.0.1: + resolution: {integrity: sha512-3bYs1rOD33uo8aqJfKP3JWPAibgw8Zm2+L9vBKEHJ2Rg+viTR7o5Mmv5mZcieN+FRYaAOWX5SJATX6k1PWz72g==} + engines: {node: '>=12'} + + 
d3-path@1.0.9: + resolution: {integrity: sha512-VLaYcn81dtHVTjEHd8B+pbe9yHWpXKZUC87PzoFmsFrJqgFwDe/qxfp5MlfsfM1V5E/iVt0MmEbWQ7FVIXh/bg==} + + d3-path@3.1.0: + resolution: {integrity: sha512-p3KP5HCf/bvjBSSKuXid6Zqijx7wIfNW+J/maPs+iwR35at5JCbLUT0LzF1cnjbCHWhqzQTIN2Jpe8pRebIEFQ==} + engines: {node: '>=12'} + + d3-polygon@3.0.1: + resolution: {integrity: sha512-3vbA7vXYwfe1SYhED++fPUQlWSYTTGmFmQiany/gdbiWgU/iEyQzyymwL9SkJjFFuCS4902BSzewVGsHHmHtXg==} + engines: {node: '>=12'} + + d3-quadtree@3.0.1: + resolution: {integrity: sha512-04xDrxQTDTCFwP5H6hRhsRcb9xxv2RzkcsygFzmkSIOJy3PeRJP7sNk3VRIbKXcog561P9oU0/rVH6vDROAgUw==} + engines: {node: '>=12'} + + d3-random@3.0.1: + resolution: {integrity: sha512-FXMe9GfxTxqd5D6jFsQ+DJ8BJS4E/fT5mqqdjovykEB2oFbTMDVdg1MGFxfQW+FBOGoB++k8swBrgwSHT1cUXQ==} + engines: {node: '>=12'} + + d3-sankey@0.12.3: + resolution: {integrity: sha512-nQhsBRmM19Ax5xEIPLMY9ZmJ/cDvd1BG3UVvt5h3WRxKg5zGRbvnteTyWAbzeSvlh3tW7ZEmq4VwR5mB3tutmQ==} + + d3-scale-chromatic@3.1.0: + resolution: {integrity: sha512-A3s5PWiZ9YCXFye1o246KoscMWqf8BsD9eRiJ3He7C9OBaxKhAd5TFCdEx/7VbKtxxTsu//1mMJFrEt572cEyQ==} + engines: {node: '>=12'} + + d3-scale@4.0.2: + resolution: {integrity: sha512-GZW464g1SH7ag3Y7hXjf8RoUuAFIqklOAq3MRl4OaWabTFJY9PN/E1YklhXLh+OQ3fM9yS2nOkCoS+WLZ6kvxQ==} + engines: {node: '>=12'} + + d3-selection@3.0.0: + resolution: {integrity: sha512-fmTRWbNMmsmWq6xJV8D19U/gw/bwrHfNXxrIN+HfZgnzqTHp9jOmKMhsTUjXOJnZOdZY9Q28y4yebKzqDKlxlQ==} + engines: {node: '>=12'} + + d3-shape@1.3.7: + resolution: {integrity: sha512-EUkvKjqPFUAZyOlhY5gzCxCeI0Aep04LwIRpsZ/mLFelJiUfnK56jo5JMDSE7yyP2kLSb6LtF+S5chMk7uqPqw==} + + d3-shape@3.2.0: + resolution: {integrity: sha512-SaLBuwGm3MOViRq2ABk3eLoxwZELpH6zhl3FbAoJ7Vm1gofKx6El1Ib5z23NUEhF9AsGl7y+dzLe5Cw2AArGTA==} + engines: {node: '>=12'} + + d3-time-format@4.1.0: + resolution: {integrity: sha512-dJxPBlzC7NugB2PDLwo9Q8JiTR3M3e4/XANkreKSUxF8vvXKqm1Yfq4Q5dl8budlunRVlUUaDUgFt7eA8D6NLg==} + engines: {node: '>=12'} + + d3-time@3.1.0: + resolution: {integrity: sha512-VqKjzBLejbSMT4IgbmVgDjpkYrNWUYJnbCGo874u7MMKIWsILRX+OpX/gTk8MqjpT1A/c6HY2dCA77ZN0lkQ2Q==} + engines: {node: '>=12'} + + d3-timer@3.0.1: + resolution: {integrity: sha512-ndfJ/JxxMd3nw31uyKoY2naivF+r29V+Lc0svZxe1JvvIRmi8hUsrMvdOwgS1o6uBHmiz91geQ0ylPP0aj1VUA==} + engines: {node: '>=12'} + + d3-transition@3.0.1: + resolution: {integrity: sha512-ApKvfjsSR6tg06xrL434C0WydLr7JewBB3V+/39RMHsaXTOG0zmt/OAXeng5M5LBm0ojmxJrpomQVZ1aPvBL4w==} + engines: {node: '>=12'} + peerDependencies: + d3-selection: 2 - 3 + + d3-zoom@3.0.0: + resolution: {integrity: sha512-b8AmV3kfQaqWAuacbPuNbL6vahnOJflOhexLzMMNLga62+/nh0JzvJ0aO/5a5MVgUFGS7Hu1P9P03o3fJkDCyw==} + engines: {node: '>=12'} + + d3@7.9.0: + resolution: {integrity: sha512-e1U46jVP+w7Iut8Jt8ri1YsPOvFpg46k+K8TpCb0P+zjCkjkPnV7WzfDJzMHy1LnA+wj5pLT1wjO901gLXeEhA==} + engines: {node: '>=12'} + + dagre-d3-es@7.0.10: + resolution: {integrity: sha512-qTCQmEhcynucuaZgY5/+ti3X/rnszKZhEQH/ZdWdtP1tA/y3VoHJzcVrO9pjjJCNpigfscAtoUB5ONcd2wNn0A==} + + dayjs@1.11.11: + resolution: {integrity: sha512-okzr3f11N6WuqYtZSvm+F776mB41wRZMhKP+hc34YdW+KmtYYK9iqvHSwo2k9FEH3fhGXvOPV6yz2IcSrfRUDg==} + + debug@4.3.5: + resolution: {integrity: sha512-pt0bNEmneDIvdL1Xsd9oDQ/wrQRkXDT4AUWlNZNPKvW5x/jyO9VFXkJUP07vQ2upmw5PlaITaPKc31jK13V+jg==} + engines: {node: '>=6.0'} + peerDependencies: + supports-color: '*' + peerDependenciesMeta: + supports-color: + optional: true + + decode-named-character-reference@1.0.2: + resolution: {integrity: 
sha512-O8x12RzrUF8xyVcY0KJowWsmaJxQbmy0/EtnNtHRpsOcT7dFk5W598coHqBVpmWo1oQQfsCqfCmkZN5DJrZVdg==} + + deep-extend@0.6.0: + resolution: {integrity: sha512-LOHxIOaPYdHlJRtCQfDIVZtfw/ufM8+rVj649RIHzcm/vGwQRXFt6OPqIFWsm2XEMrNIEtWR64sY1LEKD2vAOA==} + engines: {node: '>=4.0.0'} + + delaunator@5.0.1: + resolution: {integrity: sha512-8nvh+XBe96aCESrGOqMp/84b13H9cdKbG5P2ejQCh4d4sK9RL4371qou9drQjMhvnPmhWl5hnmqbEE0fXr9Xnw==} + + dequal@2.0.3: + resolution: {integrity: sha512-0je+qPKHEMohvfRTCEo3CrPG6cAzAYgmzKyxRiYSSDkS6eGJdyVJm7WaYA5ECaAD9wLB2T4EEeymA5aFVcYXCA==} + engines: {node: '>=6'} + + diff@5.2.0: + resolution: {integrity: sha512-uIFDxqpRZGZ6ThOk84hEfqWoHx2devRFvpTZcTHur85vImfaxUbTW9Ryh4CpCuDnToOP1CEtXKIgytHBPVff5A==} + engines: {node: '>=0.3.1'} + + dompurify@3.1.5: + resolution: {integrity: sha512-lwG+n5h8QNpxtyrJW/gJWckL+1/DQiYMX8f7t8Z2AZTPw1esVrqjI63i7Zc2Gz0aKzLVMYC1V1PL/ky+aY/NgA==} + + eastasianwidth@0.2.0: + resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} + + elkjs@0.9.3: + resolution: {integrity: sha512-f/ZeWvW/BCXbhGEf1Ujp29EASo/lk1FDnETgNKwJrsVvGZhUWCZyg3xLJjAsxfOmt8KjswHmI5EwCQcPMpOYhQ==} + + emoji-regex@8.0.0: + resolution: {integrity: sha512-MSjYzcWNOA0ewAHpz0MxpYFvwg6yjy1NG3xteoqz644VCo/RPgnr1/GGt+ic3iJTzQ8Eu3TdM14SawnVUmGE6A==} + + emoji-regex@9.2.2: + resolution: {integrity: sha512-L18DaJsXSUk2+42pv8mLs5jJT2hqFkFE4j21wOmgbUqsZ2hL72NsUU785g9RXgo3s0ZNgVl42TiHp3ZtOv/Vyg==} + + entities@4.5.0: + resolution: {integrity: sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} + engines: {node: '>=0.12'} + + esbuild@0.20.2: + resolution: {integrity: sha512-WdOOppmUNU+IbZ0PaDiTst80zjnrOkyJNHoKupIcVyU8Lvla3Ugx94VzkQ32Ijqd7UhHJy75gNWDMUekcrSJ6g==} + engines: {node: '>=12'} + hasBin: true + + estree-walker@2.0.2: + resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} + + fast-glob@3.3.1: + resolution: {integrity: sha512-kNFPyjhh5cKjrUltxs+wFx+ZkbRaxxmZ+X0ZU31SOsxCEtP9VPgtq2teZw1DebupL5GmDaNQ6yKMMVcM41iqDg==} + engines: {node: '>=8.6.0'} + + fastq@1.17.1: + resolution: {integrity: sha512-sRVD3lWVIXWg6By68ZN7vho9a1pQcN/WBFaAAsDDFzlJjvoGx0P8z7V1t72grFJfJhu3YPZBuu25f7Kaw2jN1w==} + + fill-range@7.1.1: + resolution: {integrity: sha512-YsGpe3WHLK8ZYi4tWDg2Jy3ebRz2rXowDxnld4bkQB00cc/1Zw9AWnC0i9ztDJitivtQvaI9KaLyKrc+hBW0yg==} + engines: {node: '>=8'} + + focus-trap@7.5.4: + resolution: {integrity: sha512-N7kHdlgsO/v+iD/dMoJKtsSqs5Dz/dXZVebRgJw23LDk+jMi/974zyiOYDziY2JPp8xivq9BmUGwIJMiuSBi7w==} + + foreground-child@3.1.1: + resolution: {integrity: sha512-TMKDUnIte6bfb5nWv7V/caI169OHgvwjb7V4WkeUvbQQdjr5rWKqHFiKWb/fcOwB+CzBT+qbWjvj+DVwRskpIg==} + engines: {node: '>=14'} + + fsevents@2.3.2: + resolution: {integrity: sha512-xiqMQR4xAeHTuB9uWm+fFRcIOgKBMiOBP+eXiyT7jsgVCq1bkVygt00oASowB7EdtpOHaaPgKt812P9ab+DDKA==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + fsevents@2.3.3: + resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} + engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} + os: [darwin] + + get-stdin@9.0.0: + resolution: {integrity: sha512-dVKBjfWisLAicarI2Sf+JuBE/DghV4UzNAVe9yhEJuzeREd3JhOTE9cUaJTeSa77fsbQUK3pcOpJfM59+VKZaA==} + engines: {node: '>=12'} + + glob-parent@5.1.2: + resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} + engines: {node: '>= 6'} + + 
glob@10.3.16: + resolution: {integrity: sha512-JDKXl1DiuuHJ6fVS2FXjownaavciiHNUU4mOvV/B793RLh05vZL1rcPnCSaOgv1hDT6RDlY7AB7ZUvFYAtPgAw==} + engines: {node: '>=16 || 14 >=14.18'} + hasBin: true + + hookable@5.5.3: + resolution: {integrity: sha512-Yc+BQe8SvoXH1643Qez1zqLRmbA5rCL+sSmk6TVos0LWVfNIB7PGncdlId77WzLGSIB5KaWgTaNTs2lNVEI6VQ==} + + iconv-lite@0.6.3: + resolution: {integrity: sha512-4fCk79wshMdzMp2rH06qWrJE4iolqLhCUH+OiuIgU++RB0+94NlDL81atO7GX55uUKueo0txHNtvEyI6D7WdMw==} + engines: {node: '>=0.10.0'} + + ignore@5.3.1: + resolution: {integrity: sha512-5Fytz/IraMjqpwfd34ke28PTVMjZjJG2MPn5t7OE4eUCUNf8BAa7b5WUS9/Qvr6mwOQS7Mk6vdsMno5he+T8Xw==} + engines: {node: '>= 4'} + + immutable@4.3.6: + resolution: {integrity: sha512-Ju0+lEMyzMVZarkTn/gqRpdqd5dOPaz1mCZ0SH3JV6iFw81PldE/PEB1hWVEA288HPt4WXW8O7AWxB10M+03QQ==} + + ini@4.1.3: + resolution: {integrity: sha512-X7rqawQBvfdjS10YU1y1YVreA3SsLrW9dX2CewP2EbBJM4ypVNLDkO5y04gejPwKIY9lR+7r9gn3rFPt/kmWFg==} + engines: {node: ^14.17.0 || ^16.13.0 || >=18.0.0} + + internmap@1.0.1: + resolution: {integrity: sha512-lDB5YccMydFBtasVtxnZ3MRBHuaoE8GKsppq+EchKL2U4nK/DmEpPHNH8MZe5HkMtpSiTSOZwfN0tzYjO/lJEw==} + + internmap@2.0.3: + resolution: {integrity: sha512-5Hh7Y1wQbvY5ooGgPbDaL5iYLAPzMTUrjMulskHLH6wnv/A+1q5rgEaiuqEjB+oxGXIVZs1FF+R/KPN3ZSQYYg==} + engines: {node: '>=12'} + + is-binary-path@2.1.0: + resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} + engines: {node: '>=8'} + + is-extglob@2.1.1: + resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} + engines: {node: '>=0.10.0'} + + is-fullwidth-code-point@3.0.0: + resolution: {integrity: sha512-zymm5+u+sCsSWyD9qNaejV3DFvhCKclKdizYaJUuHA83RLjb7nSuGnddCHGv0hk+KY7BMAlsWeK4Ueg6EV6XQg==} + engines: {node: '>=8'} + + is-glob@4.0.3: + resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} + engines: {node: '>=0.10.0'} + + is-number@7.0.0: + resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} + engines: {node: '>=0.12.0'} + + isexe@2.0.0: + resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} + + jackspeak@3.1.2: + resolution: {integrity: sha512-kWmLKn2tRtfYMF/BakihVVRzBKOxz4gJMiL2Rj91WnAB5TPZumSH99R/Yf1qE1u4uRimvCSJfm6hnxohXeEXjQ==} + engines: {node: '>=14'} + + js-yaml@4.1.0: + resolution: {integrity: sha512-wpxZs9NoxZaJESJGIZTyDEaYpl0FKSA+FB9aJiyemKhMwkxQg63h4T1KJgUGHpTqPDNRcmmYLugrRjJlBtWvRA==} + hasBin: true + + jsonc-parser@3.2.1: + resolution: {integrity: sha512-AilxAyFOAcK5wA1+LeaySVBrHsGQvUFCDWXKpZjzaL0PqW+xfBOttn8GNtWKFWqneyMZj41MWF9Kl6iPWLwgOA==} + + jsonpointer@5.0.1: + resolution: {integrity: sha512-p/nXbhSEcu3pZRdkW1OfJhpsVtW1gd4Wa1fnQc9YLiTfAjn0312eMKimbdIQzuZl9aa9xUGaRlP9T/CJE/ditQ==} + engines: {node: '>=0.10.0'} + + katex@0.16.10: + resolution: {integrity: sha512-ZiqaC04tp2O5utMsl2TEZTXxa6WSC4yo0fv5ML++D3QZv/vx2Mct0mTlRx3O+uUkjfuAgOkzsCmq5MiUEsDDdA==} + hasBin: true + + khroma@2.1.0: + resolution: {integrity: sha512-Ls993zuzfayK269Svk9hzpeGUKob/sIgZzyHYdjQoAdQetRKpOLj+k/QQQ/6Qi0Yz65mlROrfd+Ev+1+7dz9Kw==} + + kleur@4.1.5: + resolution: {integrity: sha512-o+NO+8WrRiQEE4/7nwRJhN1HWpVmJm511pBHUxPLtp0BUISzlBplORYSmTclCnJvQq2tKu/sgl3xVpkc7ZWuQQ==} + engines: {node: '>=6'} + + layout-base@1.0.2: + resolution: {integrity: 
sha512-8h2oVEZNktL4BH2JCOI90iD1yXwL6iNW7KcCKT2QZgQJR2vbqDsldCTPRU9NifTCqHZci57XvQQ15YTu+sTYPg==} + + layout-base@2.0.1: + resolution: {integrity: sha512-dp3s92+uNI1hWIpPGH3jK2kxE2lMjdXdr+DH8ynZHpd6PUlH6x6cbuXnoMmiNumznqaNO31xu9e79F0uuZ0JFg==} + + linkify-it@5.0.0: + resolution: {integrity: sha512-5aHCbzQRADcdP+ATqnDuhhJ/MRIqDkZX5pyjFHRRysS8vZ5AbqGEoFIb6pYHPZ+L/OC2Lc+xT8uHVVR5CAK/wQ==} + + lodash-es@4.17.21: + resolution: {integrity: sha512-mKnC+QJ9pWVzv+C4/U3rRsHapFfHvQFoFB92e52xeyGMcX6/OlIl78je1u8vePzYZSkkogMPJ2yjxxsb89cxyw==} + + lru-cache@10.2.2: + resolution: {integrity: sha512-9hp3Vp2/hFQUiIwKo8XCeFVnrg8Pk3TYNPIR7tJADKi5YfcF7vEaK7avFHTlSy3kOKYaJQaalfEo6YuXdceBOQ==} + engines: {node: 14 || >=16.14} + + magic-string@0.30.10: + resolution: {integrity: sha512-iIRwTIf0QKV3UAnYK4PU8uiEc4SRh5jX0mwpIwETPpHdhVM4f53RSwS/vXvN1JhGX+Cs7B8qIq3d6AH49O5fAQ==} + + mark.js@8.11.1: + resolution: {integrity: sha512-1I+1qpDt4idfgLQG+BNWmrqku+7/2bi5nLf4YwF8y8zXvmfiTBY3PV3ZibfrjBueCByROpuBjLLFCajqkgYoLQ==} + + markdown-it-footnote@4.0.0: + resolution: {integrity: sha512-WYJ7urf+khJYl3DqofQpYfEYkZKbmXmwxQV8c8mO/hGIhgZ1wOe7R4HLFNwqx7TjILbnC98fuyeSsin19JdFcQ==} + + markdown-it@14.1.0: + resolution: {integrity: sha512-a54IwgWPaeBCAAsv13YgmALOF1elABB08FxO9i+r4VFk5Vl4pKokRPeX8u5TCgSsPi6ec1otfLjdOpVcgbpshg==} + hasBin: true + + markdownlint-cli@0.40.0: + resolution: {integrity: sha512-JXhI3dRQcaqwiFYpPz6VJ7aKYheD53GmTz9y4D/d0F1MbZDGOp9pqKlbOfUX/pHP/iAoeiE4wYRmk8/kjLakxA==} + engines: {node: '>=18'} + hasBin: true + + markdownlint-micromark@0.1.9: + resolution: {integrity: sha512-5hVs/DzAFa8XqYosbEAEg6ok6MF2smDj89ztn9pKkCtdKHVdPQuGMH7frFfYL9mLkvfFe4pTyAMffLbjf3/EyA==} + engines: {node: '>=18'} + + markdownlint@0.34.0: + resolution: {integrity: sha512-qwGyuyKwjkEMOJ10XN6OTKNOVYvOIi35RNvDLNxTof5s8UmyGHlCdpngRHoRGNvQVGuxO3BJ7uNSgdeX166WXw==} + engines: {node: '>=18'} + + mdast-util-from-markdown@1.3.1: + resolution: {integrity: sha512-4xTO/M8c82qBcnQc1tgpNtubGUW/Y1tBQ1B0i5CtSoelOLKFYlElIr3bvgREYYO5iRqbMY1YuqZng0GVOI8Qww==} + + mdast-util-to-string@3.2.0: + resolution: {integrity: sha512-V4Zn/ncyN1QNSqSBxTrMOLpjr+IKdHl2v3KVLoWmDPscP4r9GcCi71gjgvUV1SFSKh92AjAG4peFuBl2/YgCJg==} + + mdurl@2.0.0: + resolution: {integrity: sha512-Lf+9+2r+Tdp5wXDXC4PcIBjTDtq4UKjCPMQhKIuzpJNW0b96kVqSwW0bT7FhRSfmAiFYgP+SCRvdrDozfh0U5w==} + + merge2@1.4.1: + resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} + engines: {node: '>= 8'} + + mermaid@10.9.1: + resolution: {integrity: sha512-Mx45Obds5W1UkW1nv/7dHRsbfMM1aOKA2+Pxs/IGHNonygDHwmng8xTHyS9z4KWVi0rbko8gjiBmuwwXQ7tiNA==} + + micromark-core-commonmark@1.1.0: + resolution: {integrity: sha512-BgHO1aRbolh2hcrzL2d1La37V0Aoz73ymF8rAcKnohLy93titmv62E0gP8Hrx9PKcKrqCZ1BbLGbP3bEhoXYlw==} + + micromark-factory-destination@1.1.0: + resolution: {integrity: sha512-XaNDROBgx9SgSChd69pjiGKbV+nfHGDPVYFs5dOoDd7ZnMAE+Cuu91BCpsY8RT2NP9vo/B8pds2VQNCLiu0zhg==} + + micromark-factory-label@1.1.0: + resolution: {integrity: sha512-OLtyez4vZo/1NjxGhcpDSbHQ+m0IIGnT8BoPamh+7jVlzLJBH98zzuCoUeMxvM6WsNeh8wx8cKvqLiPHEACn0w==} + + micromark-factory-space@1.1.0: + resolution: {integrity: sha512-cRzEj7c0OL4Mw2v6nwzttyOZe8XY/Z8G0rzmWQZTBi/jjwyw/U4uqKtUORXQrR5bAZZnbTI/feRV/R7hc4jQYQ==} + + micromark-factory-title@1.1.0: + resolution: {integrity: sha512-J7n9R3vMmgjDOCY8NPw55jiyaQnH5kBdV2/UXCtZIpnHH3P6nHUKaH7XXEYuWwx/xUJcawa8plLBEjMPU24HzQ==} + + micromark-factory-whitespace@1.1.0: + resolution: {integrity: 
sha512-v2WlmiymVSp5oMg+1Q0N1Lxmt6pMhIHD457whWM7/GUlEks1hI9xj5w3zbc4uuMKXGisksZk8DzP2UyGbGqNsQ==} + + micromark-util-character@1.2.0: + resolution: {integrity: sha512-lXraTwcX3yH/vMDaFWCQJP1uIszLVebzUa3ZHdrgxr7KEU/9mL4mVgCpGbyhvNLNlauROiNUq7WN5u7ndbY6xg==} + + micromark-util-chunked@1.1.0: + resolution: {integrity: sha512-Ye01HXpkZPNcV6FiyoW2fGZDUw4Yc7vT0E9Sad83+bEDiCJ1uXu0S3mr8WLpsz3HaG3x2q0HM6CTuPdcZcluFQ==} + + micromark-util-classify-character@1.1.0: + resolution: {integrity: sha512-SL0wLxtKSnklKSUplok1WQFoGhUdWYKggKUiqhX+Swala+BtptGCu5iPRc+xvzJ4PXE/hwM3FNXsfEVgoZsWbw==} + + micromark-util-combine-extensions@1.1.0: + resolution: {integrity: sha512-Q20sp4mfNf9yEqDL50WwuWZHUrCO4fEyeDCnMGmG5Pr0Cz15Uo7KBs6jq+dq0EgX4DPwwrh9m0X+zPV1ypFvUA==} + + micromark-util-decode-numeric-character-reference@1.1.0: + resolution: {integrity: sha512-m9V0ExGv0jB1OT21mrWcuf4QhP46pH1KkfWy9ZEezqHKAxkj4mPCy3nIH1rkbdMlChLHX531eOrymlwyZIf2iw==} + + micromark-util-decode-string@1.1.0: + resolution: {integrity: sha512-YphLGCK8gM1tG1bd54azwyrQRjCFcmgj2S2GoJDNnh4vYtnL38JS8M4gpxzOPNyHdNEpheyWXCTnnTDY3N+NVQ==} + + micromark-util-encode@1.1.0: + resolution: {integrity: sha512-EuEzTWSTAj9PA5GOAs992GzNh2dGQO52UvAbtSOMvXTxv3Criqb6IOzJUBCmEqrrXSblJIJBbFFv6zPxpreiJw==} + + micromark-util-html-tag-name@1.2.0: + resolution: {integrity: sha512-VTQzcuQgFUD7yYztuQFKXT49KghjtETQ+Wv/zUjGSGBioZnkA4P1XXZPT1FHeJA6RwRXSF47yvJ1tsJdoxwO+Q==} + + micromark-util-normalize-identifier@1.1.0: + resolution: {integrity: sha512-N+w5vhqrBihhjdpM8+5Xsxy71QWqGn7HYNUvch71iV2PM7+E3uWGox1Qp90loa1ephtCxG2ftRV/Conitc6P2Q==} + + micromark-util-resolve-all@1.1.0: + resolution: {integrity: sha512-b/G6BTMSg+bX+xVCshPTPyAu2tmA0E4X98NSR7eIbeC6ycCqCeE7wjfDIgzEbkzdEVJXRtOG4FbEm/uGbCRouA==} + + micromark-util-sanitize-uri@1.2.0: + resolution: {integrity: sha512-QO4GXv0XZfWey4pYFndLUKEAktKkG5kZTdUNaTAkzbuJxn2tNBOr+QtxR2XpWaMhbImT2dPzyLrPXLlPhph34A==} + + micromark-util-subtokenize@1.1.0: + resolution: {integrity: sha512-kUQHyzRoxvZO2PuLzMt2P/dwVsTiivCK8icYTeR+3WgbuPqfHgPPy7nFKbeqRivBvn/3N3GBiNC+JRTMSxEC7A==} + + micromark-util-symbol@1.1.0: + resolution: {integrity: sha512-uEjpEYY6KMs1g7QfJ2eX1SQEV+ZT4rUD3UcF6l57acZvLNK7PBZL+ty82Z1qhK1/yXIY4bdx04FKMgR0g4IAag==} + + micromark-util-types@1.1.0: + resolution: {integrity: sha512-ukRBgie8TIAcacscVHSiddHjO4k/q3pnedmzMQ4iwDcK0FtFCohKOlFbaOL/mPgfnPsL3C1ZyxJa4sbWrBl3jg==} + + micromark@3.2.0: + resolution: {integrity: sha512-uD66tJj54JLYq0De10AhWycZWGQNUvDI55xPgk2sQM5kn1JYlhbCMTtEeT27+vAhW2FBQxLlOmS3pmA7/2z4aA==} + + micromatch@4.0.7: + resolution: {integrity: sha512-LPP/3KorzCwBxfeUuZmaR6bG2kdeHSbe0P2tY3FLRU4vYrjYz5hI4QZwV0njUx3jeuKe67YukQ1LSPZBKDqO/Q==} + engines: {node: '>=8.6'} + + minimatch@9.0.4: + resolution: {integrity: sha512-KqWh+VchfxcMNRAJjj2tnsSJdNbHsVgnkBhTNrW7AjVo6OvLtxw8zfT9oLw1JSohlFzJ8jCoTgaoXvJ+kHt6fw==} + engines: {node: '>=16 || 14 >=14.17'} + + minimist@1.2.8: + resolution: {integrity: sha512-2yyAR8qBkN3YuheJanUpWC5U3bb5osDywNB8RzDVlDwDHbocAJveqqj1u8+SVD7jkWT4yvsHCpWqqWqAxb0zCA==} + + minipass@7.1.1: + resolution: {integrity: sha512-UZ7eQ+h8ywIRAW1hIEl2AqdwzJucU/Kp59+8kkZeSvafXhZjul247BvIJjEVFVeON6d7lM46XX1HXCduKAS8VA==} + engines: {node: '>=16 || 14 >=14.17'} + + minisearch@6.3.0: + resolution: {integrity: sha512-ihFnidEeU8iXzcVHy74dhkxh/dn8Dc08ERl0xwoMMGqp4+LvRSCgicb+zGqWthVokQKvCSxITlh3P08OzdTYCQ==} + + mitt@3.0.1: + resolution: {integrity: sha512-vKivATfr97l2/QBCYAkXYDbrIWPM2IIKEl7YPhjCvKlG3kE2gm+uBo6nEXK3M5/Ffh/FLpKExzOQ3JJoJGFKBw==} + + mri@1.2.0: + resolution: {integrity: 
sha512-tzzskb3bG8LvYGFF/mDTpq3jpI6Q9wc3LEmBaghu+DdCssd1FakN7Bc0hVNmEyGq1bq3RgfkCb3cmQLpNPOroA==} + engines: {node: '>=4'} + + ms@2.1.2: + resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} + + nanoid@3.3.7: + resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} + engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} + hasBin: true + + non-layered-tidy-tree-layout@2.0.2: + resolution: {integrity: sha512-gkXMxRzUH+PB0ax9dUN0yYF0S25BqeAYqhgMaLUFmpXLEk7Fcu8f4emJuOAY0V8kjDICxROIKsTAKsV/v355xw==} + + normalize-path@3.0.0: + resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} + engines: {node: '>=0.10.0'} + + path-key@3.1.1: + resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} + engines: {node: '>=8'} + + path-scurry@1.11.1: + resolution: {integrity: sha512-Xa4Nw17FS9ApQFJ9umLiJS4orGjm7ZzwUrwamcGQuHSzDyth9boKDaycYdDcZDuqYATXw4HFXgaqWTctW/v1HA==} + engines: {node: '>=16 || 14 >=14.18'} + + perfect-debounce@1.0.0: + resolution: {integrity: sha512-xCy9V055GLEqoFaHoC1SoLIaLmWctgCUaBaWxDZ7/Zx4CTyX7cJQLJOok/orfjZAh9kEYpjJa4d0KcJmCbctZA==} + + picocolors@1.0.1: + resolution: {integrity: sha512-anP1Z8qwhkbmu7MFP5iTt+wQKXgwzf7zTyGlcdzabySa9vd0Xt392U0rVmz9poOaBj0uHJKyyo9/upk0HrEQew==} + + picomatch@2.3.1: + resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} + engines: {node: '>=8.6'} + + playwright-core@1.44.0: + resolution: {integrity: sha512-ZTbkNpFfYcGWohvTTl+xewITm7EOuqIqex0c7dNZ+aXsbrLj0qI8XlGKfPpipjm0Wny/4Lt4CJsWJk1stVS5qQ==} + engines: {node: '>=16'} + hasBin: true + + playwright@1.44.0: + resolution: {integrity: sha512-F9b3GUCLQ3Nffrfb6dunPOkE5Mh68tR7zN32L4jCk4FjQamgesGay7/dAAe1WaMEGV04DkdJfcJzjoCKygUaRQ==} + engines: {node: '>=16'} + hasBin: true + + postcss@8.4.38: + resolution: {integrity: sha512-Wglpdk03BSfXkHoQa3b/oulrotAkwrlLDRSOb9D0bN86FdRyE9lppSp33aHNPgBa0JKCoB+drFLZkQoRRYae5A==} + engines: {node: ^10 || ^12 || >=14} + + preact@10.22.0: + resolution: {integrity: sha512-RRurnSjJPj4rp5K6XoP45Ui33ncb7e4H7WiOHVpjbkvqvA3U+N8Z6Qbo0AE6leGYBV66n8EhEaFixvIu3SkxFw==} + + punycode.js@2.3.1: + resolution: {integrity: sha512-uxFIHU0YlHYhDQtV4R9J6a52SLx28BCjT+4ieh7IGbgwVJWO+km431c4yRlREUAsAmt/uMjQUyQHNEPf0M39CA==} + engines: {node: '>=6'} + + queue-microtask@1.2.3: + resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} + + readdirp@3.6.0: + resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} + engines: {node: '>=8.10.0'} + + reusify@1.0.4: + resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} + engines: {iojs: '>=1.0.0', node: '>=0.10.0'} + + rfdc@1.3.1: + resolution: {integrity: sha512-r5a3l5HzYlIC68TpmYKlxWjmOP6wiPJ1vWv2HeLhNsRZMrCkxeqxiHlQ21oXmQ4F3SiryXBHhAD7JZqvOJjFmg==} + + robust-predicates@3.0.2: + resolution: {integrity: sha512-IXgzBWvWQwE6PrDI05OvmXUIruQTcoMDzRsOd5CDvHCVLcLHMTSYvOK5Cm46kWqlV3yAbuSpBZdJ5oP5OUoStg==} + + rollup@4.18.0: + resolution: {integrity: sha512-QmJz14PX3rzbJCN1SG4Xe/bAAX2a6NpCP8ab2vfu2GiUr8AQcr2nCV/oEO3yneFarB67zk8ShlIyWb2LGTb3Sg==} + engines: {node: '>=18.0.0', npm: '>=8.0.0'} + hasBin: true + + run-con@1.3.2: + resolution: 
{integrity: sha512-CcfE+mYiTcKEzg0IqS08+efdnH0oJ3zV0wSUFBNrMHMuxCtXvBCLzCJHatwuXDcu/RlhjTziTo/a1ruQik6/Yg==} + hasBin: true + + run-parallel@1.2.0: + resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + + rw@1.3.3: + resolution: {integrity: sha512-PdhdWy89SiZogBLaw42zdeqtRJ//zFd2PgQavcICDUgJT5oW10QCRKbJ6bg4r0/UY2M6BWd5tkxuGFRvCkgfHQ==} + + sade@1.8.1: + resolution: {integrity: sha512-xal3CZX1Xlo/k4ApwCFrHVACi9fBqJ7V+mwhBsuf/1IOKbBy098Fex+Wa/5QMubw09pSZ/u8EY8PWgevJsXp1A==} + engines: {node: '>=6'} + + safer-buffer@2.1.2: + resolution: {integrity: sha512-YZo3K82SD7Riyi0E1EQPojLz7kpepnSQI9IyPbHHg1XXXevb5dJI7tpyN2ADxGcQbHG7vcyRHk0cbwqcQriUtg==} + + sass@1.77.2: + resolution: {integrity: sha512-eb4GZt1C3avsX3heBNlrc7I09nyT00IUuo4eFhAbeXWU2fvA7oXI53SxODVAA+zgZCk9aunAZgO+losjR3fAwA==} + engines: {node: '>=14.0.0'} + hasBin: true + + sax@1.3.0: + resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==} + + search-insights@2.13.0: + resolution: {integrity: sha512-Orrsjf9trHHxFRuo9/rzm0KIWmgzE8RMlZMzuhZOJ01Rnz3D0YBAe+V6473t6/H6c7irs6Lt48brULAiRWb3Vw==} + + shebang-command@2.0.0: + resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} + engines: {node: '>=8'} + + shebang-regex@3.0.0: + resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} + engines: {node: '>=8'} + + shiki@1.6.0: + resolution: {integrity: sha512-P31ROeXcVgW/k3Z+vUUErcxoTah7ZRaimctOpzGuqAntqnnSmx1HOsvnbAB8Z2qfXPRhw61yptAzCsuKOhTHwQ==} + + signal-exit@4.1.0: + resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} + engines: {node: '>=14'} + + sitemap-ts@1.6.1: + resolution: {integrity: sha512-MOvutoHiSUxna/Q8m45Sz33il0aBvChtj3nqExYXEIkVB+CeVrVVKGqSSNCItbfnXYLBG/MG3HxwIOXxmzYf5w==} + + sitemap@7.1.1: + resolution: {integrity: sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==} + engines: {node: '>=12.0.0', npm: '>=5.6.0'} + hasBin: true + + source-map-js@1.2.0: + resolution: {integrity: sha512-itJW8lvSA0TXEphiRoawsCksnlf8SyvmFzIhltqAHluXd88pkCd+cXJVHTDwdCr0IzwptSm035IHQktUu1QUMg==} + engines: {node: '>=0.10.0'} + + speakingurl@14.0.1: + resolution: {integrity: sha512-1POYv7uv2gXoyGFpBCmpDVSNV74IfsWlDW216UPjbWufNf+bSU6GdbDsxdcxtfwb4xlI3yxzOTKClUosxARYrQ==} + engines: {node: '>=0.10.0'} + + string-width@4.2.3: + resolution: {integrity: sha512-wKyQRQpjJ0sIp62ErSZdGsjMJWsap5oRNihHhu6G7JVO/9jIB6UyevL+tXuOqrng8j/cxKTWyWUwvSTriiZz/g==} + engines: {node: '>=8'} + + string-width@5.1.2: + resolution: {integrity: sha512-HnLOCR3vjcY8beoNLtcjZ5/nxn2afmME6lhrDrebokqMap+XbeW8n9TXpPDOqdGK5qcI3oT0GKTW6wC7EMiVqA==} + engines: {node: '>=12'} + + strip-ansi@6.0.1: + resolution: {integrity: sha512-Y38VPSHcqkFrCpFnQ9vuSXmquuv5oXOKpGeT6aGrr3o3Gc9AlVa6JBfUSOCnbxGGZF+/0ooI7KrPuUSztUdU5A==} + engines: {node: '>=8'} + + strip-ansi@7.1.0: + resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} + engines: {node: '>=12'} + + strip-json-comments@3.1.1: + resolution: {integrity: sha512-6fPc+R4ihwqP6N/aIv2f1gMH8lOVtWQHoqC4yK6oSDVVocumAsfCqjkXnqiYMhmMwS/mEHLp7Vehlt3ql6lEig==} + engines: {node: '>=8'} + + stylis@4.3.2: + resolution: {integrity: 
sha512-bhtUjWd/z6ltJiQwg0dUfxEJ+W+jdqQd8TbWLWyeIJHlnsqmGLRFFd8e5mA0AZi/zx90smXRlN66YMTcaSFifg==} + + tabbable@6.2.0: + resolution: {integrity: sha512-Cat63mxsVJlzYvN51JmVXIgNoUokrIaT2zLclCXjRd8boZ0004U4KCs/sToJ75C6sdlByWxpYnb5Boif1VSFew==} + + to-fast-properties@2.0.0: + resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} + engines: {node: '>=4'} + + to-regex-range@5.0.1: + resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} + engines: {node: '>=8.0'} + + toml@3.0.0: + resolution: {integrity: sha512-y/mWCZinnvxjTKYhJ+pYxwD0mRLVvOtdS2Awbgxln6iEnt4rk0yBxeSBHkGJcPucRiG0e55mwWp+g/05rsrd6w==} + + ts-dedent@2.2.0: + resolution: {integrity: sha512-q5W7tVM71e2xjHZTlgfTDoPF/SmqKG5hddq9SzR49CH2hayqRKJtQ4mtRlSxKaJlR/+9rEM+mnBHf7I2/BQcpQ==} + engines: {node: '>=6.10'} + + uc.micro@2.1.0: + resolution: {integrity: sha512-ARDJmphmdvUk6Glw7y9DQ2bFkKBHwQHLi2lsaH6PPmz/Ka9sFOBsBluozhDltWmnv9u/cF6Rt87znRTPV+yp/A==} + + undici-types@5.26.5: + resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} + + unist-util-stringify-position@3.0.3: + resolution: {integrity: sha512-k5GzIBZ/QatR8N5X2y+drfpWG8IDBzdnVj6OInRNWm1oXrzydiaAT2OQiA8DPRRZyAKb9b6I2a6PxYklZD0gKg==} + + uuid@9.0.1: + resolution: {integrity: sha512-b+1eJOlsR9K8HJpow9Ok3fiWOWSIcIzXodvv0rQjVoOVNpWMpxf1wZNpt4y9h10odCNrqnYp1OBzRktckBe3sA==} + hasBin: true + + uvu@0.5.6: + resolution: {integrity: sha512-+g8ENReyr8YsOc6fv/NVJs2vFdHBnBNdfE49rshrTzDWOlUx4Gq7KOS2GD8eqhy2j+Ejq29+SbKH8yjkAqXqoA==} + engines: {node: '>=8'} + hasBin: true + + vite@5.2.11: + resolution: {integrity: sha512-HndV31LWW05i1BLPMUCE1B9E9GFbOu1MbenhS58FuK6owSO5qHm7GiCotrNY1YE5rMeQSFBGmT5ZaLEjFizgiQ==} + engines: {node: ^18.0.0 || >=20.0.0} + hasBin: true + peerDependencies: + '@types/node': ^18.0.0 || >=20.0.0 + less: '*' + lightningcss: ^1.21.0 + sass: '*' + stylus: '*' + sugarss: '*' + terser: ^5.4.0 + peerDependenciesMeta: + '@types/node': + optional: true + less: + optional: true + lightningcss: + optional: true + sass: + optional: true + stylus: + optional: true + sugarss: + optional: true + terser: + optional: true + + vitepress-plugin-mermaid@2.0.16: + resolution: {integrity: sha512-sW0Eu4+1EzRdwZBMGjzwKDsbQiuJIxCy8BlMw7Ur88p9fXalrFYKqZ3wYWLxsFTBipeooFIeanef/xw1P+v7vQ==} + peerDependencies: + mermaid: '10' + vitepress: ^1.0.0 || ^1.0.0-alpha + + vitepress@1.1.4: + resolution: {integrity: sha512-bWIzFZXpPB6NIDBuWnS20aMADH+FcFKDfQNYFvbOWij03PR29eImTceQHIzCKordjXYBhM/TjE5VKFTUJ3EheA==} + hasBin: true + peerDependencies: + markdown-it-mathjax3: ^4 + postcss: ^8 + peerDependenciesMeta: + markdown-it-mathjax3: + optional: true + postcss: + optional: true + + vue-demi@0.14.7: + resolution: {integrity: sha512-EOG8KXDQNwkJILkx/gPcoL/7vH+hORoBaKgGe+6W7VFMvCYJfmF2dGbvgDroVnI8LU7/kTu8mbjRZGBU1z9NTA==} + engines: {node: '>=12'} + hasBin: true + peerDependencies: + '@vue/composition-api': ^1.0.0-rc.1 + vue: ^3.0.0-0 || ^2.6.0 + peerDependenciesMeta: + '@vue/composition-api': + optional: true + + vue@3.4.27: + resolution: {integrity: sha512-8s/56uK6r01r1icG/aEOHqyMVxd1bkYcSe9j8HcKtr/xTOFWvnzIVTehNW+5Yt89f+DLBe4A569pnZLS5HzAMA==} + peerDependencies: + typescript: '*' + peerDependenciesMeta: + typescript: + optional: true + + web-worker@1.3.0: + resolution: {integrity: sha512-BSR9wyRsy/KOValMgd5kMyr3JzpdeoR9KVId8u5GVlTTAtNChlsE4yTxeY7zMdNSyOmoKBv8NH2qeRY9Tg+IaA==} + + which@2.0.2: + resolution: 
{integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} + engines: {node: '>= 8'} + hasBin: true + + wrap-ansi@7.0.0: + resolution: {integrity: sha512-YVGIj2kamLSTxw6NsZjoBxfSwsn0ycdesmc4p+Q21c5zPuZ1pl+NfxVdxPtdHvmNVOQ6XSYG4AUtyt/Fi7D16Q==} + engines: {node: '>=10'} + + wrap-ansi@8.1.0: + resolution: {integrity: sha512-si7QWI6zUMq56bESFvagtmzMdGOtoxfR+Sez11Mobfc7tm+VkUckk9bW2UeffTGVUbOksxmSw0AA2gs8g71NCQ==} + engines: {node: '>=12'} + + xml-formatter@3.5.0: + resolution: {integrity: sha512-9ij/f2PLIPv+YDywtdztq7U82kYMDa5yPYwpn0TnXnqJRH6Su8RC/oaw91erHe3aSEbfgBaA1hDzReDFb1SVXw==} + engines: {node: '>= 14'} + + xml-parser-xo@4.1.1: + resolution: {integrity: sha512-Ggf2y90+Y6e9IK5hoPuembVHJ03PhDSdhldEmgzbihzu9k0XBo0sfcFxaSi4W1PlUSSI1ok+MJ0JCXUn+U4Ilw==} + engines: {node: '>= 14'} + +snapshots: + + '@algolia/autocomplete-core@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0)': + dependencies: + '@algolia/autocomplete-plugin-algolia-insights': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0) + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + transitivePeerDependencies: + - '@algolia/client-search' + - algoliasearch + - search-insights + + '@algolia/autocomplete-plugin-algolia-insights@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0)': + dependencies: + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + search-insights: 2.13.0 + transitivePeerDependencies: + - '@algolia/client-search' + - algoliasearch + + '@algolia/autocomplete-preset-algolia@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)': + dependencies: + '@algolia/autocomplete-shared': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + '@algolia/client-search': 4.23.3 + algoliasearch: 4.23.3 + + '@algolia/autocomplete-shared@1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)': + dependencies: + '@algolia/client-search': 4.23.3 + algoliasearch: 4.23.3 + + '@algolia/cache-browser-local-storage@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + + '@algolia/cache-common@4.23.3': {} + + '@algolia/cache-in-memory@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + + '@algolia/client-account@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-analytics@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-common@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-personalization@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/client-search@4.23.3': + dependencies: + '@algolia/client-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/logger-common@4.23.3': {} + + '@algolia/logger-console@4.23.3': + dependencies: + '@algolia/logger-common': 4.23.3 + + '@algolia/recommend@4.23.3': + dependencies: + '@algolia/cache-browser-local-storage': 4.23.3 + '@algolia/cache-common': 4.23.3 + '@algolia/cache-in-memory': 4.23.3 + '@algolia/client-common': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/logger-common': 4.23.3 + 
'@algolia/logger-console': 4.23.3 + '@algolia/requester-browser-xhr': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/requester-node-http': 4.23.3 + '@algolia/transporter': 4.23.3 + + '@algolia/requester-browser-xhr@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + + '@algolia/requester-common@4.23.3': {} + + '@algolia/requester-node-http@4.23.3': + dependencies: + '@algolia/requester-common': 4.23.3 + + '@algolia/transporter@4.23.3': + dependencies: + '@algolia/cache-common': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/requester-common': 4.23.3 + + '@antfu/utils@0.7.6': {} + + '@babel/helper-string-parser@7.24.1': {} + + '@babel/helper-validator-identifier@7.24.5': {} + + '@babel/parser@7.24.5': + dependencies: + '@babel/types': 7.24.5 + + '@babel/types@7.24.5': + dependencies: + '@babel/helper-string-parser': 7.24.1 + '@babel/helper-validator-identifier': 7.24.5 + to-fast-properties: 2.0.0 + + '@braintree/sanitize-url@6.0.4': {} + + '@docsearch/css@3.6.0': {} + + '@docsearch/js@3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0)': + dependencies: + '@docsearch/react': 3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0) + preact: 10.22.0 + transitivePeerDependencies: + - '@algolia/client-search' + - '@types/react' + - react + - react-dom + - search-insights + + '@docsearch/react@3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0)': + dependencies: + '@algolia/autocomplete-core': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3)(search-insights@2.13.0) + '@algolia/autocomplete-preset-algolia': 1.9.3(@algolia/client-search@4.23.3)(algoliasearch@4.23.3) + '@docsearch/css': 3.6.0 + algoliasearch: 4.23.3 + optionalDependencies: + search-insights: 2.13.0 + transitivePeerDependencies: + - '@algolia/client-search' + + '@esbuild/aix-ppc64@0.20.2': + optional: true + + '@esbuild/android-arm64@0.20.2': + optional: true + + '@esbuild/android-arm@0.20.2': + optional: true + + '@esbuild/android-x64@0.20.2': + optional: true + + '@esbuild/darwin-arm64@0.20.2': + optional: true + + '@esbuild/darwin-x64@0.20.2': + optional: true + + '@esbuild/freebsd-arm64@0.20.2': + optional: true + + '@esbuild/freebsd-x64@0.20.2': + optional: true + + '@esbuild/linux-arm64@0.20.2': + optional: true + + '@esbuild/linux-arm@0.20.2': + optional: true + + '@esbuild/linux-ia32@0.20.2': + optional: true + + '@esbuild/linux-loong64@0.20.2': + optional: true + + '@esbuild/linux-mips64el@0.20.2': + optional: true + + '@esbuild/linux-ppc64@0.20.2': + optional: true + + '@esbuild/linux-riscv64@0.20.2': + optional: true + + '@esbuild/linux-s390x@0.20.2': + optional: true + + '@esbuild/linux-x64@0.20.2': + optional: true + + '@esbuild/netbsd-x64@0.20.2': + optional: true + + '@esbuild/openbsd-x64@0.20.2': + optional: true + + '@esbuild/sunos-x64@0.20.2': + optional: true + + '@esbuild/win32-arm64@0.20.2': + optional: true + + '@esbuild/win32-ia32@0.20.2': + optional: true + + '@esbuild/win32-x64@0.20.2': + optional: true + + '@isaacs/cliui@8.0.2': + dependencies: + string-width: 5.1.2 + string-width-cjs: string-width@4.2.3 + strip-ansi: 7.1.0 + strip-ansi-cjs: strip-ansi@6.0.1 + wrap-ansi: 8.1.0 + wrap-ansi-cjs: wrap-ansi@7.0.0 + + '@jridgewell/sourcemap-codec@1.4.15': {} + + '@mermaid-js/mermaid-mindmap@9.3.0': + dependencies: + '@braintree/sanitize-url': 6.0.4 + cytoscape: 3.29.2 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.29.2) + cytoscape-fcose: 2.2.0(cytoscape@3.29.2) + d3: 7.9.0 + khroma: 2.1.0 + non-layered-tidy-tree-layout: 2.0.2 + optional: true + + 
'@nodelib/fs.scandir@2.1.5': + dependencies: + '@nodelib/fs.stat': 2.0.5 + run-parallel: 1.2.0 + + '@nodelib/fs.stat@2.0.5': {} + + '@nodelib/fs.walk@1.2.8': + dependencies: + '@nodelib/fs.scandir': 2.1.5 + fastq: 1.17.1 + + '@pkgjs/parseargs@0.11.0': + optional: true + + '@playwright/test@1.44.0': + dependencies: + playwright: 1.44.0 + + '@rollup/rollup-android-arm-eabi@4.18.0': + optional: true + + '@rollup/rollup-android-arm64@4.18.0': + optional: true + + '@rollup/rollup-darwin-arm64@4.18.0': + optional: true - /@vuepress/shared@2.0.0-rc.0: - resolution: {integrity: sha512-ikdSfjRv5LGM1iv4HHwF9P6gqTjaFCXKPK+hzlkHFHNZO1GLqk7/BPc4F51tAG1s8TcLhUZc+54LrfgS7PkXXA==} + '@rollup/rollup-darwin-x64@4.18.0': + optional: true + + '@rollup/rollup-linux-arm-gnueabihf@4.18.0': + optional: true + + '@rollup/rollup-linux-arm-musleabihf@4.18.0': + optional: true + + '@rollup/rollup-linux-arm64-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-arm64-musl@4.18.0': + optional: true + + '@rollup/rollup-linux-powerpc64le-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-riscv64-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-s390x-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-x64-gnu@4.18.0': + optional: true + + '@rollup/rollup-linux-x64-musl@4.18.0': + optional: true + + '@rollup/rollup-win32-arm64-msvc@4.18.0': + optional: true + + '@rollup/rollup-win32-ia32-msvc@4.18.0': + optional: true + + '@rollup/rollup-win32-x64-msvc@4.18.0': + optional: true + + '@shikijs/core@1.6.0': {} + + '@shikijs/transformers@1.6.0': dependencies: - '@mdit-vue/types': 1.0.0 - '@vue/shared': 3.3.11 - dev: true + shiki: 1.6.0 - /@vuepress/theme-default@2.0.0-rc.0: - resolution: {integrity: sha512-I8Y08evDmMuD1jh3NftPpFFSlCWOizQDJLjN7EQwcg7jiAP4A7c2REo6nBN2EmP24Mi7UrRM+RnytHR5V+pElA==} - peerDependencies: - sass-loader: ^13.3.2 - peerDependenciesMeta: - sass-loader: - optional: true + '@types/d3-scale-chromatic@3.0.3': {} + + '@types/d3-scale@4.0.8': dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/plugin-active-header-links': 2.0.0-rc.0 - '@vuepress/plugin-back-to-top': 2.0.0-rc.0 - '@vuepress/plugin-container': 2.0.0-rc.0 - '@vuepress/plugin-external-link-icon': 2.0.0-rc.0 - '@vuepress/plugin-git': 2.0.0-rc.0 - '@vuepress/plugin-medium-zoom': 2.0.0-rc.0 - '@vuepress/plugin-nprogress': 2.0.0-rc.0 - '@vuepress/plugin-palette': 2.0.0-rc.0 - '@vuepress/plugin-prismjs': 2.0.0-rc.0 - '@vuepress/plugin-theme-data': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - sass: 1.69.5 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true + '@types/d3-time': 3.0.3 + + '@types/d3-time@3.0.3': {} - /@vuepress/utils@2.0.0-rc.0: - resolution: {integrity: sha512-Q1ay/woClDHcW0Qe91KsnHoupdNN0tp/vhjvVLuAYxlv/1Obii7hz9WFcajyyGEhmsYxdvG2sGmcxFA02tuKkw==} + '@types/debug@4.1.12': dependencies: - '@types/debug': 4.1.12 - '@types/fs-extra': 11.0.4 - '@types/hash-sum': 1.0.2 - '@vuepress/shared': 2.0.0-rc.0 - debug: 4.3.4 - fs-extra: 11.2.0 - globby: 14.0.0 - hash-sum: 2.0.0 - ora: 7.0.1 - picocolors: 1.0.0 - upath: 2.0.1 + '@types/ms': 0.7.34 + + '@types/estree@1.0.5': {} + + '@types/linkify-it@5.0.0': {} + + '@types/markdown-it@14.1.1': + dependencies: + '@types/linkify-it': 5.0.0 + '@types/mdurl': 2.0.0 + + '@types/mdast@3.0.15': + dependencies: + '@types/unist': 2.0.10 + + '@types/mdurl@2.0.0': {} + + '@types/ms@0.7.34': {} 
+ + '@types/node@17.0.45': {} + + '@types/node@20.12.12': + dependencies: + undici-types: 5.26.5 + + '@types/sax@1.2.7': + dependencies: + '@types/node': 20.12.12 + + '@types/unist@2.0.10': {} + + '@types/web-bluetooth@0.0.20': {} + + '@vitejs/plugin-vue@5.0.4(vite@5.2.11(@types/node@20.12.12)(sass@1.77.2))(vue@3.4.27)': + dependencies: + vite: 5.2.11(@types/node@20.12.12)(sass@1.77.2) + vue: 3.4.27 + + '@vue/compiler-core@3.4.27': + dependencies: + '@babel/parser': 7.24.5 + '@vue/shared': 3.4.27 + entities: 4.5.0 + estree-walker: 2.0.2 + source-map-js: 1.2.0 + + '@vue/compiler-dom@3.4.27': + dependencies: + '@vue/compiler-core': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/compiler-sfc@3.4.27': + dependencies: + '@babel/parser': 7.24.5 + '@vue/compiler-core': 3.4.27 + '@vue/compiler-dom': 3.4.27 + '@vue/compiler-ssr': 3.4.27 + '@vue/shared': 3.4.27 + estree-walker: 2.0.2 + magic-string: 0.30.10 + postcss: 8.4.38 + source-map-js: 1.2.0 + + '@vue/compiler-ssr@3.4.27': + dependencies: + '@vue/compiler-dom': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/devtools-api@7.2.1(vue@3.4.27)': + dependencies: + '@vue/devtools-kit': 7.2.1(vue@3.4.27) transitivePeerDependencies: - - supports-color - dev: true + - vue + + '@vue/devtools-kit@7.2.1(vue@3.4.27)': + dependencies: + '@vue/devtools-shared': 7.2.1 + hookable: 5.5.3 + mitt: 3.0.1 + perfect-debounce: 1.0.0 + speakingurl: 14.0.1 + vue: 3.4.27 + + '@vue/devtools-shared@7.2.1': + dependencies: + rfdc: 1.3.1 + + '@vue/reactivity@3.4.27': + dependencies: + '@vue/shared': 3.4.27 + + '@vue/runtime-core@3.4.27': + dependencies: + '@vue/reactivity': 3.4.27 + '@vue/shared': 3.4.27 + + '@vue/runtime-dom@3.4.27': + dependencies: + '@vue/runtime-core': 3.4.27 + '@vue/shared': 3.4.27 + csstype: 3.1.3 + + '@vue/server-renderer@3.4.27(vue@3.4.27)': + dependencies: + '@vue/compiler-ssr': 3.4.27 + '@vue/shared': 3.4.27 + vue: 3.4.27 - /@vueuse/core@10.7.0(vue@3.3.11): - resolution: {integrity: sha512-4EUDESCHtwu44ZWK3Gc/hZUVhVo/ysvdtwocB5vcauSV4B7NiGY5972WnsojB3vRNdxvAt7kzJWE2h9h7C9d5w==} + '@vue/shared@3.4.27': {} + + '@vueuse/core@10.9.0(vue@3.4.27)': dependencies: '@types/web-bluetooth': 0.0.20 - '@vueuse/metadata': 10.7.0 - '@vueuse/shared': 10.7.0(vue@3.3.11) - vue-demi: 0.14.6(vue@3.3.11) + '@vueuse/metadata': 10.9.0 + '@vueuse/shared': 10.9.0(vue@3.4.27) + vue-demi: 0.14.7(vue@3.4.27) transitivePeerDependencies: - '@vue/composition-api' - vue - dev: true - - /@vueuse/metadata@10.7.0: - resolution: {integrity: sha512-GlaH7tKP2iBCZ3bHNZ6b0cl9g0CJK8lttkBNUX156gWvNYhTKEtbweWLm9rxCPIiwzYcr/5xML6T8ZUEt+DkvA==} - dev: true - /@vueuse/shared@10.7.0(vue@3.3.11): - resolution: {integrity: sha512-kc00uV6CiaTdc3i1CDC4a3lBxzaBE9AgYNtFN87B5OOscqeWElj/uza8qVDmk7/U8JbqoONLbtqiLJ5LGRuqlw==} + '@vueuse/integrations@10.9.0(focus-trap@7.5.4)(vue@3.4.27)': dependencies: - vue-demi: 0.14.6(vue@3.3.11) + '@vueuse/core': 10.9.0(vue@3.4.27) + '@vueuse/shared': 10.9.0(vue@3.4.27) + vue-demi: 0.14.7(vue@3.4.27) + optionalDependencies: + focus-trap: 7.5.4 transitivePeerDependencies: - '@vue/composition-api' - vue - dev: true - - /algoliasearch@4.21.1: - resolution: {integrity: sha512-Ym0MGwOcjQhZ+s1N/j0o94g3vQD0MzNpWsfJLyPVCt0zHflbi0DwYX+9GPmTJ4BzegoxWMyCPgcmpd3R+VlOzQ==} - dependencies: - '@algolia/cache-browser-local-storage': 4.21.1 - '@algolia/cache-common': 4.21.1 - '@algolia/cache-in-memory': 4.21.1 - '@algolia/client-account': 4.21.1 - '@algolia/client-analytics': 4.21.1 - '@algolia/client-common': 4.21.1 - '@algolia/client-personalization': 4.21.1 - '@algolia/client-search': 4.21.1 - 
'@algolia/logger-common': 4.21.1 - '@algolia/logger-console': 4.21.1 - '@algolia/requester-browser-xhr': 4.21.1 - '@algolia/requester-common': 4.21.1 - '@algolia/requester-node-http': 4.21.1 - '@algolia/transporter': 4.21.1 - dev: true - - /ansi-regex@6.0.1: - resolution: {integrity: sha512-n5M855fKb2SsfMIiFFoVrABHJC8QtHwVx+mHWP3QcEqBHYienj5dHSgjbxtC0WEZXYt4wcD6zrQElDPhFuZgfA==} - engines: {node: '>=12'} - dev: true - /anymatch@3.1.3: - resolution: {integrity: sha512-KMReFUr0B4t+D+OBkjR3KYqvocp2XaSzO55UcB6mgQMd3KbcE+mWTyvVV7D/zsdEbNnV6acZUutkiHQXvTr1Rw==} - engines: {node: '>= 8'} - dependencies: - normalize-path: 3.0.0 - picomatch: 2.3.1 - dev: true + '@vueuse/metadata@10.9.0': {} - /arg@5.0.2: - resolution: {integrity: sha512-PYjyFOLKQ9y57JvQ6QLo8dAgNqswh8M1RMJYdQduT6xbWSgK36P/Z/v+p888pM69jMMfS8Xd8F6I1kQ/I9HUGg==} - dev: true + '@vueuse/shared@10.9.0(vue@3.4.27)': + dependencies: + vue-demi: 0.14.7(vue@3.4.27) + transitivePeerDependencies: + - '@vue/composition-api' + - vue - /argparse@1.0.10: - resolution: {integrity: sha512-o5Roy6tNG4SL/FOkCAN6RzjiakZS25RLYFrcMttJqbdd8BWrnA+fGz57iN5Pb06pvBGvl5gQ0B48dJlslXvoTg==} + algoliasearch@4.23.3: dependencies: - sprintf-js: 1.0.3 - dev: true + '@algolia/cache-browser-local-storage': 4.23.3 + '@algolia/cache-common': 4.23.3 + '@algolia/cache-in-memory': 4.23.3 + '@algolia/client-account': 4.23.3 + '@algolia/client-analytics': 4.23.3 + '@algolia/client-common': 4.23.3 + '@algolia/client-personalization': 4.23.3 + '@algolia/client-search': 4.23.3 + '@algolia/logger-common': 4.23.3 + '@algolia/logger-console': 4.23.3 + '@algolia/recommend': 4.23.3 + '@algolia/requester-browser-xhr': 4.23.3 + '@algolia/requester-common': 4.23.3 + '@algolia/requester-node-http': 4.23.3 + '@algolia/transporter': 4.23.3 - /argparse@2.0.1: - resolution: {integrity: sha512-8+9WqebbFzpX9OR+Wa6O29asIogeRMzcGtAINdpMHHyAg10f05aSFVBbcEqGf/PXw1EjAZ+q2/bEBg3DvurK3Q==} - dev: true + ansi-regex@5.0.1: {} - /autoprefixer@10.4.16(postcss@8.4.32): - resolution: {integrity: sha512-7vd3UC6xKp0HLfua5IjZlcXvGAGy7cBAXTg2lyQ/8WpNhd6SiZ8Be+xm3FyBSYJx5GKcpRCzBh7RH4/0dnY+uQ==} - engines: {node: ^10 || ^12 || >=14} - hasBin: true - peerDependencies: - postcss: ^8.1.0 - dependencies: - browserslist: 4.22.2 - caniuse-lite: 1.0.30001568 - fraction.js: 4.3.7 - normalize-range: 0.1.2 - picocolors: 1.0.0 - postcss: 8.4.32 - postcss-value-parser: 4.2.0 - dev: true - - /base64-js@1.5.1: - resolution: {integrity: sha512-AKpaYlHn8t4SVbOHCy+b5+KKgvR4vrsD8vbvrbiQJps7fKDTkjkDry6ji0rUJjC0kzbNePLwzxq8iypo41qeWA==} - dev: true - - /binary-extensions@2.2.0: - resolution: {integrity: sha512-jDctJ/IVQbZoJykoeHbhXpOlNBqGNcwXJKJog42E5HDPUwQTSdjCHdihjj0DlnheQ7blbT6dHOafNAiS8ooQKA==} - engines: {node: '>=8'} - dev: true + ansi-regex@6.0.1: {} - /bl@5.1.0: - resolution: {integrity: sha512-tv1ZJHLfTDnXE6tMHv73YgSJaWR2AFuPwMntBe7XL/GBFHnT0CLnsHMogfk5+GzCDC5ZWarSCYaIGATZt9dNsQ==} + ansi-styles@4.3.0: dependencies: - buffer: 6.0.3 - inherits: 2.0.4 - readable-stream: 3.6.2 - dev: true + color-convert: 2.0.1 - /boolbase@1.0.0: - resolution: {integrity: sha512-JZOSA7Mo9sNGB8+UjSgzdLtokWAky1zbztM3WRLCbZ70/3cTANmQmOdR7y2g+J0e2WXywy1yS468tY+IruqEww==} - dev: true + ansi-styles@6.2.1: {} - /braces@3.0.2: - resolution: {integrity: sha512-b8um+L1RzM3WDSzvhm6gIz1yfTbBt6YTlcEKAvsmqCZZFw46z626lVj9j1yEPW33H5H+lBQpZMP1k8l+78Ha0A==} - engines: {node: '>=8'} + anymatch@3.1.3: dependencies: - fill-range: 7.0.1 - dev: true + normalize-path: 3.0.0 + picomatch: 2.3.1 + optional: true - /browserslist@4.22.2: - resolution: {integrity: 
sha512-0UgcrvQmBDvZHFGdYUehrCNIazki7/lUP3kkoi/r3YB2amZbFM9J43ZRkJTXBUZK4gmx56+Sqk9+Vs9mwZx9+A==} - engines: {node: ^6 || ^7 || ^8 || ^9 || ^10 || ^11 || ^12 || >=13.7} - hasBin: true - dependencies: - caniuse-lite: 1.0.30001568 - electron-to-chromium: 1.4.611 - node-releases: 2.0.14 - update-browserslist-db: 1.0.13(browserslist@4.22.2) - dev: true + arg@5.0.2: {} + + argparse@2.0.1: {} + + balanced-match@1.0.2: {} - /buffer@6.0.3: - resolution: {integrity: sha512-FTiCpNxtwiZZHEZbcbTIcZjERVICn9yq/pDFkTl95/AxzD1naBctN7YO68riM/gLSDY7sdrMby8hofADYuuqOA==} + binary-extensions@2.3.0: + optional: true + + brace-expansion@2.0.1: dependencies: - base64-js: 1.5.1 - ieee754: 1.2.1 - dev: true + balanced-match: 1.0.2 - /cac@6.7.14: - resolution: {integrity: sha512-b6Ilus+c3RrdDk+JhLKUAQfzzgLEPy6wcXqS7f/xe1EETvsDP6GORG7SFuOs6cID5YkqchW/LXZbX5bc8j7ZcQ==} - engines: {node: '>=8'} - dev: true - - /caniuse-lite@1.0.30001568: - resolution: {integrity: sha512-vSUkH84HontZJ88MiNrOau1EBrCqEQYgkC5gIySiDlpsm8sGVrhU7Kx4V6h0tnqaHzIHZv08HlJIwPbL4XL9+A==} - dev: true - - /chalk@5.3.0: - resolution: {integrity: sha512-dLitG79d+GV1Nb/VYcCDFivJeK1hiukt9QjRNVOsUtTy1rR1YJsmpGGTZ3qJos+uw7WmWF4wUwBd9jxjocFC2w==} - engines: {node: ^12.17.0 || ^14.13 || >=16.0.0} - dev: true - - /cheerio-select@2.1.0: - resolution: {integrity: sha512-9v9kG0LvzrlcungtnJtpGNxY+fzECQKhK4EGJX2vByejiMX84MFNQw4UxPJl3bFbTMw+Dfs37XaIkCwTZfLh4g==} - dependencies: - boolbase: 1.0.0 - css-select: 5.1.0 - css-what: 6.1.0 - domelementtype: 2.3.0 - domhandler: 5.0.3 - domutils: 3.1.0 - dev: true - - /cheerio@1.0.0-rc.12: - resolution: {integrity: sha512-VqR8m68vM46BNnuZ5NtnGBKIE/DfN0cRIzg9n40EIq9NOv90ayxLBXA8fXC5gquFRGJSTRqBq25Jt2ECLR431Q==} - engines: {node: '>= 6'} + braces@3.0.3: dependencies: - cheerio-select: 2.1.0 - dom-serializer: 2.0.0 - domhandler: 5.0.3 - domutils: 3.1.0 - htmlparser2: 8.0.2 - parse5: 7.1.2 - parse5-htmlparser2-tree-adapter: 7.0.0 - dev: true - - /chokidar@3.5.3: - resolution: {integrity: sha512-Dr3sfKRP6oTcjf2JmUmFJfeVMvXBdegxB0iVQ5eb2V10uFJUCAS8OByZdVAyVb8xXNz3GjjTgj9kLWsZTqE6kw==} - engines: {node: '>= 8.10.0'} + fill-range: 7.1.1 + + character-entities@2.0.2: {} + + chokidar@3.6.0: dependencies: anymatch: 3.1.3 - braces: 3.0.2 + braces: 3.0.3 glob-parent: 5.1.2 is-binary-path: 2.1.0 is-glob: 4.0.3 @@ -1303,1063 +1914,919 @@ packages: readdirp: 3.6.0 optionalDependencies: fsevents: 2.3.3 - dev: true + optional: true - /cli-cursor@4.0.0: - resolution: {integrity: sha512-VGtlMu3x/4DOtIUwEkRezxUZ2lBacNJCHash0N0WeZDBS+7Ux1dm3XWAgWYxLJFMMdOeXMHXorshEFhbMSGelg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + color-convert@2.0.1: dependencies: - restore-cursor: 4.0.0 - dev: true + color-name: 1.1.4 - /cli-spinners@2.9.2: - resolution: {integrity: sha512-ywqV+5MmyL4E7ybXgKys4DugZbX0FC6LnwrhjuykIjnK9k8OQacQ7axGKnjDXWNhns0xot3bZI5h55H8yo9cJg==} - engines: {node: '>=6'} - dev: true + color-name@1.1.4: {} - /connect-history-api-fallback@2.0.0: - resolution: {integrity: sha512-U73+6lQFmfiNPrYbXqr6kZ1i1wiRqXnp2nhMsINseWXO8lDau0LGEffJ8kQi4EjLZympVgRdvqjAgiZ1tgzDDA==} - engines: {node: '>=0.8'} - dev: true + commander@12.0.0: {} - /cross-spawn@7.0.3: - resolution: {integrity: sha512-iRDPJKUPVEND7dHPO8rkbOnPpyDygcDFtWjpeWNCgy8WP2rXcxXL8TskReQl6OrB2G7+UJrags1q15Fudc7G6w==} - engines: {node: '>= 8'} + commander@7.2.0: {} + + commander@8.3.0: {} + + cose-base@1.0.3: + dependencies: + layout-base: 1.0.2 + + cose-base@2.2.0: + dependencies: + layout-base: 2.0.1 + optional: true + + cross-spawn@7.0.3: dependencies: path-key: 3.1.1 
shebang-command: 2.0.0 which: 2.0.2 - dev: true - /css-select@5.1.0: - resolution: {integrity: sha512-nwoRF1rvRRnnCqqY7updORDsuqKzqYJ28+oSMaJMMgOauh3fvwHqMS7EZpIPqK8GL+g9mKxF1vP/ZjSeNjEVHg==} + csstype@3.1.3: {} + + cytoscape-cose-bilkent@4.1.0(cytoscape@3.29.2): dependencies: - boolbase: 1.0.0 - css-what: 6.1.0 - domhandler: 5.0.3 - domutils: 3.1.0 - nth-check: 2.1.1 - dev: true + cose-base: 1.0.3 + cytoscape: 3.29.2 - /css-what@6.1.0: - resolution: {integrity: sha512-HTUrgRJ7r4dsZKU6GjmpfRK1O76h97Z8MfS1G0FozR+oF2kG6Vfe8JE6zwrkbxigziPHinCJ+gCPjA9EaBDtRw==} - engines: {node: '>= 6'} - dev: true + cytoscape-fcose@2.2.0(cytoscape@3.29.2): + dependencies: + cose-base: 2.2.0 + cytoscape: 3.29.2 + optional: true - /csstype@3.1.3: - resolution: {integrity: sha512-M1uQkMl8rQK/szD0LNhtqxIPLpimGm8sOBwU7lLnCpSbTyY3yeU1Vc7l4KT5zT4s/yOxHH5O7tIuuLOCnLADRw==} - dev: true + cytoscape@3.29.2: {} - /dayjs@1.11.10: - resolution: {integrity: sha512-vjAczensTgRcqDERK0SR2XMwsF/tSvnvlv6VcF2GIhg6Sx4yOIt/irsr1RDJsKiIyBzJDpCoXiWWq28MqH2cnQ==} - dev: true + d3-array@2.12.1: + dependencies: + internmap: 1.0.1 - /debug@4.3.4: - resolution: {integrity: sha512-PRWFHuSU3eDtQJPvnNY7Jcket1j0t5OuOsFzPPzsekD52Zl8qUfFIPEiswXqIvHWGVHOgX+7G/vCNNhehwxfkQ==} - engines: {node: '>=6.0'} - peerDependencies: - supports-color: '*' - peerDependenciesMeta: - supports-color: - optional: true + d3-array@3.2.4: dependencies: - ms: 2.1.2 - dev: true + internmap: 2.0.3 + + d3-axis@3.0.0: {} - /dom-serializer@2.0.0: - resolution: {integrity: sha512-wIkAryiqt/nV5EQKqQpo3SToSOV9J0DnbJqwK7Wv/Trc92zIAYZ4FlMu+JPFW1DfGFt81ZTCGgDEabffXeLyJg==} + d3-brush@3.0.0: dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - entities: 4.5.0 - dev: true + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + d3-chord@3.0.1: + dependencies: + d3-path: 3.1.0 - /domelementtype@2.3.0: - resolution: {integrity: sha512-OLETBj6w0OsagBwdXnPdN0cnMfF9opN69co+7ZrbfPGrdpPVNBUj02spi6B1N7wChLQiPn4CSH/zJvXw56gmHw==} - dev: true + d3-color@3.1.0: {} - /domhandler@5.0.3: - resolution: {integrity: sha512-cgwlv/1iFQiFnU96XXgROh8xTeetsnJiDsTc7TYCLFd9+/WNkIqPTxiM/8pSd8VIrhXGTf1Ny1q1hquVqDJB5w==} - engines: {node: '>= 4'} + d3-contour@4.0.2: dependencies: - domelementtype: 2.3.0 - dev: true + d3-array: 3.2.4 - /domutils@3.1.0: - resolution: {integrity: sha512-H78uMmQtI2AhgDJjWeQmHwJJ2bLPD3GMmO7Zja/ZZh84wkm+4ut+IUnUdRa8uCGX88DiVx1j6FRe1XfxEgjEZA==} + d3-delaunay@6.0.4: dependencies: - dom-serializer: 2.0.0 - domelementtype: 2.3.0 - domhandler: 5.0.3 - dev: true + delaunator: 5.0.1 - /eastasianwidth@0.2.0: - resolution: {integrity: sha512-I88TYZWc9XiYHRQ4/3c5rjjfgkjhLyW2luGIheGERbNQ6OY7yTybanSpDXZa8y7VUP9YmDcYa+eyq4ca7iLqWA==} - dev: true + d3-dispatch@3.0.1: {} - /electron-to-chromium@1.4.611: - resolution: {integrity: sha512-ZtRpDxrjHapOwxtv+nuth5ByB8clyn8crVynmRNGO3wG3LOp8RTcyZDqwaI6Ng6y8FCK2hVZmJoqwCskKbNMaw==} - dev: true + d3-drag@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-selection: 3.0.0 - /emoji-regex@10.3.0: - resolution: {integrity: sha512-QpLs9D9v9kArv4lfDEgg1X/gN5XLnf/A6l9cs8SPZLRZR3ZkY9+kwIQTxm+fsSej5UMYGE8fdoaZVIBlqG0XTw==} - dev: true + d3-dsv@3.0.1: + dependencies: + commander: 7.2.0 + iconv-lite: 0.6.3 + rw: 1.3.3 - /entities@3.0.1: - resolution: {integrity: sha512-WiyBqoomrwMdFG1e0kqvASYfnlb0lp8M5o5Fw2OFq1hNZxxcNk8Ik0Xm7LxzBhuidnZB/UtBqVCgUz3kBOP51Q==} - engines: {node: '>=0.12'} - dev: true + d3-ease@3.0.1: {} - /entities@4.5.0: - resolution: {integrity: 
sha512-V0hjH4dGPh9Ao5p0MoRY6BVqtwCjhz6vI5LT8AJ55H+4g9/4vbHx1I54fS0XuclLhDHArPQCiMjDxjaL8fPxhw==} - engines: {node: '>=0.12'} - dev: true + d3-fetch@3.0.1: + dependencies: + d3-dsv: 3.0.1 - /envinfo@7.11.0: - resolution: {integrity: sha512-G9/6xF1FPbIw0TtalAMaVPpiq2aDEuKLXM314jPVAO9r2fo2a4BLqMNkmRS7O/xPPZ+COAhGIz3ETvHEV3eUcg==} - engines: {node: '>=4'} - hasBin: true - dev: true + d3-force@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-quadtree: 3.0.1 + d3-timer: 3.0.1 - /esbuild@0.19.9: - resolution: {integrity: sha512-U9CHtKSy+EpPsEBa+/A2gMs/h3ylBC0H0KSqIg7tpztHerLi6nrrcoUJAkNCEPumx8yJ+Byic4BVwHgRbN0TBg==} - engines: {node: '>=12'} - hasBin: true - requiresBuild: true - optionalDependencies: - '@esbuild/android-arm': 0.19.9 - '@esbuild/android-arm64': 0.19.9 - '@esbuild/android-x64': 0.19.9 - '@esbuild/darwin-arm64': 0.19.9 - '@esbuild/darwin-x64': 0.19.9 - '@esbuild/freebsd-arm64': 0.19.9 - '@esbuild/freebsd-x64': 0.19.9 - '@esbuild/linux-arm': 0.19.9 - '@esbuild/linux-arm64': 0.19.9 - '@esbuild/linux-ia32': 0.19.9 - '@esbuild/linux-loong64': 0.19.9 - '@esbuild/linux-mips64el': 0.19.9 - '@esbuild/linux-ppc64': 0.19.9 - '@esbuild/linux-riscv64': 0.19.9 - '@esbuild/linux-s390x': 0.19.9 - '@esbuild/linux-x64': 0.19.9 - '@esbuild/netbsd-x64': 0.19.9 - '@esbuild/openbsd-x64': 0.19.9 - '@esbuild/sunos-x64': 0.19.9 - '@esbuild/win32-arm64': 0.19.9 - '@esbuild/win32-ia32': 0.19.9 - '@esbuild/win32-x64': 0.19.9 - dev: true - - /escalade@3.1.1: - resolution: {integrity: sha512-k0er2gUkLf8O0zKJiAhmkTnJlTvINGv7ygDNPbeIsX/TJjGJZHuh9B2UxbsaEkmlEo9MfhrSzmhIlhRlI2GXnw==} - engines: {node: '>=6'} - dev: true + d3-format@3.1.0: {} - /esprima@4.0.1: - resolution: {integrity: sha512-eGuFFw7Upda+g4p+QHvnW0RyTX/SVeJBDM/gCtMARO0cLuT2HcEKnTPvhjV6aGeqrCB/sbNop0Kszm0jsaWU4A==} - engines: {node: '>=4'} - hasBin: true - dev: true + d3-geo@3.1.1: + dependencies: + d3-array: 3.2.4 - /estree-walker@2.0.2: - resolution: {integrity: sha512-Rfkk/Mp/DL7JVje3u18FxFujQlTNR2q6QfMSMB7AvCBx91NGj/ba3kCfza0f6dVDbw7YlRf/nDrn7pQrCCyQ/w==} - dev: true + d3-hierarchy@3.1.2: {} - /execa@8.0.1: - resolution: {integrity: sha512-VyhnebXciFV2DESc+p6B+y0LjSm0krU4OgJN44qFAhBY0TJ+1V61tYD2+wHusZ6F9n5K+vl8k0sTy7PEfV4qpg==} - engines: {node: '>=16.17'} + d3-interpolate@3.0.1: dependencies: - cross-spawn: 7.0.3 - get-stream: 8.0.1 - human-signals: 5.0.0 - is-stream: 3.0.0 - merge-stream: 2.0.0 - npm-run-path: 5.1.0 - onetime: 6.0.0 - signal-exit: 4.1.0 - strip-final-newline: 3.0.0 - dev: true + d3-color: 3.1.0 - /extend-shallow@2.0.1: - resolution: {integrity: sha512-zCnTtlxNoAiDc3gqY2aYAWFx7XWWiasuF2K8Me5WbN8otHKTUKBwjPtNpRs/rbUZm7KxWAaNj7P1a/p52GbVug==} - engines: {node: '>=0.10.0'} + d3-path@1.0.9: {} + + d3-path@3.1.0: {} + + d3-polygon@3.0.1: {} + + d3-quadtree@3.0.1: {} + + d3-random@3.0.1: {} + + d3-sankey@0.12.3: dependencies: - is-extendable: 0.1.1 - dev: true + d3-array: 2.12.1 + d3-shape: 1.3.7 - /fast-glob@3.3.2: - resolution: {integrity: sha512-oX2ruAFQwf/Orj8m737Y5adxDQO0LAB7/S5MnxCdTNDd4p6BsyIVsv9JQsATbTSq8KHRpLwIHbVlUNatxd+1Ow==} - engines: {node: '>=8.6.0'} + d3-scale-chromatic@3.1.0: + dependencies: + d3-color: 3.1.0 + d3-interpolate: 3.0.1 + + d3-scale@4.0.2: + dependencies: + d3-array: 3.2.4 + d3-format: 3.1.0 + d3-interpolate: 3.0.1 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + + d3-selection@3.0.0: {} + + d3-shape@1.3.7: + dependencies: + d3-path: 1.0.9 + + d3-shape@3.2.0: + dependencies: + d3-path: 3.1.0 + + d3-time-format@4.1.0: + dependencies: + d3-time: 3.1.0 + + d3-time@3.1.0: + dependencies: + d3-array: 3.2.4 + + 
d3-timer@3.0.1: {} + + d3-transition@3.0.1(d3-selection@3.0.0): + dependencies: + d3-color: 3.1.0 + d3-dispatch: 3.0.1 + d3-ease: 3.0.1 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-timer: 3.0.1 + + d3-zoom@3.0.0: + dependencies: + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-interpolate: 3.0.1 + d3-selection: 3.0.0 + d3-transition: 3.0.1(d3-selection@3.0.0) + + d3@7.9.0: + dependencies: + d3-array: 3.2.4 + d3-axis: 3.0.0 + d3-brush: 3.0.0 + d3-chord: 3.0.1 + d3-color: 3.1.0 + d3-contour: 4.0.2 + d3-delaunay: 6.0.4 + d3-dispatch: 3.0.1 + d3-drag: 3.0.0 + d3-dsv: 3.0.1 + d3-ease: 3.0.1 + d3-fetch: 3.0.1 + d3-force: 3.0.0 + d3-format: 3.1.0 + d3-geo: 3.1.1 + d3-hierarchy: 3.1.2 + d3-interpolate: 3.0.1 + d3-path: 3.1.0 + d3-polygon: 3.0.1 + d3-quadtree: 3.0.1 + d3-random: 3.0.1 + d3-scale: 4.0.2 + d3-scale-chromatic: 3.1.0 + d3-selection: 3.0.0 + d3-shape: 3.2.0 + d3-time: 3.1.0 + d3-time-format: 4.1.0 + d3-timer: 3.0.1 + d3-transition: 3.0.1(d3-selection@3.0.0) + d3-zoom: 3.0.0 + + dagre-d3-es@7.0.10: + dependencies: + d3: 7.9.0 + lodash-es: 4.17.21 + + dayjs@1.11.11: {} + + debug@4.3.5: + dependencies: + ms: 2.1.2 + + decode-named-character-reference@1.0.2: + dependencies: + character-entities: 2.0.2 + + deep-extend@0.6.0: {} + + delaunator@5.0.1: + dependencies: + robust-predicates: 3.0.2 + + dequal@2.0.3: {} + + diff@5.2.0: {} + + dompurify@3.1.5: {} + + eastasianwidth@0.2.0: {} + + elkjs@0.9.3: {} + + emoji-regex@8.0.0: {} + + emoji-regex@9.2.2: {} + + entities@4.5.0: {} + + esbuild@0.20.2: + optionalDependencies: + '@esbuild/aix-ppc64': 0.20.2 + '@esbuild/android-arm': 0.20.2 + '@esbuild/android-arm64': 0.20.2 + '@esbuild/android-x64': 0.20.2 + '@esbuild/darwin-arm64': 0.20.2 + '@esbuild/darwin-x64': 0.20.2 + '@esbuild/freebsd-arm64': 0.20.2 + '@esbuild/freebsd-x64': 0.20.2 + '@esbuild/linux-arm': 0.20.2 + '@esbuild/linux-arm64': 0.20.2 + '@esbuild/linux-ia32': 0.20.2 + '@esbuild/linux-loong64': 0.20.2 + '@esbuild/linux-mips64el': 0.20.2 + '@esbuild/linux-ppc64': 0.20.2 + '@esbuild/linux-riscv64': 0.20.2 + '@esbuild/linux-s390x': 0.20.2 + '@esbuild/linux-x64': 0.20.2 + '@esbuild/netbsd-x64': 0.20.2 + '@esbuild/openbsd-x64': 0.20.2 + '@esbuild/sunos-x64': 0.20.2 + '@esbuild/win32-arm64': 0.20.2 + '@esbuild/win32-ia32': 0.20.2 + '@esbuild/win32-x64': 0.20.2 + + estree-walker@2.0.2: {} + + fast-glob@3.3.1: dependencies: '@nodelib/fs.stat': 2.0.5 '@nodelib/fs.walk': 1.2.8 glob-parent: 5.1.2 merge2: 1.4.1 - micromatch: 4.0.5 - dev: true + micromatch: 4.0.7 - /fastq@1.15.0: - resolution: {integrity: sha512-wBrocU2LCXXa+lWBt8RoIRD89Fi8OdABODa/kEnyeyjS5aZO5/GNvI5sEINADqP/h8M29UHTHUb53sUu5Ihqdw==} + fastq@1.17.1: dependencies: reusify: 1.0.4 - dev: true - - /fflate@0.8.1: - resolution: {integrity: sha512-/exOvEuc+/iaUm105QIiOt4LpBdMTWsXxqR0HDF35vx3fmaKzw7354gTilCh5rkzEt8WYyG//ku3h3nRmd7CHQ==} - dev: true - /fill-range@7.0.1: - resolution: {integrity: sha512-qOo9F+dMUmC2Lcb4BbVvnKJxTPjCm+RRpe4gDuGrzkL7mEVl/djYSu2OdQ2Pa302N4oqkSg9ir6jaLWJ2USVpQ==} - engines: {node: '>=8'} + fill-range@7.1.1: dependencies: to-regex-range: 5.0.1 - dev: true - /fraction.js@4.3.7: - resolution: {integrity: sha512-ZsDfxO51wGAXREY55a7la9LScWpwv9RxIrYABrlvOFBlH/ShPnrtsXeuUIfXKKOVicNxQ+o8JTbJvjS4M89yew==} - dev: true + focus-trap@7.5.4: + dependencies: + tabbable: 6.2.0 - /fs-extra@11.2.0: - resolution: {integrity: sha512-PmDi3uwK5nFuXh7XDTlVnS17xJS7vW36is2+w3xcv8SVxiB4NyATf4ctkVY5bkSjX0Y4nbvZCq1/EjtEyr9ktw==} - engines: {node: '>=14.14'} + foreground-child@3.1.1: dependencies: - graceful-fs: 4.2.11 - jsonfile: 6.1.0 - 
universalify: 2.0.1 - dev: true + cross-spawn: 7.0.3 + signal-exit: 4.1.0 - /fsevents@2.3.3: - resolution: {integrity: sha512-5xoDfX+fL7faATnagmWPpbFtwh/R77WmMMqqHGS65C3vvB0YHrgF+B1YmZ3441tMj5n63k0212XNoJwzlhffQw==} - engines: {node: ^8.16.0 || ^10.6.0 || >=11.0.0} - os: [darwin] - requiresBuild: true - dev: true + fsevents@2.3.2: optional: true - /get-stream@8.0.1: - resolution: {integrity: sha512-VaUJspBffn/LMCJVoMvSAdmscJyS1auj5Zulnn5UoYcY531UWmdwhRWkcGKnGU93m5HSXP9LP2usOryrBtQowA==} - engines: {node: '>=16'} - dev: true + fsevents@2.3.3: + optional: true - /glob-parent@5.1.2: - resolution: {integrity: sha512-AOIgSQCepiJYwP3ARnGx+5VnTu2HBYdzbGP45eLw1vr3zB3vZLeyed1sC9hnbcOc9/SrMyM5RPQrkGz4aS9Zow==} - engines: {node: '>= 6'} + get-stdin@9.0.0: {} + + glob-parent@5.1.2: dependencies: is-glob: 4.0.3 - dev: true - /globby@14.0.0: - resolution: {integrity: sha512-/1WM/LNHRAOH9lZta77uGbq0dAEQM+XjNesWwhlERDVenqothRbnzTrL3/LrIoEPPjeUHC3vrS6TwoyxeHs7MQ==} - engines: {node: '>=18'} + glob@10.3.16: dependencies: - '@sindresorhus/merge-streams': 1.0.0 - fast-glob: 3.3.2 - ignore: 5.3.0 - path-type: 5.0.0 - slash: 5.1.0 - unicorn-magic: 0.1.0 - dev: true + foreground-child: 3.1.1 + jackspeak: 3.1.2 + minimatch: 9.0.4 + minipass: 7.1.1 + path-scurry: 1.11.1 - /graceful-fs@4.2.11: - resolution: {integrity: sha512-RbJ5/jmFcNNCcDV5o9eTnBLJ/HszWV0P73bc+Ff4nS/rJj+YaS6IGyiOL0VoBYX+l1Wrl3k63h/KrH+nhJ0XvQ==} - dev: true + hookable@5.5.3: {} - /gray-matter@4.0.3: - resolution: {integrity: sha512-5v6yZd4JK3eMI3FqqCouswVqwugaA9r4dNZB1wwcmrD02QkV5H0y7XBQW8QwQqEaZY1pM9aqORSORhJRdNK44Q==} - engines: {node: '>=6.0'} + iconv-lite@0.6.3: + dependencies: + safer-buffer: 2.1.2 + + ignore@5.3.1: {} + + immutable@4.3.6: + optional: true + + ini@4.1.3: {} + + internmap@1.0.1: {} + + internmap@2.0.3: {} + + is-binary-path@2.1.0: + dependencies: + binary-extensions: 2.3.0 + optional: true + + is-extglob@2.1.1: {} + + is-fullwidth-code-point@3.0.0: {} + + is-glob@4.0.3: + dependencies: + is-extglob: 2.1.1 + + is-number@7.0.0: {} + + isexe@2.0.0: {} + + jackspeak@3.1.2: + dependencies: + '@isaacs/cliui': 8.0.2 + optionalDependencies: + '@pkgjs/parseargs': 0.11.0 + + js-yaml@4.1.0: + dependencies: + argparse: 2.0.1 + + jsonc-parser@3.2.1: {} + + jsonpointer@5.0.1: {} + + katex@0.16.10: dependencies: - js-yaml: 3.14.1 - kind-of: 6.0.3 - section-matter: 1.0.0 - strip-bom-string: 1.0.0 - dev: true + commander: 8.3.0 - /hash-sum@2.0.0: - resolution: {integrity: sha512-WdZTbAByD+pHfl/g9QSsBIIwy8IT+EsPiKDs0KNX+zSHhdDLFKdZu0BQHljvO+0QI/BasbMSUa8wYNCZTvhslg==} - dev: true + khroma@2.1.0: {} - /htmlparser2@8.0.2: - resolution: {integrity: sha512-GYdjWKDkbRLkZ5geuHs5NY1puJ+PXwP7+fHPRz06Eirsb9ugf6d8kkXav6ADhcODhFFPMIXyxkxSuMf3D6NCFA==} - dependencies: - domelementtype: 2.3.0 - domhandler: 5.0.3 - domutils: 3.1.0 - entities: 4.5.0 - dev: true + kleur@4.1.5: {} - /human-signals@5.0.0: - resolution: {integrity: sha512-AXcZb6vzzrFAUE61HnN4mpLqd/cSIwNQjtNWR0euPm6y0iqx3G4gOXaIDdtdDwZmhwe82LA6+zinmW4UBWVePQ==} - engines: {node: '>=16.17.0'} - dev: true + layout-base@1.0.2: {} - /ieee754@1.2.1: - resolution: {integrity: sha512-dcyqhDvX1C46lXZcVqCpK+FtMRQVdIMN6/Df5js2zouUsqG7I6sFxitIC+7KYK29KdXOLHdu9zL4sFnoVQnqaA==} - dev: true + layout-base@2.0.1: + optional: true - /ignore@5.3.0: - resolution: {integrity: sha512-g7dmpshy+gD7mh88OC9NwSGTKoc3kyLAZQRU1mt53Aw/vnvfXnbC+F/7F7QoYVKbV+KNvJx8wArewKy1vXMtlg==} - engines: {node: '>= 4'} - dev: true + linkify-it@5.0.0: + dependencies: + uc.micro: 2.1.0 - /immutable@4.3.4: - resolution: {integrity: 
sha512-fsXeu4J4i6WNWSikpI88v/PcVflZz+6kMhUfIwc5SY+poQRPnaf5V7qds6SUyUN3cVxEzuCab7QIoLOQ+DQ1wA==} - dev: true + lodash-es@4.17.21: {} - /inherits@2.0.4: - resolution: {integrity: sha512-k/vGaX4/Yla3WzyMCvTQOXYeIHvqOKtnqBduzTHpzpQZzAskKMhZ2K+EnBiSM9zGSoIFeMpXKxa4dYeZIQqewQ==} - dev: true + lru-cache@10.2.2: {} - /is-binary-path@2.1.0: - resolution: {integrity: sha512-ZMERYes6pDydyuGidse7OsHxtbI7WVeUEozgR/g7rd0xUimYNlvZRE/K2MgZTjWy725IfelLeVcEM97mmtRGXw==} - engines: {node: '>=8'} + magic-string@0.30.10: dependencies: - binary-extensions: 2.2.0 - dev: true + '@jridgewell/sourcemap-codec': 1.4.15 - /is-extendable@0.1.1: - resolution: {integrity: sha512-5BMULNob1vgFX6EjQw5izWDxrecWK9AM72rugNr0TFldMOi0fj6Jk+zeKIt0xGj4cEfQIJth4w3OKWOJ4f+AFw==} - engines: {node: '>=0.10.0'} - dev: true + mark.js@8.11.1: {} - /is-extglob@2.1.1: - resolution: {integrity: sha512-SbKbANkN603Vi4jEZv49LeVJMn4yGwsbzZworEoyEiutsN3nJYdbO36zfhGJ6QEDpOZIFkDtnq5JRxmvl3jsoQ==} - engines: {node: '>=0.10.0'} - dev: true + markdown-it-footnote@4.0.0: {} - /is-glob@4.0.3: - resolution: {integrity: sha512-xelSayHH36ZgE7ZWhli7pW34hNbNl8Ojv5KVmkJD4hBdD3th8Tfk9vYasLM+mXWOZhFkgZfxhLSnrwRr4elSSg==} - engines: {node: '>=0.10.0'} + markdown-it@14.1.0: dependencies: - is-extglob: 2.1.1 - dev: true - - /is-interactive@2.0.0: - resolution: {integrity: sha512-qP1vozQRI+BMOPcjFzrjXuQvdak2pHNUMZoeG2eRbiSqyvbEf/wQtEOTOX1guk6E3t36RkaqiSt8A/6YElNxLQ==} - engines: {node: '>=12'} - dev: true + argparse: 2.0.1 + entities: 4.5.0 + linkify-it: 5.0.0 + mdurl: 2.0.0 + punycode.js: 2.3.1 + uc.micro: 2.1.0 + + markdownlint-cli@0.40.0: + dependencies: + commander: 12.0.0 + get-stdin: 9.0.0 + glob: 10.3.16 + ignore: 5.3.1 + js-yaml: 4.1.0 + jsonc-parser: 3.2.1 + jsonpointer: 5.0.1 + markdownlint: 0.34.0 + minimatch: 9.0.4 + run-con: 1.3.2 + toml: 3.0.0 + + markdownlint-micromark@0.1.9: {} + + markdownlint@0.34.0: + dependencies: + markdown-it: 14.1.0 + markdownlint-micromark: 0.1.9 + + mdast-util-from-markdown@1.3.1: + dependencies: + '@types/mdast': 3.0.15 + '@types/unist': 2.0.10 + decode-named-character-reference: 1.0.2 + mdast-util-to-string: 3.2.0 + micromark: 3.2.0 + micromark-util-decode-numeric-character-reference: 1.1.0 + micromark-util-decode-string: 1.1.0 + micromark-util-normalize-identifier: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + unist-util-stringify-position: 3.0.3 + uvu: 0.5.6 + transitivePeerDependencies: + - supports-color - /is-number@7.0.0: - resolution: {integrity: sha512-41Cifkg6e8TylSpdtTpeLVMqvSBEVzTttHvERD741+pnZ8ANv0004MRL43QKPDlK9cGvNp6NZWZUBlbGXYxxng==} - engines: {node: '>=0.12.0'} - dev: true + mdast-util-to-string@3.2.0: + dependencies: + '@types/mdast': 3.0.15 + + mdurl@2.0.0: {} + + merge2@1.4.1: {} + + mermaid@10.9.1: + dependencies: + '@braintree/sanitize-url': 6.0.4 + '@types/d3-scale': 4.0.8 + '@types/d3-scale-chromatic': 3.0.3 + cytoscape: 3.29.2 + cytoscape-cose-bilkent: 4.1.0(cytoscape@3.29.2) + d3: 7.9.0 + d3-sankey: 0.12.3 + dagre-d3-es: 7.0.10 + dayjs: 1.11.11 + dompurify: 3.1.5 + elkjs: 0.9.3 + katex: 0.16.10 + khroma: 2.1.0 + lodash-es: 4.17.21 + mdast-util-from-markdown: 1.3.1 + non-layered-tidy-tree-layout: 2.0.2 + stylis: 4.3.2 + ts-dedent: 2.2.0 + uuid: 9.0.1 + web-worker: 1.3.0 + transitivePeerDependencies: + - supports-color - /is-stream@3.0.0: - resolution: {integrity: sha512-LnQR4bZ9IADDRSkvpqMGvt/tEJWclzklNgSw48V5EAaAeDd6qGvN8ei6k5p0tvxSR171VmGyHuTiAOfxAbr8kA==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dev: true + micromark-core-commonmark@1.1.0: + dependencies: + 
decode-named-character-reference: 1.0.2 + micromark-factory-destination: 1.1.0 + micromark-factory-label: 1.1.0 + micromark-factory-space: 1.1.0 + micromark-factory-title: 1.1.0 + micromark-factory-whitespace: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-chunked: 1.1.0 + micromark-util-classify-character: 1.1.0 + micromark-util-html-tag-name: 1.2.0 + micromark-util-normalize-identifier: 1.1.0 + micromark-util-resolve-all: 1.1.0 + micromark-util-subtokenize: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 - /is-unicode-supported@1.3.0: - resolution: {integrity: sha512-43r2mRvz+8JRIKnWJ+3j8JtjRKZ6GmjzfaE/qiBJnikNnYv/6bagRJ1kUhNk8R5EX/GkobD+r+sfxCPJsiKBLQ==} - engines: {node: '>=12'} - dev: true + micromark-factory-destination@1.1.0: + dependencies: + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /isexe@2.0.0: - resolution: {integrity: sha512-RHxMLp9lnKHGHRng9QFhRCMbYAcVpn69smSGcq3f36xjgVVWThj4qqLbTLlq7Ssj8B+fIQ1EuCEGI2lKsyQeIw==} - dev: true + micromark-factory-label@1.1.0: + dependencies: + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 - /js-yaml@3.14.1: - resolution: {integrity: sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==} - hasBin: true + micromark-factory-space@1.1.0: dependencies: - argparse: 1.0.10 - esprima: 4.0.1 - dev: true + micromark-util-character: 1.2.0 + micromark-util-types: 1.1.0 - /jsonfile@6.1.0: - resolution: {integrity: sha512-5dgndWOriYSm5cnYaJNhalLNDKOqFwyDB/rr1E9ZsGciGvKPs8R2xYGCacuf3z6K1YKDz182fd+fY3cn3pMqXQ==} + micromark-factory-title@1.1.0: dependencies: - universalify: 2.0.1 - optionalDependencies: - graceful-fs: 4.2.11 - dev: true + micromark-factory-space: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /kind-of@6.0.3: - resolution: {integrity: sha512-dcS1ul+9tmeD95T+x28/ehLgd9mENa3LsvDTtzm3vyBEO7RPptvAD+t44WVXaUjTBRcrpFeFlC8WCruUR456hw==} - engines: {node: '>=0.10.0'} - dev: true + micromark-factory-whitespace@1.1.0: + dependencies: + micromark-factory-space: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /lilconfig@3.0.0: - resolution: {integrity: sha512-K2U4W2Ff5ibV7j7ydLr+zLAkIg5JJ4lPn1Ltsdt+Tz/IjQ8buJ55pZAxoP34lqIiwtF9iAvtLv3JGv7CAyAg+g==} - engines: {node: '>=14'} - dev: true + micromark-util-character@1.2.0: + dependencies: + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /linkify-it@4.0.1: - resolution: {integrity: sha512-C7bfi1UZmoj8+PQx22XyeXCuBlokoyWQL5pWSP+EI6nzRylyThouddufc2c1NDIcP9k5agmN9fLpA7VNJfIiqw==} + micromark-util-chunked@1.1.0: dependencies: - uc.micro: 1.0.6 - dev: true + micromark-util-symbol: 1.1.0 - /log-symbols@5.1.0: - resolution: {integrity: sha512-l0x2DvrW294C9uDCoQe1VSU4gf529FkSZ6leBl4TiqZH/e+0R7hSfHQBNut2mNygDgHwvYHfFLn6Oxb3VWj2rA==} - engines: {node: '>=12'} + micromark-util-classify-character@1.1.0: dependencies: - chalk: 5.3.0 - is-unicode-supported: 1.3.0 - dev: true + micromark-util-character: 1.2.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 - /lru-cache@6.0.0: - resolution: {integrity: sha512-Jo6dJ04CmSjuznwJSS3pUeWmd/H0ffTlkXXgwZi+eq1UCmqQwCh+eLsYOYCwY991i2Fah4h1BEMCx4qThGbsiA==} - engines: {node: '>=10'} + micromark-util-combine-extensions@1.1.0: dependencies: - yallist: 4.0.0 - dev: true + micromark-util-chunked: 1.1.0 + micromark-util-types: 1.1.0 - /magic-string@0.30.5: - 
resolution: {integrity: sha512-7xlpfBaQaP/T6Vh8MO/EqXSW5En6INHEvEXQiuff7Gku0PWjU3uf6w/j9o7O+SpB5fOAkrI5HeoNgwjEO0pFsA==} - engines: {node: '>=12'} + micromark-util-decode-numeric-character-reference@1.1.0: dependencies: - '@jridgewell/sourcemap-codec': 1.4.15 - dev: true + micromark-util-symbol: 1.1.0 - /markdown-it-anchor@8.6.7(@types/markdown-it@13.0.7)(markdown-it@13.0.2): - resolution: {integrity: sha512-FlCHFwNnutLgVTflOYHPW2pPcl2AACqVzExlkGQNsi4CJgqOHN7YTgDd4LuhgN1BFO3TS0vLAruV1Td6dwWPJA==} - peerDependencies: - '@types/markdown-it': '*' - markdown-it: '*' + micromark-util-decode-string@1.1.0: dependencies: - '@types/markdown-it': 13.0.7 - markdown-it: 13.0.2 - dev: true + decode-named-character-reference: 1.0.2 + micromark-util-character: 1.2.0 + micromark-util-decode-numeric-character-reference: 1.1.0 + micromark-util-symbol: 1.1.0 - /markdown-it-container@3.0.0: - resolution: {integrity: sha512-y6oKTq4BB9OQuY/KLfk/O3ysFhB3IMYoIWhGJEidXt1NQFocFK2sA2t0NYZAMyMShAGL6x5OPIbrmXPIqaN9rw==} - dev: true + micromark-util-encode@1.1.0: {} - /markdown-it-emoji@2.0.2: - resolution: {integrity: sha512-zLftSaNrKuYl0kR5zm4gxXjHaOI3FAOEaloKmRA5hijmJZvSjmxcokOLlzycb/HXlUFWzXqpIEoyEMCE4i9MvQ==} - dev: true + micromark-util-html-tag-name@1.2.0: {} - /markdown-it@13.0.2: - resolution: {integrity: sha512-FtwnEuuK+2yVU7goGn/MJ0WBZMM9ZPgU9spqlFs7/A/pDIUNSOQZhUgOqYCficIuR2QaFnrt8LHqBWsbTAoI5w==} - hasBin: true + micromark-util-normalize-identifier@1.1.0: dependencies: - argparse: 2.0.1 - entities: 3.0.1 - linkify-it: 4.0.1 - mdurl: 1.0.1 - uc.micro: 1.0.6 - dev: true + micromark-util-symbol: 1.1.0 - /mdurl@1.0.1: - resolution: {integrity: sha512-/sKlQJCBYVY9Ers9hqzKou4H6V5UWc/M59TH2dvkt+84itfnq7uFOMLpOiOS4ujvHP4etln18fmIxA5R5fll0g==} - dev: true + micromark-util-resolve-all@1.1.0: + dependencies: + micromark-util-types: 1.1.0 - /medium-zoom@1.1.0: - resolution: {integrity: sha512-ewyDsp7k4InCUp3jRmwHBRFGyjBimKps/AJLjRSox+2q/2H4p/PNpQf+pwONWlJiOudkBXtbdmVbFjqyybfTmQ==} - dev: true + micromark-util-sanitize-uri@1.2.0: + dependencies: + micromark-util-character: 1.2.0 + micromark-util-encode: 1.1.0 + micromark-util-symbol: 1.1.0 - /merge-stream@2.0.0: - resolution: {integrity: sha512-abv/qOcuPfk3URPfDzmZU1LKmuw8kT+0nIHvKrKgFrwifol/doWcdA4ZqsWQ8ENrFKkd67Mfpo/LovbIUsbt3w==} - dev: true + micromark-util-subtokenize@1.1.0: + dependencies: + micromark-util-chunked: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 - /merge2@1.4.1: - resolution: {integrity: sha512-8q7VEgMJW4J8tcfVPy8g09NcQwZdbwFEqhe/WZkoIzjn/3TGDwtOCYtXGxA3O8tPzpczCCDgv+P2P5y00ZJOOg==} - engines: {node: '>= 8'} - dev: true + micromark-util-symbol@1.1.0: {} - /micromatch@4.0.5: - resolution: {integrity: sha512-DMy+ERcEW2q8Z2Po+WNXuw3c5YaUSFjAO5GsJqfEl7UjvtIuFKO6ZrKvcItdy98dwFI2N1tg3zNIdKaQT+aNdA==} - engines: {node: '>=8.6'} - dependencies: - braces: 3.0.2 - picomatch: 2.3.1 - dev: true + micromark-util-types@1.1.0: {} - /mimic-fn@2.1.0: - resolution: {integrity: sha512-OqbOk5oEQeAZ8WXWydlu9HJjz9WVdEIvamMCcXmuqUYjTknH/sqsWvhQ3vgwKFRR1HpjvNBKQ37nbJgYzGqGcg==} - engines: {node: '>=6'} - dev: true + micromark@3.2.0: + dependencies: + '@types/debug': 4.1.12 + debug: 4.3.5 + decode-named-character-reference: 1.0.2 + micromark-core-commonmark: 1.1.0 + micromark-factory-space: 1.1.0 + micromark-util-character: 1.2.0 + micromark-util-chunked: 1.1.0 + micromark-util-combine-extensions: 1.1.0 + micromark-util-decode-numeric-character-reference: 1.1.0 + micromark-util-encode: 1.1.0 + micromark-util-normalize-identifier: 1.1.0 + 
micromark-util-resolve-all: 1.1.0 + micromark-util-sanitize-uri: 1.2.0 + micromark-util-subtokenize: 1.1.0 + micromark-util-symbol: 1.1.0 + micromark-util-types: 1.1.0 + uvu: 0.5.6 + transitivePeerDependencies: + - supports-color - /mimic-fn@4.0.0: - resolution: {integrity: sha512-vqiC06CuhBTUdZH+RYl8sFrL096vA45Ok5ISO6sE/Mr1jRbGH4Csnhi8f3wKVl7x8mO4Au7Ir9D3Oyv1VYMFJw==} - engines: {node: '>=12'} - dev: true + micromatch@4.0.7: + dependencies: + braces: 3.0.3 + picomatch: 2.3.1 - /ms@2.1.2: - resolution: {integrity: sha512-sGkPx+VjMtmA6MX27oA4FBFELFCZZ4S4XqeGOXCv68tT+jb3vk/RyaKWP0PTKyWtmLSM0b+adUTEvbs1PEaH2w==} - dev: true + minimatch@9.0.4: + dependencies: + brace-expansion: 2.0.1 - /nanoid@3.3.7: - resolution: {integrity: sha512-eSRppjcPIatRIMC1U6UngP8XFcz8MQWGQdt1MTBQ7NaAmvXDfvNxbvWV3x2y6CdEUciCSsDHDQZbhYaB8QEo2g==} - engines: {node: ^10 || ^12 || ^13.7 || ^14 || >=15.0.1} - hasBin: true - dev: true + minimist@1.2.8: {} - /node-releases@2.0.14: - resolution: {integrity: sha512-y10wOWt8yZpqXmOgRo77WaHEmhYQYGNA6y421PKsKYWEK8aW+cqAphborZDhqfyKrbZEN92CN1X2KbafY2s7Yw==} - dev: true + minipass@7.1.1: {} - /normalize-path@3.0.0: - resolution: {integrity: sha512-6eZs5Ls3WtCisHWp9S2GUy8dqkpGi4BVSz3GaqiE6ezub0512ESztXUwUB6C6IKbQkY2Pnb/mD4WYojCRwcwLA==} - engines: {node: '>=0.10.0'} - dev: true + minisearch@6.3.0: {} - /normalize-range@0.1.2: - resolution: {integrity: sha512-bdok/XvKII3nUpklnV6P2hxtMNrCboOjAcyBuQnWEhO665FwrSNRxU+AqpsyvO6LgGYPspN+lu5CLtw4jPRKNA==} - engines: {node: '>=0.10.0'} - dev: true + mitt@3.0.1: {} - /npm-run-path@5.1.0: - resolution: {integrity: sha512-sJOdmRGrY2sjNTRMbSvluQqg+8X7ZK61yvzBEIDhz4f8z1TZFYABsqjjCBd/0PUNE9M6QDgHJXQkGUEm7Q+l9Q==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - path-key: 4.0.0 - dev: true + mri@1.2.0: {} - /nth-check@2.1.1: - resolution: {integrity: sha512-lqjrjmaOoAnWfMmBPL+XNnynZh2+swxiX3WUE0s4yEHI6m+AwrK2UZOimIRl3X/4QctVqS8AiZjFqyOGrMXb/w==} - dependencies: - boolbase: 1.0.0 - dev: true + ms@2.1.2: {} - /onetime@5.1.2: - resolution: {integrity: sha512-kbpaSSGJTWdAY5KPVeMOKXSrPtr8C8C7wodJbcsd51jRnmD+GZu8Y0VoU6Dm5Z4vWr0Ig/1NKuWRKf7j5aaYSg==} - engines: {node: '>=6'} - dependencies: - mimic-fn: 2.1.0 - dev: true + nanoid@3.3.7: {} - /onetime@6.0.0: - resolution: {integrity: sha512-1FlR+gjXK7X+AsAHso35MnyN5KqGwJRi/31ft6x0M194ht7S+rWAvd7PHss9xSKMzE0asv1pyIHaJYq+BbacAQ==} - engines: {node: '>=12'} - dependencies: - mimic-fn: 4.0.0 - dev: true + non-layered-tidy-tree-layout@2.0.2: {} - /ora@7.0.1: - resolution: {integrity: sha512-0TUxTiFJWv+JnjWm4o9yvuskpEJLXTcng8MJuKd+SzAzp2o+OP3HWqNhB4OdJRt1Vsd9/mR0oyaEYlOnL7XIRw==} - engines: {node: '>=16'} - dependencies: - chalk: 5.3.0 - cli-cursor: 4.0.0 - cli-spinners: 2.9.2 - is-interactive: 2.0.0 - is-unicode-supported: 1.3.0 - log-symbols: 5.1.0 - stdin-discarder: 0.1.0 - string-width: 6.1.0 - strip-ansi: 7.1.0 - dev: true + normalize-path@3.0.0: + optional: true - /parse5-htmlparser2-tree-adapter@7.0.0: - resolution: {integrity: sha512-B77tOZrqqfUfnVcOrUvfdLbz4pu4RopLD/4vmu3HUPswwTA8OH0EMW9BlWR2B0RCoiZRAHEUu7IxeP1Pd1UU+g==} - dependencies: - domhandler: 5.0.3 - parse5: 7.1.2 - dev: true + path-key@3.1.1: {} - /parse5@7.1.2: - resolution: {integrity: sha512-Czj1WaSVpaoj0wbhMzLmWD69anp2WH7FXMB9n1Sy8/ZFF9jolSQVMu1Ij5WIyGmcBmhk7EOndpO4mIpihVqAXw==} + path-scurry@1.11.1: dependencies: - entities: 4.5.0 - dev: true - - /path-key@3.1.1: - resolution: {integrity: sha512-ojmeN0qd+y0jszEtoY48r0Peq5dwMEkIlCOu6Q5f41lfkswXuKtYrhgoTpLnyIcHm24Uhqx+5Tqm2InSwLhE6Q==} - engines: {node: '>=8'} - dev: true 
+ lru-cache: 10.2.2 + minipass: 7.1.1 - /path-key@4.0.0: - resolution: {integrity: sha512-haREypq7xkM7ErfgIyA0z+Bj4AGKlMSdlQE2jvJo6huWD1EdkKYV+G/T4nq0YEF2vgTT8kqMFKo1uHn950r4SQ==} - engines: {node: '>=12'} - dev: true + perfect-debounce@1.0.0: {} - /path-type@5.0.0: - resolution: {integrity: sha512-5HviZNaZcfqP95rwpv+1HDgUamezbqdSYTyzjTvwtJSnIH+3vnbmWsItli8OFEndS984VT55M3jduxZbX351gg==} - engines: {node: '>=12'} - dev: true + picocolors@1.0.1: {} - /picocolors@1.0.0: - resolution: {integrity: sha512-1fygroTLlHu66zi26VoTDv8yRgm0Fccecssto+MhsZ0D/DGW2sm8E8AjW7NU5VVTRt5GxbeZ5qBuJr+HyLYkjQ==} - dev: true + picomatch@2.3.1: {} - /picomatch@2.3.1: - resolution: {integrity: sha512-JU3teHTNjmE2VCGFzuY8EXzCDVwEqB2a8fsIvwaStHhAWJEeVd1o1QD80CU6+ZdEXXSLbSsuLwJjkCBWqRQUVA==} - engines: {node: '>=8.6'} - dev: true + playwright-core@1.44.0: {} - /postcss-load-config@4.0.2(postcss@8.4.32): - resolution: {integrity: sha512-bSVhyJGL00wMVoPUzAVAnbEoWyqRxkjv64tUl427SKnPrENtq6hJwUojroMz2VB+Q1edmi4IfrAPpami5VVgMQ==} - engines: {node: '>= 14'} - peerDependencies: - postcss: '>=8.0.9' - ts-node: '>=9.0.0' - peerDependenciesMeta: - postcss: - optional: true - ts-node: - optional: true + playwright@1.44.0: dependencies: - lilconfig: 3.0.0 - postcss: 8.4.32 - yaml: 2.3.4 - dev: true - - /postcss-value-parser@4.2.0: - resolution: {integrity: sha512-1NNCs6uurfkVbeXG4S8JFT9t19m45ICnif8zWLd5oPSZ50QnwMfK+H3jv408d4jw/7Bttv5axS5IiHoLaVNHeQ==} - dev: true + playwright-core: 1.44.0 + optionalDependencies: + fsevents: 2.3.2 - /postcss@8.4.32: - resolution: {integrity: sha512-D/kj5JNu6oo2EIy+XL/26JEDTlIbB8hw85G8StOE6L74RQAVVP5rej6wxCNqyMbR4RkPfqvezVbPw81Ngd6Kcw==} - engines: {node: ^10 || ^12 || >=14} + postcss@8.4.38: dependencies: nanoid: 3.3.7 - picocolors: 1.0.0 - source-map-js: 1.0.2 - dev: true + picocolors: 1.0.1 + source-map-js: 1.2.0 - /preact@10.19.3: - resolution: {integrity: sha512-nHHTeFVBTHRGxJXKkKu5hT8C/YWBkPso4/Gad6xuj5dbptt9iF9NZr9pHbPhBrnT2klheu7mHTxTZ/LjwJiEiQ==} - dev: true - - /prismjs@1.29.0: - resolution: {integrity: sha512-Kx/1w86q/epKcmte75LNrEoT+lX8pBpavuAbvJWRXar7Hz8jrtF+e3vY751p0R8H9HdArwaCTNDDzHg/ScJK1Q==} - engines: {node: '>=6'} - dev: true + preact@10.22.0: {} - /queue-microtask@1.2.3: - resolution: {integrity: sha512-NuaNSa6flKT5JaSYQzJok04JzTL1CA6aGhv5rfLW3PgqA+M2ChpZQnAC8h8i4ZFkBS8X5RqkDBHA7r4hej3K9A==} - dev: true + punycode.js@2.3.1: {} - /readable-stream@3.6.2: - resolution: {integrity: sha512-9u/sniCrY3D5WdsERHzHE4G2YCXqoG5FTHUiCC4SIbr6XcLZBY05ya9EKjYek9O5xOAwjGq+1JdGBAS7Q9ScoA==} - engines: {node: '>= 6'} - dependencies: - inherits: 2.0.4 - string_decoder: 1.3.0 - util-deprecate: 1.0.2 - dev: true + queue-microtask@1.2.3: {} - /readdirp@3.6.0: - resolution: {integrity: sha512-hOS089on8RduqdbhvQ5Z37A0ESjsqz6qnRcffsMU3495FuTdqSm+7bhJ29JvIOsBDEEnan5DPu9t3To9VRlMzA==} - engines: {node: '>=8.10.0'} + readdirp@3.6.0: dependencies: picomatch: 2.3.1 - dev: true + optional: true - /restore-cursor@4.0.0: - resolution: {integrity: sha512-I9fPXU9geO9bHOt9pHHOhOkYerIMsmVaWB0rA2AI9ERh/+x/i7MV5HKBNrg+ljO5eoPVgCcnFuRjJ9uH6I/3eg==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} - dependencies: - onetime: 5.1.2 - signal-exit: 3.0.7 - dev: true + reusify@1.0.4: {} - /reusify@1.0.4: - resolution: {integrity: sha512-U9nH88a3fc/ekCF1l0/UP1IosiuIjyTh7hBvXVMHYgVcfGvt897Xguj2UOLDeI5BG2m7/uwyaLVT6fbtCwTyzw==} - engines: {iojs: '>=1.0.0', node: '>=0.10.0'} - dev: true + rfdc@1.3.1: {} - /rollup@4.8.0: - resolution: {integrity: 
sha512-NpsklK2fach5CdI+PScmlE5R4Ao/FSWtF7LkoIrHDxPACY/xshNasPsbpG0VVHxUTbf74tJbVT4PrP8JsJ6ZDA==} - engines: {node: '>=18.0.0', npm: '>=8.0.0'} - hasBin: true + robust-predicates@3.0.2: {} + + rollup@4.18.0: + dependencies: + '@types/estree': 1.0.5 optionalDependencies: - '@rollup/rollup-android-arm-eabi': 4.8.0 - '@rollup/rollup-android-arm64': 4.8.0 - '@rollup/rollup-darwin-arm64': 4.8.0 - '@rollup/rollup-darwin-x64': 4.8.0 - '@rollup/rollup-linux-arm-gnueabihf': 4.8.0 - '@rollup/rollup-linux-arm64-gnu': 4.8.0 - '@rollup/rollup-linux-arm64-musl': 4.8.0 - '@rollup/rollup-linux-riscv64-gnu': 4.8.0 - '@rollup/rollup-linux-x64-gnu': 4.8.0 - '@rollup/rollup-linux-x64-musl': 4.8.0 - '@rollup/rollup-win32-arm64-msvc': 4.8.0 - '@rollup/rollup-win32-ia32-msvc': 4.8.0 - '@rollup/rollup-win32-x64-msvc': 4.8.0 + '@rollup/rollup-android-arm-eabi': 4.18.0 + '@rollup/rollup-android-arm64': 4.18.0 + '@rollup/rollup-darwin-arm64': 4.18.0 + '@rollup/rollup-darwin-x64': 4.18.0 + '@rollup/rollup-linux-arm-gnueabihf': 4.18.0 + '@rollup/rollup-linux-arm-musleabihf': 4.18.0 + '@rollup/rollup-linux-arm64-gnu': 4.18.0 + '@rollup/rollup-linux-arm64-musl': 4.18.0 + '@rollup/rollup-linux-powerpc64le-gnu': 4.18.0 + '@rollup/rollup-linux-riscv64-gnu': 4.18.0 + '@rollup/rollup-linux-s390x-gnu': 4.18.0 + '@rollup/rollup-linux-x64-gnu': 4.18.0 + '@rollup/rollup-linux-x64-musl': 4.18.0 + '@rollup/rollup-win32-arm64-msvc': 4.18.0 + '@rollup/rollup-win32-ia32-msvc': 4.18.0 + '@rollup/rollup-win32-x64-msvc': 4.18.0 fsevents: 2.3.3 - dev: true - /run-parallel@1.2.0: - resolution: {integrity: sha512-5l4VyZR86LZ/lDxZTR6jqL8AFE2S0IFLMP26AbjsLVADxHdhB/c0GUsH+y39UfCi3dzz8OlQuPmnaJOMoDHQBA==} + run-con@1.3.2: + dependencies: + deep-extend: 0.6.0 + ini: 4.1.3 + minimist: 1.2.8 + strip-json-comments: 3.1.1 + + run-parallel@1.2.0: dependencies: queue-microtask: 1.2.3 - dev: true - /safe-buffer@5.2.1: - resolution: {integrity: sha512-rp3So07KcdmmKbGvgaNxQSJr7bGVSVk5S9Eq1F+ppbRo70+YeaDxkw5Dd8NPN+GD6bjnYm2VuPuCXmpuYvmCXQ==} - dev: true + rw@1.3.3: {} - /sass@1.69.5: - resolution: {integrity: sha512-qg2+UCJibLr2LCVOt3OlPhr/dqVHWOa9XtZf2OjbLs/T4VPSJ00udtgJxH3neXZm+QqX8B+3cU7RaLqp1iVfcQ==} - engines: {node: '>=14.0.0'} - hasBin: true + sade@1.8.1: dependencies: - chokidar: 3.5.3 - immutable: 4.3.4 - source-map-js: 1.0.2 - dev: true - - /sax@1.3.0: - resolution: {integrity: sha512-0s+oAmw9zLl1V1cS9BtZN7JAd0cW5e0QH4W3LWEK6a4LaLEA2OTpGYWDY+6XasBLtz6wkm3u1xRw95mRuJ59WA==} - dev: true + mri: 1.2.0 - /search-insights@2.13.0: - resolution: {integrity: sha512-Orrsjf9trHHxFRuo9/rzm0KIWmgzE8RMlZMzuhZOJ01Rnz3D0YBAe+V6473t6/H6c7irs6Lt48brULAiRWb3Vw==} - dev: true + safer-buffer@2.1.2: {} - /section-matter@1.0.0: - resolution: {integrity: sha512-vfD3pmTzGpufjScBh50YHKzEu2lxBWhVEHsNGoEXmCmn2hKGfeNLYMzCJpe8cD7gqX7TJluOVpBkAequ6dgMmA==} - engines: {node: '>=4'} + sass@1.77.2: dependencies: - extend-shallow: 2.0.1 - kind-of: 6.0.3 - dev: true + chokidar: 3.6.0 + immutable: 4.3.6 + source-map-js: 1.2.0 + optional: true - /semver@7.5.4: - resolution: {integrity: sha512-1bCSESV6Pv+i21Hvpxp3Dx+pSD8lIPt8uVjRrxAUt/nbswYc+tK6Y2btiULjd4+fnq15PX+nqQDC7Oft7WkwcA==} - engines: {node: '>=10'} - hasBin: true - dependencies: - lru-cache: 6.0.0 - dev: true + sax@1.3.0: {} - /shebang-command@2.0.0: - resolution: {integrity: sha512-kHxr2zZpYtdmrN1qDjrrX/Z1rR1kG8Dx+gkpK1G4eXmvXswmcE1hTWBWYUzlraYw1/yZp6YuDY77YtvbN0dmDA==} - engines: {node: '>=8'} + search-insights@2.13.0: {} + + shebang-command@2.0.0: dependencies: shebang-regex: 3.0.0 - dev: true - /shebang-regex@3.0.0: - 
resolution: {integrity: sha512-7++dFhtcx3353uBaq8DDR4NuxBetBzC7ZQOhmTQInHEd6bSrXdiEyzCvG07Z44UYdLShWUyXt5M/yhz8ekcb1A==} - engines: {node: '>=8'} - dev: true + shebang-regex@3.0.0: {} + + shiki@1.6.0: + dependencies: + '@shikijs/core': 1.6.0 - /signal-exit@3.0.7: - resolution: {integrity: sha512-wnD2ZE+l+SPC/uoS0vXeE9L1+0wuaMqKlfz9AMUo38JsyLSBWSFcHR1Rri62LZc12vLr1gb3jl7iwQhgwpAbGQ==} - dev: true + signal-exit@4.1.0: {} - /signal-exit@4.1.0: - resolution: {integrity: sha512-bzyZ1e88w9O1iNJbKnOlvYTrWPDl46O1bG0D3XInv+9tkPrxrN8jUUTiFlDkkmKWgn1M6CfIA13SuGqOa9Korw==} - engines: {node: '>=14'} - dev: true + sitemap-ts@1.6.1: + dependencies: + '@antfu/utils': 0.7.6 + fast-glob: 3.3.1 + sitemap: 7.1.1 + xml-formatter: 3.5.0 - /sitemap@7.1.1: - resolution: {integrity: sha512-mK3aFtjz4VdJN0igpIJrinf3EO8U8mxOPsTBzSsy06UtjZQJ3YY3o3Xa7zSc5nMqcMrRwlChHZ18Kxg0caiPBg==} - engines: {node: '>=12.0.0', npm: '>=5.6.0'} - hasBin: true + sitemap@7.1.1: dependencies: '@types/node': 17.0.45 '@types/sax': 1.2.7 arg: 5.0.2 sax: 1.3.0 - dev: true - /slash@5.1.0: - resolution: {integrity: sha512-ZA6oR3T/pEyuqwMgAKT0/hAv8oAXckzbkmR0UkUosQ+Mc4RxGoJkRmwHgHufaenlyAgE1Mxgpdcrf75y6XcnDg==} - engines: {node: '>=14.16'} - dev: true + source-map-js@1.2.0: {} - /source-map-js@1.0.2: - resolution: {integrity: sha512-R0XvVJ9WusLiqTCEiGCmICCMplcCkIwwR11mOSD9CR5u+IXYdiseeEuXCVAjS54zqwkLcPNnmU4OeJ6tUrWhDw==} - engines: {node: '>=0.10.0'} - dev: true - - /sprintf-js@1.0.3: - resolution: {integrity: sha512-D9cPgkvLlV3t3IzL0D0YLvGA9Ahk4PcvVwUbN0dSGr1aP0Nrt4AEnTUbuGvquEC0mA64Gqt1fzirlRs5ibXx8g==} - dev: true + speakingurl@14.0.1: {} - /stdin-discarder@0.1.0: - resolution: {integrity: sha512-xhV7w8S+bUwlPTb4bAOUQhv8/cSS5offJuX8GQGq32ONF0ZtDWKfkdomM3HMRA+LhX6um/FZ0COqlwsjD53LeQ==} - engines: {node: ^12.20.0 || ^14.13.1 || >=16.0.0} + string-width@4.2.3: dependencies: - bl: 5.1.0 - dev: true + emoji-regex: 8.0.0 + is-fullwidth-code-point: 3.0.0 + strip-ansi: 6.0.1 - /string-width@6.1.0: - resolution: {integrity: sha512-k01swCJAgQmuADB0YIc+7TuatfNvTBVOoaUWJjTB9R4VJzR5vNWzf5t42ESVZFPS8xTySF7CAdV4t/aaIm3UnQ==} - engines: {node: '>=16'} + string-width@5.1.2: dependencies: eastasianwidth: 0.2.0 - emoji-regex: 10.3.0 + emoji-regex: 9.2.2 strip-ansi: 7.1.0 - dev: true - /string_decoder@1.3.0: - resolution: {integrity: sha512-hkRX8U1WjJFd8LsDJ2yQ/wWWxaopEsABU1XfkM8A+j0+85JAGppt16cr1Whg6KIbb4okU6Mql6BOj+uup/wKeA==} + strip-ansi@6.0.1: dependencies: - safe-buffer: 5.2.1 - dev: true + ansi-regex: 5.0.1 - /strip-ansi@7.1.0: - resolution: {integrity: sha512-iq6eVVI64nQQTRYq2KtEg2d2uU7LElhTJwsH4YzIHZshxlgZms/wIc4VoDQTlG/IvVIrBKG06CrZnp0qv7hkcQ==} - engines: {node: '>=12'} + strip-ansi@7.1.0: dependencies: ansi-regex: 6.0.1 - dev: true - /strip-bom-string@1.0.0: - resolution: {integrity: sha512-uCC2VHvQRYu+lMh4My/sFNmF2klFymLX1wHJeXnbEJERpV/ZsVuonzerjfrGpIGF7LBVa1O7i9kjiWvJiFck8g==} - engines: {node: '>=0.10.0'} - dev: true + strip-json-comments@3.1.1: {} - /strip-final-newline@3.0.0: - resolution: {integrity: sha512-dOESqjYr96iWYylGObzd39EuNTa5VJxyvVAEm5Jnh7KGo75V43Hk1odPQkNDyXNmUR6k+gEiDVXnjB8HJ3crXw==} - engines: {node: '>=12'} - dev: true + stylis@4.3.2: {} - /striptags@3.2.0: - resolution: {integrity: sha512-g45ZOGzHDMe2bdYMdIvdAfCQkCTDMGBazSw1ypMowwGIee7ZQ5dU0rBJ8Jqgl+jAKIv4dbeE1jscZq9wid1Tkw==} - dev: true + tabbable@6.2.0: {} - /to-fast-properties@2.0.0: - resolution: {integrity: sha512-/OaKK0xYrs3DmxRYqL/yDc+FxFUVYhDlXMhRmv3z915w2HF1tnN1omB354j8VUGO/hbRzyD6Y3sA7v7GS/ceog==} - engines: {node: '>=4'} - dev: true + to-fast-properties@2.0.0: {} - 
/to-regex-range@5.0.1: - resolution: {integrity: sha512-65P7iz6X5yEr1cwcgvQxbbIw7Uk3gOy5dIdtZ4rDveLqhrdJP+Li/Hx6tyK0NEb+2GCyneCMJiGqrADCSNk8sQ==} - engines: {node: '>=8.0'} + to-regex-range@5.0.1: dependencies: is-number: 7.0.0 - dev: true - /ts-debounce@4.0.0: - resolution: {integrity: sha512-+1iDGY6NmOGidq7i7xZGA4cm8DAa6fqdYcvO5Z6yBevH++Bdo9Qt/mN0TzHUgcCcKv1gmh9+W5dHqz8pMWbCbg==} - dev: true + toml@3.0.0: {} - /uc.micro@1.0.6: - resolution: {integrity: sha512-8Y75pvTYkLJW2hWQHXxoqRgV7qb9B+9vFEtidML+7koHUFapnVJAZ6cKs+Qjz5Aw3aZWHMC6u0wJE3At+nSGwA==} - dev: true + ts-dedent@2.2.0: {} - /undici-types@5.26.5: - resolution: {integrity: sha512-JlCMO+ehdEIKqlFxk6IfVoAUVmgz7cU7zD/h9XZ0qzeosSHmUJVOzSQvvYSYWXkFXC+IfLKSIffhv0sVZup6pA==} - dev: true + uc.micro@2.1.0: {} - /unicorn-magic@0.1.0: - resolution: {integrity: sha512-lRfVq8fE8gz6QMBuDM6a+LO3IAzTi05H6gCVaUpir2E1Rwpo4ZUog45KpNXKC/Mn3Yb9UDuHumeFTo9iV/D9FQ==} - engines: {node: '>=18'} - dev: true + undici-types@5.26.5: {} - /universalify@2.0.1: - resolution: {integrity: sha512-gptHNQghINnc/vTGIk0SOFGFNXw7JVrlRUtConJRlvaw6DuX0wO5Jeko9sWrMBhh+PsYAZ7oXAiOnf/UKogyiw==} - engines: {node: '>= 10.0.0'} - dev: true + unist-util-stringify-position@3.0.3: + dependencies: + '@types/unist': 2.0.10 - /upath@2.0.1: - resolution: {integrity: sha512-1uEe95xksV1O0CYKXo8vQvN1JEbtJp7lb7C5U9HMsIp6IVwntkH/oNUzyVNQSd4S1sYk2FpSSW44FqMc8qee5w==} - engines: {node: '>=4'} - dev: true + uuid@9.0.1: {} - /update-browserslist-db@1.0.13(browserslist@4.22.2): - resolution: {integrity: sha512-xebP81SNcPuNpPP3uzeW1NYXxI3rxyJzF3pD6sH4jE7o/IX+WtSpwnVU+qIsDPyk0d3hmFQ7mjqc6AtV604hbg==} - hasBin: true - peerDependencies: - browserslist: '>= 4.21.0' + uvu@0.5.6: dependencies: - browserslist: 4.22.2 - escalade: 3.1.1 - picocolors: 1.0.0 - dev: true - - /util-deprecate@1.0.2: - resolution: {integrity: sha512-EPD5q1uXyFxJpCrLnCc1nHnq3gOa6DZBocAIiI2TaSCA7VCJ1UJDMagCzIkXNsUYfD1daK//LTEQ8xiIbrHtcw==} - dev: true + dequal: 2.0.3 + diff: 5.2.0 + kleur: 4.1.5 + sade: 1.8.1 - /vite@5.0.12: - resolution: {integrity: sha512-4hsnEkG3q0N4Tzf1+t6NdN9dg/L3BM+q8SWgbSPnJvrgH2kgdyzfVJwbR1ic69/4uMJJ/3dqDZZE5/WwqW8U1w==} - engines: {node: ^18.0.0 || >=20.0.0} - hasBin: true - peerDependencies: - '@types/node': ^18.0.0 || >=20.0.0 - less: '*' - lightningcss: ^1.21.0 - sass: '*' - stylus: '*' - sugarss: '*' - terser: ^5.4.0 - peerDependenciesMeta: - '@types/node': - optional: true - less: - optional: true - lightningcss: - optional: true - sass: - optional: true - stylus: - optional: true - sugarss: - optional: true - terser: - optional: true + vite@5.2.11(@types/node@20.12.12)(sass@1.77.2): dependencies: - esbuild: 0.19.9 - postcss: 8.4.32 - rollup: 4.8.0 + esbuild: 0.20.2 + postcss: 8.4.38 + rollup: 4.18.0 optionalDependencies: + '@types/node': 20.12.12 fsevents: 2.3.3 - dev: true - - /vue-demi@0.14.6(vue@3.3.11): - resolution: {integrity: sha512-8QA7wrYSHKaYgUxDA5ZC24w+eHm3sYCbp0EzcDwKqN3p6HqtTCGR/GVsPyZW92unff4UlcSh++lmqDWN3ZIq4w==} - engines: {node: '>=12'} - hasBin: true - requiresBuild: true - peerDependencies: - '@vue/composition-api': ^1.0.0-rc.1 - vue: ^3.0.0-0 || ^2.6.0 - peerDependenciesMeta: - '@vue/composition-api': - optional: true - dependencies: - vue: 3.3.11 - dev: true - - /vue-router@4.2.5(vue@3.3.11): - resolution: {integrity: sha512-DIUpKcyg4+PTQKfFPX88UWhlagBEBEfJ5A8XDXRJLUnZOvcpMF8o/dnL90vpVkGaPbjvXazV/rC1qBKrZlFugw==} - peerDependencies: - vue: ^3.2.0 - dependencies: - '@vue/devtools-api': 6.5.1 - vue: 3.3.11 - dev: true + sass: 1.77.2 - /vue@3.3.11: - resolution: {integrity: 
sha512-d4oBctG92CRO1cQfVBZp6WJAs0n8AK4Xf5fNjQCBeKCvMI1efGQ5E3Alt1slFJS9fZuPcFoiAiqFvQlv1X7t/w==} - peerDependencies: - typescript: '*' - peerDependenciesMeta: - typescript: - optional: true - dependencies: - '@vue/compiler-dom': 3.3.11 - '@vue/compiler-sfc': 3.3.11 - '@vue/runtime-dom': 3.3.11 - '@vue/server-renderer': 3.3.11(vue@3.3.11) - '@vue/shared': 3.3.11 - dev: true - - /vuepress-plugin-sitemap2@2.0.0-rc.4(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-zi57grbyAFL54HUZNmmAWELYgwPsqa8p63HkEBSpXiQEa3JbYumAXHPZp4sIBGlBxcF8X34GtddrVw9FDlCtZA==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - deprecated: Please use @vuepress/plugin-sitemap@v2 instead - peerDependencies: - vuepress: 2.0.0-rc.0 - vuepress-vite: 2.0.0-rc.0 - vuepress-webpack: 2.0.0-rc.0 - peerDependenciesMeta: - vuepress: - optional: true - vuepress-vite: - optional: true - vuepress-webpack: - optional: true - dependencies: - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - sitemap: 7.1.1 - vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - vuepress-shared: 2.0.0-rc.4(vuepress@2.0.0-rc.0) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /vuepress-shared@2.0.0-rc.4(vuepress@2.0.0-rc.0): - resolution: {integrity: sha512-YndYftQ9AUdWWESZHFZ7QjuUGXqgVayHzu3Qfar9GWr45NP2ZW7edKN4adU2/bOiokYG1Rfj47dgMUrRxEgqhg==} - engines: {node: '>=18.16.0', npm: '>=8', pnpm: '>=7', yarn: '>=2'} - peerDependencies: - vuepress: 2.0.0-rc.0 - vuepress-vite: 2.0.0-rc.0 - vuepress-webpack: 2.0.0-rc.0 - peerDependenciesMeta: - vuepress: - optional: true - vuepress-vite: - optional: true - vuepress-webpack: - optional: true + vitepress-plugin-mermaid@2.0.16(mermaid@10.9.1)(vitepress@1.1.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.38)(sass@1.77.2)(search-insights@2.13.0)): dependencies: - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/shared': 2.0.0-rc.0 - '@vuepress/utils': 2.0.0-rc.0 - '@vueuse/core': 10.7.0(vue@3.3.11) - cheerio: 1.0.0-rc.12 - dayjs: 1.11.10 - execa: 8.0.1 - fflate: 0.8.1 - gray-matter: 4.0.3 - semver: 7.5.4 - striptags: 3.2.0 - vue: 3.3.11 - vue-router: 4.2.5(vue@3.3.11) - vuepress: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - transitivePeerDependencies: - - '@vue/composition-api' - - supports-color - - typescript - dev: true - - /vuepress-vite@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11): - resolution: {integrity: sha512-+2XBejeiskPyr2raBeA2o4uDFDsjtadpUVmtio3qqFtQpOhidz/ORuiTLr2UfLtFn1ASIHP6Vy2YjQ0e/TeUVw==} - engines: {node: '>=18.16.0'} - hasBin: true - peerDependencies: - '@vuepress/client': 2.0.0-rc.0 - vue: ^3.3.4 - dependencies: - '@vuepress/bundler-vite': 2.0.0-rc.0 - '@vuepress/cli': 2.0.0-rc.0 - '@vuepress/client': 2.0.0-rc.0 - '@vuepress/core': 2.0.0-rc.0 - '@vuepress/theme-default': 2.0.0-rc.0 - vue: 3.3.11 + mermaid: 10.9.1 + vitepress: 1.1.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.38)(sass@1.77.2)(search-insights@2.13.0) + optionalDependencies: + '@mermaid-js/mermaid-mindmap': 9.3.0 + + vitepress@1.1.4(@algolia/client-search@4.23.3)(@types/node@20.12.12)(postcss@8.4.38)(sass@1.77.2)(search-insights@2.13.0): + dependencies: + '@docsearch/css': 3.6.0 + '@docsearch/js': 3.6.0(@algolia/client-search@4.23.3)(search-insights@2.13.0) + '@shikijs/core': 1.6.0 + '@shikijs/transformers': 1.6.0 + '@types/markdown-it': 14.1.1 + '@vitejs/plugin-vue': 5.0.4(vite@5.2.11(@types/node@20.12.12)(sass@1.77.2))(vue@3.4.27) + '@vue/devtools-api': 7.2.1(vue@3.4.27) 
+ '@vueuse/core': 10.9.0(vue@3.4.27) + '@vueuse/integrations': 10.9.0(focus-trap@7.5.4)(vue@3.4.27) + focus-trap: 7.5.4 + mark.js: 8.11.1 + minisearch: 6.3.0 + shiki: 1.6.0 + vite: 5.2.11(@types/node@20.12.12)(sass@1.77.2) + vue: 3.4.27 + optionalDependencies: + postcss: 8.4.38 transitivePeerDependencies: + - '@algolia/client-search' - '@types/node' + - '@types/react' - '@vue/composition-api' + - async-validator + - axios + - change-case + - drauu + - fuse.js + - idb-keyval + - jwt-decode - less - lightningcss + - nprogress + - qrcode + - react + - react-dom - sass - - sass-loader + - search-insights + - sortablejs - stylus - sugarss - - supports-color - terser - - ts-node - typescript - dev: true + - universal-cookie - /vuepress@2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11): - resolution: {integrity: sha512-sydt/B7+pIw926G5PntYmptLkC5o2buXKh+WR1+P2KnsvkXU+UGnQrJJ0FBvu/4RNuY99tkUZd59nyPhEmRrCg==} - engines: {node: '>=18.16.0'} - hasBin: true + vue-demi@0.14.7(vue@3.4.27): dependencies: - vuepress-vite: 2.0.0-rc.0(@vuepress/client@2.0.0-rc.0)(vue@3.3.11) - transitivePeerDependencies: - - '@types/node' - - '@vue/composition-api' - - '@vuepress/client' - - less - - lightningcss - - sass - - sass-loader - - stylus - - sugarss - - supports-color - - terser - - ts-node - - typescript - - vue - dev: true + vue: 3.4.27 - /which@2.0.2: - resolution: {integrity: sha512-BLI3Tl1TW3Pvl70l3yq3Y64i+awpwXqsGBYWkkqMtnbXgrMD+yj7rhW0kuEDxzJaYXGjEW5ogapKNMEKNMjibA==} - engines: {node: '>= 8'} - hasBin: true + vue@3.4.27: + dependencies: + '@vue/compiler-dom': 3.4.27 + '@vue/compiler-sfc': 3.4.27 + '@vue/runtime-dom': 3.4.27 + '@vue/server-renderer': 3.4.27(vue@3.4.27) + '@vue/shared': 3.4.27 + + web-worker@1.3.0: {} + + which@2.0.2: dependencies: isexe: 2.0.0 - dev: true - /yallist@4.0.0: - resolution: {integrity: sha512-3wdGidZyq5PB084XLES5TpOSRA3wjXAlIWMhum2kRcv/41Sn2emQ0dycQW4uZXLejwKvg6EsvbdlVL+FYEct7A==} - dev: true + wrap-ansi@7.0.0: + dependencies: + ansi-styles: 4.3.0 + string-width: 4.2.3 + strip-ansi: 6.0.1 - /yaml@2.3.4: - resolution: {integrity: sha512-8aAvwVUSHpfEqTQ4w/KMlf3HcRdt50E5ODIQJBw1fQ5RL34xabzxtUlzTXVqc4rkZsPbvrXKWnABCD7kWSmocA==} - engines: {node: '>= 14'} - dev: true + wrap-ansi@8.1.0: + dependencies: + ansi-styles: 6.2.1 + string-width: 5.1.2 + strip-ansi: 7.1.0 + + xml-formatter@3.5.0: + dependencies: + xml-parser-xo: 4.1.1 + + xml-parser-xo@4.1.1: {} diff --git a/runatlantis.io/.vitepress/components/Banner.vue b/runatlantis.io/.vitepress/components/Banner.vue new file mode 100644 index 0000000000..7a3e5ffc3d --- /dev/null +++ b/runatlantis.io/.vitepress/components/Banner.vue @@ -0,0 +1,95 @@ + + + + + + + diff --git a/runatlantis.io/.vitepress/components/shims.d.ts b/runatlantis.io/.vitepress/components/shims.d.ts new file mode 100644 index 0000000000..d1f3133128 --- /dev/null +++ b/runatlantis.io/.vitepress/components/shims.d.ts @@ -0,0 +1,5 @@ +declare module '*.vue' { + import type { DefineComponent } from 'vue'; + const component: DefineComponent; + export default component; +} diff --git a/runatlantis.io/.vitepress/config.ts b/runatlantis.io/.vitepress/config.ts new file mode 100644 index 0000000000..2a6ae1cf60 --- /dev/null +++ b/runatlantis.io/.vitepress/config.ts @@ -0,0 +1,133 @@ +import { generateSitemap as sitemap } from "sitemap-ts" +import footnote from 'markdown-it-footnote' +import { defineConfig } from 'vitepress'; +import * as navbars from "./navbars"; +import * as sidebars from "./sidebars"; +import { withMermaid } from "vitepress-plugin-mermaid"; + +// 
https://vitepress.dev/reference/site-config
+const config = defineConfig({
+  title: 'Atlantis',
+  description: 'Atlantis: Terraform Pull Request Automation',
+  lang: 'en-US',
+  lastUpdated: true,
+  locales: {
+    root: {
+      label: 'English',
+      lang: 'en-US',
+      themeConfig: {
+        nav: navbars.en,
+        sidebar: sidebars.en,
+      },
+    },
+  },
+  themeConfig: {
+    // https://vitepress.dev/reference/default-theme-config
+    editLink: {
+      pattern: 'https://github.com/runatlantis/atlantis/edit/main/runatlantis.io/:path'
+    },
+    // headline "depth" the right nav will show for its TOC
+    //
+    // https://vitepress.dev/reference/frontmatter-config#outline
+    outline: [2, 3],
+    search: {
+      provider: 'algolia',
+      options: {
+        // We internally discussed how this API key is exposed in the code and decided
+        // that it is a non-issue because this API key can easily be extracted by
+        // looking at the browser dev tools since the key is used in the API requests.
+        apiKey: '3b733dff1539ca3a210775860301fa86',
+        indexName: 'runatlantis',
+        appId: 'BH4D9OD16A',
+        locales: {
+          '/': {
+            placeholder: 'Search Documentation',
+            translations: {
+              button: {
+                buttonText: 'Search Documentation',
+              },
+            },
+          },
+        },
+      }
+    },
+    socialLinks: [
+      { icon: "slack", link: "https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw" },
+      { icon: "twitter", link: "https://twitter.com/runatlantis" },
+      { icon: "github", link: "https://github.com/runatlantis/atlantis" },
+    ],
+  },
+  // SEO Improvement - sitemap.xml & robots.txt
+  buildEnd: async ({ outDir }) => {
+    sitemap({
+      hostname: "https://www.runatlantis.io/",
+      outDir: outDir,
+      generateRobotsTxt: true,
+    })
+  },
+  head: [
+    ['link', { rel: 'icon', type: 'image/png', href: '/favicon-196x196.png', sizes: '196x196' }],
+    ['link', { rel: 'icon', type: 'image/png', href: '/favicon-96x96.png', sizes: '96x96' }],
+    ['link', { rel: 'icon', type: 'image/png', href: '/favicon-32x32.png', sizes: '32x32' }],
+    ['link', { rel: 'icon', type: 'image/png', href: '/favicon-16x16.png', sizes: '16x16' }],
+    ['link', { rel: 'icon', type: 'image/png', href: '/favicon-128.png', sizes: '128x128' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '57x57', href: '/apple-touch-icon-57x57.png' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '114x114', href: '/apple-touch-icon-114x114.png' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '72x72', href: '/apple-touch-icon-72x72.png' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '144x144', href: '/apple-touch-icon-144x144.png' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '60x60', href: '/apple-touch-icon-60x60.png' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '120x120', href: '/apple-touch-icon-120x120.png' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '76x76', href: '/apple-touch-icon-76x76.png' }],
+    ['link', { rel: 'apple-touch-icon-precomposed', sizes: '152x152', href: '/apple-touch-icon-152x152.png' }],
+    ['meta', { name: 'msapplication-TileColor', content: '#FFFFFF' }],
+    ['meta', { name: 'msapplication-TileImage', content: '/mstile-144x144.png' }],
+    ['meta', { name: 'msapplication-square70x70logo', content: '/mstile-70x70.png' }],
+    ['meta', { name: 'msapplication-square150x150logo', content: '/mstile-150x150.png' }],
+    ['meta', { name: 'msapplication-wide310x150logo', content: '/mstile-310x150.png' }],
+    ['meta', { name: 'msapplication-square310x310logo', content: '/mstile-310x310.png' }],
+    ['link', { rel: 'stylesheet', sizes: '152x152', href: 'https://fonts.googleapis.com/css?family=Lato:400,900' }],
+    ['meta', { name: 'google-site-verification', content: 'kTnsDBpHqtTNY8oscYxrQeeiNml2d2z-03Ct9wqeCeE' }],
+    // google analytics
+    [
+      'script',
+      { async: '', src: 'https://www.googletagmanager.com/gtag/js?id=UA-6850151-3' }
+    ],
+    [
+      'script',
+      {},
+      `window.dataLayer = window.dataLayer || [];
+      function gtag(){dataLayer.push(arguments);}
+      gtag('js', new Date());
+
+      gtag('config', 'UA-6850151-3');`
+    ],
+    [
+      'script',
+      { id: 'restore-banner-preference' },
+      `
+      (() => {
+        const restore = (key, cls, def = false) => {
+          const saved = localStorage.getItem(key);
+          if (saved ? saved !== 'false' && new Date() < saved : def) {
+            document.documentElement.classList.add(cls);
+          }
+        };
+        restore('survey-banner', 'banner-dismissed');
+      })();`,
+    ]
+  ],
+  markdown: {
+    config: (md) => {
+      md.use(footnote)
+    }
+  },
+  vite: {
+    server: {
+      fs: {
+        cachedChecks: false,
+      },
+    }
+  }
+})
+
+export default withMermaid(config)
diff --git a/runatlantis.io/.vitepress/navbars.ts b/runatlantis.io/.vitepress/navbars.ts
new file mode 100644
index 0000000000..bedf951053
--- /dev/null
+++ b/runatlantis.io/.vitepress/navbars.ts
@@ -0,0 +1,9 @@
+const en = [
+  { text: "Home", link: "/" },
+  { text: "Guide", link: "/guide" },
+  { text: "Docs", link: "/docs" },
+  { text: "Contributing", link: "/contributing" },
+  { text: "Blog", link: "/blog" },
+];
+
+export { en };
diff --git a/runatlantis.io/.vitepress/sidebars.ts b/runatlantis.io/.vitepress/sidebars.ts
new file mode 100644
index 0000000000..0dfc306003
--- /dev/null
+++ b/runatlantis.io/.vitepress/sidebars.ts
@@ -0,0 +1,164 @@
+const en = [
+  {
+    text: "Guide",
+    link: "/guide",
+    collapsed: false,
+    items: [
+      { text: "Test Drive", link: "/guide/test-drive" },
+      { text: "Testing locally", link: "/guide/testing-locally" },
+    ],
+  },
+  {
+    text: "Docs",
+    link: "/docs",
+    collapsed: true,
+    items: [
+      {
+        text: "Installing Atlantis",
+        collapsed: true,
+        items: [
+          { text: "Installing Guide", link: "/docs/installation-guide" },
+          { text: "Requirements", link: "/docs/requirements" },
+          { text: "Git Host Access Credentials", link: "/docs/access-credentials" },
+          { text: "Webhook Secrets", link: "/docs/webhook-secrets" },
+          { text: "Deployment", link: "/docs/deployment" },
+          { text: "Configuring Webhooks", link: "/docs/configuring-webhooks" },
+          { text: "Provider Credentials", link: "/docs/provider-credentials" },
+        ]
+      },
+      {
+        text: "Configuring Atlantis",
+        collapsed: true,
+        items: [
+          { text: "Overview", link: "/docs/configuring-atlantis" },
+          { text: "Server Configuration", link: "/docs/server-configuration" },
+          { text: "Server Side Repo Config", link: "/docs/server-side-repo-config" },
+          { text: "Pre Workflow Hooks", link: "/docs/pre-workflow-hooks" },
+          { text: "Post Workflow Hooks", link: "/docs/post-workflow-hooks" },
+          { text: "Conftest Policy Checking", link: "/docs/policy-checking" },
+          { text: "Custom Workflows", link: "/docs/custom-workflows" },
+          { text: "Repo Level atlantis.yaml", link: "/docs/repo-level-atlantis-yaml" },
+          { text: "Upgrading atlantis.yaml", link: "/docs/upgrading-atlantis-yaml" },
+          { text: "Command Requirements", link: "/docs/command-requirements" },
+          { text: "Checkout Strategy", link: "/docs/checkout-strategy" },
+          { text: "Terraform Versions", link: "/docs/terraform-versions" },
+          { text: "Terraform Cloud", link: "/docs/terraform-cloud" },
+          { text: "Using Slack Hooks", link: "/docs/using-slack-hooks" },
+          { text: "Stats", link: "/docs/stats" },
text: "FAQ", link: "/docs/faq" }, + ] + }, + { + text: "Using Atlantis", + collapsed: true, + items: [ + { text: "Overview", link: "/docs/using-atlantis" }, + { text: "API endpoints", link: "/docs/api-endpoints" }, + ] + }, + { + text: 'How Atlantis Works', + collapsed: true, + items: [ + { text: 'Overview', link: '/docs/how-atlantis-works', }, + { text: 'Locking', link: '/docs/locking', }, + { text: 'Autoplanning', link: '/docs/autoplanning', }, + { text: 'Automerging', link: '/docs/automerging', }, + { text: 'Security', link: '/docs/security', }, + ] + }, + { + text: 'Real-time Terraform Logs', + link: '/docs/streaming-logs', + }, + { + text: 'Troubleshooting', + collapsed: true, + items: [ + { text: 'HTTPS, SSL, TLS', 'link': '/docs/troubleshooting-https', }, + ] + }, + ], + }, + { + text: "Contributing", + link: "/contributing", + collapsed: false, + items: [ + { + text: 'Implementation Details', + items: [ + { text: "Events Controller", link: "/contributing/events-controller" }, + ] + }, + { text: "Glossary", link: "/contributing/glossary" }, + ] + + }, + { + text: "Blog", + link: "/blog", + collapsed: false, + items: [ + { + text: "2024", + collapsed: true, + items: [ + { + text: "Integrating Atlantis with OpenTofu", + link: "/blog/2024/integrating-atlantis-with-opentofu" + }, + { + text: "Atlantis User Survey Results", + link: "/blog/2024/april-2024-survey-results" + }, + ] + }, + { + text: "2019", + collapsed: true, + items: [ + { + text: "4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage", + link: "/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage" + }, + ] + }, + { + text: "2018", + collapsed: true, + items: [ + { + text: "I'm Joining HashiCorp!", + link: "/blog/2018/joining-hashicorp" + }, + { + text: "Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too", + link: "/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too" + }, + { + text: "Atlantis 0.4.4 Now Supports Bitbucket", + link: "/blog/2018/atlantis-0-4-4-now-supports-bitbucket" + }, + { + text: "Terraform And The Dangers Of Applying Locally", + link: "/blog/2018/terraform-and-the-dangers-of-applying-locally" + }, + { + text: "Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform", + link: "/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform" + }, + ] + }, + { + text: "2017", + collapsed: true, + items: [ + { text: "Introducing Atlantis", link: "/blog/2017/introducing-atlantis" }, + ] + }, + ] + } +] + +export { en } diff --git a/runatlantis.io/.vitepress/theme/index.ts b/runatlantis.io/.vitepress/theme/index.ts new file mode 100644 index 0000000000..395964ae4b --- /dev/null +++ b/runatlantis.io/.vitepress/theme/index.ts @@ -0,0 +1,11 @@ +import DefaultTheme from "vitepress/theme"; +import { defineAsyncComponent, h } from 'vue'; + +export default { + ...DefaultTheme, + Layout() { + return h(DefaultTheme.Layout, null, { + 'layout-top': () => h(defineAsyncComponent(() => import('../components/Banner.vue'))) + }); + } +}; diff --git a/runatlantis.io/.vuepress/config.js b/runatlantis.io/.vuepress/config.js deleted file mode 100644 index 9658fc29a5..0000000000 --- a/runatlantis.io/.vuepress/config.js +++ /dev/null @@ -1,194 +0,0 @@ -import { googleAnalyticsPlugin } from '@vuepress/plugin-google-analytics' -import { docsearchPlugin } from '@vuepress/plugin-docsearch' -import { getDirname, path } from '@vuepress/utils' -import { defaultTheme, defineUserConfig } from 'vuepress' 
-import { sitemapPlugin } from 'vuepress-plugin-sitemap2'; - -const __dirname = getDirname(import.meta.url) - -export default defineUserConfig({ - alias: { - '@theme/Home.vue': path.resolve(__dirname, './theme/components/Home.vue'), - }, - locales: { - '/': { - lang: 'en-US', - title: 'Atlantis', - description: 'Atlantis: Terraform Pull Request Automation', - }, -/* - '/es/': { - lang: 'es-ES', - title: 'Atlantis', - description: 'Atlantis: AutomatizaciÃŗn de Pull Requests para Terraform', - }, -*/ - }, - plugins: [ - googleAnalyticsPlugin({ - id: 'UA-6850151-3', - }), - sitemapPlugin({ - hostname: 'https://runatlantis.io', - }), - docsearchPlugin({ - // We internally discussed how this API key is exposed in the code and decided - // that it is a non-issue because this API key can easily be extracted by - // looking at the browser dev tools since the key is used in the API requests. - apiKey: '3b733dff1539ca3a210775860301fa86', - indexName: 'runatlantis', - appId: 'BH4D9OD16A', - locales: { - '/': { - placeholder: 'Search Documentation', - translations: { - button: { - buttonText: 'Search Documentation', - }, - }, - }, - }, - }), - ], - head: [ - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-196x196.png', sizes: '196x196' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-96x96.png', sizes: '96x96' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-32x32.png', sizes: '32x32' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-16x16.png', sizes: '16x16' }], - ['link', { rel: 'icon', type: 'image/png', href: '/favicon-128.png', sizes: '128x128' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '57x57', href: '/apple-touch-icon-57x57.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '114x114', href: '/apple-touch-icon-114x114.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '72x72', href: '/apple-touch-icon-72x72.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '144x144', href: '/apple-touch-icon-144x144.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '60x60', href: '/apple-touch-icon-60x60.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '120x120', href: '/apple-touch-icon-120x120.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '76x76', href: '/apple-touch-icon-76x76.png' }], - ['link', { rel: 'apple-touch-icon-precomposed', sizes: '152x152', href: '/apple-touch-icon-152x152.png' }], - ['meta', { name: 'msapplication-TileColor', content: '#FFFFFF' }], - ['meta', { name: 'msapplication-TileImage', content: '/mstile-144x144.png' }], - ['meta', { name: 'msapplication-square70x70logo', content: '/mstile-70x70.png' }], - ['meta', { name: 'msapplication-square150x150logo', content: '/mstile-150x150.png' }], - ['meta', { name: 'msapplication-wide310x150logo', content: '/mstile-310x150.png' }], - ['meta', { name: 'msapplication-square310x310logo', content: '/mstile-310x310.png' }], - ['link', { rel: 'stylesheet', sizes: '152x152', href: 'https://fonts.googleapis.com/css?family=Lato:400,900' }], - ['meta', { name: 'google-site-verification', content: 'kTnsDBpHqtTNY8oscYxrQeeiNml2d2z-03Ct9wqeCeE' }], - ], - themePlugins: { - activeHeaderLinks: false, - }, - theme: defaultTheme({ - docsBranch: "main", - logo: '/hero.png', - locales: { - '/': { - selectLanguageName: 'English', - navbar: [ - { text: 'Home', link: '/' }, - { text: 'Guide', link: '/guide/' }, - { text: 'Docs', link: '/docs/' }, - { text: 'Blog', link: 
'https://medium.com/runatlantis' }, - ], - }, -/* - '/es/': { - selectLanguageName: 'Spanish', - navbar: [ - { text: 'Home', link: '/es/' }, - { text: 'Guide', link: '/es/guide/' }, - { text: 'Docs', link: '/es/docs/' }, - { text: 'Blog', link: 'https://medium.com/runatlantis' }, - ], - }, -*/ - }, - sidebar: { - '/guide/': [ - '', - 'test-drive', - 'testing-locally', - ], - '/docs/': [ - { - text: 'Installing Atlantis', - collapsible: true, - children: [ - 'installation-guide', - 'requirements', - 'access-credentials', - 'webhook-secrets', - 'deployment', - 'configuring-webhooks', - 'provider-credentials', - ] - }, - { - text: 'Configuring Atlantis', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'configuring-atlantis', - }, - 'server-configuration', - 'server-side-repo-config', - 'pre-workflow-hooks', - 'post-workflow-hooks', - 'policy-checking', - 'custom-workflows', - 'repo-level-atlantis-yaml', - 'upgrading-atlantis-yaml', - 'command-requirements', - 'checkout-strategy', - 'terraform-versions', - 'terraform-cloud', - 'using-slack-hooks', - 'stats', - 'faq', - ] - }, - { - text: 'Using Atlantis', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'using-atlantis', - }, - 'api-endpoints', - ] - }, - { - text: 'How Atlantis Works', - collapsible: true, - children: [ - { - text: 'Overview', - link: 'how-atlantis-works', - }, - 'locking', - 'autoplanning', - 'automerging', - 'security', - ] - }, - { - text: 'Real-time Terraform Logs', - collapsible: true, - children: [ - 'streaming-logs', - ] - }, - { - text: 'Troubleshooting', - collapsible: true, - children: [ - 'troubleshooting-https', - ] - } - ] - }, - repo: 'runatlantis/atlantis', - docsDir: 'runatlantis.io', - editLink: true, - }) -}) diff --git a/runatlantis.io/.vuepress/public/_redirects b/runatlantis.io/.vuepress/public/_redirects deleted file mode 100644 index a025dc528b..0000000000 --- a/runatlantis.io/.vuepress/public/_redirects +++ /dev/null @@ -1,2 +0,0 @@ -/guide/getting-started.html /guide/ -/docs/atlantis-yaml-reference.html /docs/repo-level-atlantis-yaml.html diff --git a/runatlantis.io/.vuepress/public/certificate.svg b/runatlantis.io/.vuepress/public/certificate.svg deleted file mode 100644 index 17df5278b1..0000000000 --- a/runatlantis.io/.vuepress/public/certificate.svg +++ /dev/null @@ -1,59 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/checkmark.svg b/runatlantis.io/.vuepress/public/checkmark.svg deleted file mode 100644 index ccdc2f7404..0000000000 --- a/runatlantis.io/.vuepress/public/checkmark.svg +++ /dev/null @@ -1,12 +0,0 @@ - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/coding.svg b/runatlantis.io/.vuepress/public/coding.svg deleted file mode 100644 index 1f67eec776..0000000000 --- a/runatlantis.io/.vuepress/public/coding.svg +++ /dev/null @@ -1,42 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/list.svg b/runatlantis.io/.vuepress/public/list.svg deleted file mode 100644 index 8c5735e658..0000000000 --- a/runatlantis.io/.vuepress/public/list.svg +++ /dev/null @@ -1,49 +0,0 @@ - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/mobile-workflow-min.png b/runatlantis.io/.vuepress/public/mobile-workflow-min.png deleted file mode 100644 index b8eea33cc2..0000000000 Binary files 
a/runatlantis.io/.vuepress/public/mobile-workflow-min.png and /dev/null differ diff --git a/runatlantis.io/.vuepress/public/powerful.svg b/runatlantis.io/.vuepress/public/powerful.svg deleted file mode 100644 index e179434b71..0000000000 --- a/runatlantis.io/.vuepress/public/powerful.svg +++ /dev/null @@ -1,18 +0,0 @@ - - - - - - - - - - - - - - - - - diff --git a/runatlantis.io/.vuepress/public/workflow-min.png b/runatlantis.io/.vuepress/public/workflow-min.png deleted file mode 100644 index 1c9e383c70..0000000000 Binary files a/runatlantis.io/.vuepress/public/workflow-min.png and /dev/null differ diff --git a/runatlantis.io/.vuepress/styles/index.scss b/runatlantis.io/.vuepress/styles/index.scss deleted file mode 100644 index 6daf19f232..0000000000 --- a/runatlantis.io/.vuepress/styles/index.scss +++ /dev/null @@ -1,243 +0,0 @@ -// https://v2.vuepress.vuejs.org/reference/default-theme/styles.html#style-file - -// colors -$textColor: var(--c-text); -$borderColor: var(--c-border); -$buttonTextColor: var(--c-badge-danger-text); -$buttonColor: var(--c-brand); -$buttonHoverColor: var(--c-brand-light); -$darkBackground: var(--c-bg-light); -$darkBackgroundBorder: var(--c-border-dark); - -// layout -$navbarHeight: 3.6rem; -$sidebarWidth: 20rem; -$contentWidth: 740px; - -// responsive breakpoints -$MQNarrow: 959px; -$MQMobile: 719px; -$MQMobileNarrow: 419px; - -$homeWidth: 960px; - -.container { - padding-top: 3.6rem; -} - -.home { - padding: 0 2rem; - max-width: $homeWidth; - margin: 0px auto 80px; - .hero { - text-align: center; - img { - max-height: 280px; - display: block; - margin: 3rem auto 1.5rem; - } - h1 { - font-size: 3rem; - } - h1, .description, .action { - margin: 1.8rem auto; - } - .description { - max-width: 35rem; - font-family: Lato, sans-serif; - font-size: 1.9rem; - line-height: 1.3; - } - .action { - display: inline; - } - .action-button { - display: inline-block; - font-size: 1.2rem; - color: $buttonTextColor; - cursor: pointer; - background-color: $buttonColor; - padding: 0.8rem 1.6rem; - border-radius: 4px; - transition: background-color .1s ease; - box-sizing: border-box; - margin: 0 10px; - &:hover { - background-color: $buttonHoverColor; - } - } - } - h2 { - border-bottom: none; - } - .features { - border-top: 1px solid $borderColor; - padding: 1.2rem 0; - margin-top: 0; - } - .footer { - padding: 2.5rem; - border-top: 1px solid $borderColor; - text-align: center; - } -} - -.getting-started-footer { - padding: 2.5rem 0; - margin: 0 auto; -} - -.workflow-container { - border-top: 2px solid $borderColor; -} - -.workflow { - text-align: center; - margin: 80px auto; - max-width: $homeWidth; - img { - width: 100%; - } - .mobile { - display: none; - } -} - -.benefits-container { - border-top: 1px solid $darkBackgroundBorder; - .benefit-container { - border-bottom: 1px solid $darkBackgroundBorder; - .title { - padding-top: 40px; - text-align: center; - } - &.-dark { - background-color: $darkBackground; - } - .benefit { - max-width: $homeWidth; - margin: 0 auto; - display: flex; - flex-flow: row wrap; - align-items: center; - .item { - flex-basis: 50%; - flex-grow: 1; - min-width: 250px; - .image { - padding: 40px; - text-align: center; - img { - max-height: 200px; - } - } - } - .description { - padding: 40px; - h2 { - border: none; - } - ul { - list-style-type: none; - padding-left: 0; - } - li { - display: flex; - align-items: center; - line-height: 25px; - margin-bottom: 20px; - } - .checkmark { - width: 20px; - margin-right: 10px; - vertical-align: middle; - align-self: 
baseline; - padding-top: 5px; - } - } - } - } -} - -@media (max-width: $MQMobile) { - .workflow { - .mobile { - display: block; - } - .desktop { - display: none; - } - } - - .benefits-container { - .benefit-container { - .benefit { - flex-direction: column; - .item { - &.image { - order: -1; - } - } - } - } - } -} - -@media (max-width: $MQMobileNarrow) { - .home { - padding-left: 1.5rem; - padding-right: 1.5rem; - .hero { - img { - max-height: 210px; - margin: 2rem auto 1.2rem; - } - h1 { - font-size: 2rem; - } - h1, .description, .action { - margin: 1.2rem auto; - } - .description { - font-size: 1.2rem; - } - .action-button { - font-size: 1rem; - padding: 0.6rem 1.2rem; - } - } - } -} - -.theme-container { - &.home-custom { - .hero { - h1 { - font-size: 64px; - font-family: Lato, sans-serif; - font-weight: 900; - } - img { - height: 200px; - } - } - p { - &.description { - position: relative; - &:before { - position: absolute; - content: ''; - width: 40px; - height: 3px; - top: -19px; - left: 50%; - margin-left: -20px; - background: #f36; - } - } - } - } -} -.sidebar-heading { - font-size: inherit; -} diff --git a/runatlantis.io/.vuepress/styles/palette.scss b/runatlantis.io/.vuepress/styles/palette.scss deleted file mode 100644 index 7f406d4555..0000000000 --- a/runatlantis.io/.vuepress/styles/palette.scss +++ /dev/null @@ -1,4 +0,0 @@ -$accentColor: #0074db; -$textColor: #2c3e50; -$borderColor: #eaecef; -$codeBgColor: #282c34; diff --git a/runatlantis.io/.vuepress/theme/components/Home.vue b/runatlantis.io/.vuepress/theme/components/Home.vue deleted file mode 100644 index 271a574c85..0000000000 --- a/runatlantis.io/.vuepress/theme/components/Home.vue +++ /dev/null @@ -1,175 +0,0 @@ - - - diff --git a/runatlantis.io/.vuepress/theme/index.js b/runatlantis.io/.vuepress/theme/index.js deleted file mode 100644 index 85ad504429..0000000000 --- a/runatlantis.io/.vuepress/theme/index.js +++ /dev/null @@ -1,6 +0,0 @@ -// introduce custom home with navbar -// https://stackoverflow.com/a/60220684 -// https://vuepress.vuejs.org/theme/inheritance.html#usage -module.exports = { - extend: '@vuepress/theme-default' -} diff --git a/runatlantis.io/README.md b/runatlantis.io/README.md deleted file mode 100644 index 5772c9faf7..0000000000 --- a/runatlantis.io/README.md +++ /dev/null @@ -1,9 +0,0 @@ ---- -home: true -pageClass: home-custom -heroImage: /hero.png -heroText: Atlantis -actionText: Get Started → -actionLink: /guide/ -title: Terraform Pull Request Automation ---- diff --git a/runatlantis.io/blog.md b/runatlantis.io/blog.md new file mode 100644 index 0000000000..6ad783293c --- /dev/null +++ b/runatlantis.io/blog.md @@ -0,0 +1,28 @@ +--- +title: Welcome to Our Blog +aside: false +--- + +# Welcome to Our Blog + +We are thrilled to have you here! Our blog is a collection of insightful articles, tips, and updates from our team. Whether you're new or have been following us for a while, there's always something new to learn and explore. + +### Explore Our Popular Posts + +We have a rich history of blog posts dating back to 2017-2019. 
Here are some of our popular posts: + +- [4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage](/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage) +- [I'm Joining HashiCorp!](/blog/2018/joining-hashicorp) +- [Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too](/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too) +- [Atlantis 0.4.4 Now Supports Bitbucket](/blog/2018/atlantis-0-4-4-now-supports-bitbucket) +- [Terraform And The Dangers Of Applying Locally](/blog/2018/terraform-and-the-dangers-of-applying-locally) +- [Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform](/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform) +- [Introducing Atlantis](/blog/2017/introducing-atlantis) + +### Welcoming New Blog Authors + +We are excited to welcome new authors to our blog. Our diverse team brings a wealth of knowledge and experience to share with our readers. Stay tuned for fresh perspectives and in-depth articles on the latest trends and technologies. + +If you have any questions or topics you would like us to cover, feel free to reach out [on Slack](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw). We are always looking to engage with our community and provide valuable content. + +Happy reading! diff --git a/runatlantis.io/blog/2017/introducing-atlantis.md b/runatlantis.io/blog/2017/introducing-atlantis.md new file mode 100644 index 0000000000..2957d994e7 --- /dev/null +++ b/runatlantis.io/blog/2017/introducing-atlantis.md @@ -0,0 +1,113 @@ +--- +title: Introducing Atlantis +lang: en-US +--- + +# Introducing Atlantis + +::: info +This post was originally written on September 11th, 2017 + +Original post: +::: + +We're very excited to announce the open source release of Atlantis! Atlantis is a tool for +collaborating on Terraform that's been in use at Hootsuite for over a year. The core +functionality of Atlantis enables developers and operators to run `terraform plan` and +`apply` directly from Terraform pull requests. Atlantis then comments back on the pull +request with the output of the commands: + +![](/blog/intro/intro1.gif) + +This is a simple feature, however it has had a massive effect on how our team writes Terraform. +By bringing a Terraform workflow to pull requests, Atlantis helped our Ops team collaborate +better on Terraform and also enabled our entire development team to write and execute Terraform safely. + +Atlantis was built to solve two problems that arose at Hootsuite as we adopted Terraform: + +### 1. Effective Collaboration + +What's the best way to collaborate on Terraform in a team setting? + +### 2. Developers Writing Terraform + +How can we enable our developers to write and apply Terraform safely? + +## Effective Collaboration + +When writing Terraform, there are a number of workflows you can follow. The simplest workflow is just using `master`: + +![](/blog/intro/intro2.webp) + +In this workflow, you work on `master` and run `terraform` locally. +The problem with this workflow is that there is no collaboration or code review. +So we start to use pull requests: + +![](/blog/intro/intro3.webp) + +We still run `terraform plan` locally, but once we're satisfied with the changes we create a pull request for review. When the pull request is approved, we run `apply` locally. + +This workflow is an improvement, but there are still problems. 
The first problem is that it's hard to review just the diff on the pull request. To properly review a change, you really need to see the output from `terraform plan`.
+
+![](/blog/intro/intro4.webp)
+
+What looks like a small change...
+
+![](/blog/intro/intro5.webp)
+
+...can have a big plan
+
+The second problem is that now it's easy for `master` to get out of sync with what's actually been applied. This can happen if you merge a pull request without running `apply`, or if the `apply` has an error halfway through, you forget to fix it and then you merge to `master`. Now what's in `master` isn't actually what's running on production. At best, this causes confusion the next time someone runs `terraform plan`. At worst, it causes an outage when someone assumes that what's in `master` is actually running, and depends on it.
+
+With the Atlantis workflow, these problems are solved:
+
+![](/blog/intro/intro6.webp)
+
+Now it's easy to review changes because you see the `terraform plan` output on the pull request.
+
+![](/blog/intro/intro7.webp)
+
+Pull requests are easy to review since you can see the plan
+
+It's also easy to ensure that the pull request is `terraform apply`'d before merging to `master` because you can see the actual `apply` output on the pull request.
+
+![](/blog/intro/intro8.webp)
+
+So, Atlantis makes working on Terraform within an operations team much easier, but how does it help with getting your whole team to write Terraform?
+
+## Developers Writing Terraform
+
+Terraform usually starts out being used by the Ops team. As a result of using Terraform, the Ops team becomes much faster at making infrastructure changes, but the way developers request those changes remains the same: they use a ticketing system or chat to ask operations for help, the request goes into a queue and later Ops responds that the task is complete.
+
+Soon, however, the Ops team starts to realize that it's possible for developers to make some of these Terraform changes themselves! There are some problems that arise though:
+
+- Developers don't have the credentials to actually run Terraform commands
+- If you give them credentials, it's hard to review what is actually being applied
+
+With Atlantis, these problems are solved. All `terraform plan` and `apply` commands are run from the pull request. This means developers don't need to have any credentials to run Terraform locally. Of course, this can be dangerous: how can you ensure developers (who might be new to Terraform) aren't applying things they shouldn't? The answer is code reviews and approvals.
+
+Since Atlantis comments back with the `plan` output directly on the pull request, it's easy for an operations engineer to review exactly what changes will be applied. And Atlantis can run in `require-approval` mode, which requires a GitHub pull request approval before allowing `apply` to be run:
+
+![](/blog/intro/intro9.webp)
+
+With Atlantis, developers are able to write and apply Terraform safely. They submit pull requests, can run `atlantis plan` until their change looks good and then get approval from Ops to `apply`.
+
+Since the introduction of Atlantis at Hootsuite, we've had **78** contributors to our Terraform repositories, **58** of whom are developers (**75%**).
+
+## Where we are now
+
+Since the introduction of Atlantis at Hootsuite we've grown to 144 Terraform repositories [^1] that manage thousands of Amazon resources. Atlantis is used for every single Terraform change throughout our organization.
+
+## Getting started with Atlantis
+
+If you'd like to try out Atlantis for your team you can download the latest release from . If you run `atlantis testdrive` you can get started in less than 5 minutes. To read more about Atlantis go to .
+
+Check out our video for more information:
+
+
+
+[^1]: We split our Terraform up into multiple states, each with its own repository (see [1], [2], [3]).
+
+[1]: https://blog.gruntwork.io/how-to-manage-terraform-state-28f5697e68fa
+[2]: https://charity.wtf/2016/03/30/terraform-vpc-and-why-you-want-a-tfstate-file-per-env/
+[3]: https://www.nclouds.com/blog/terraform-multi-state-management/
diff --git a/runatlantis.io/blog/2018/atlantis-0-4-4-now-supports-bitbucket.md b/runatlantis.io/blog/2018/atlantis-0-4-4-now-supports-bitbucket.md
new file mode 100644
index 0000000000..ce6e39f0cb
--- /dev/null
+++ b/runatlantis.io/blog/2018/atlantis-0-4-4-now-supports-bitbucket.md
@@ -0,0 +1,102 @@
+---
+title: Atlantis 0.4.4 Now Supports Bitbucket
+lang: en-US
+---
+
+# Atlantis 0.4.4 Now Supports Bitbucket
+
+::: info
+This post was originally written on July 25th, 2018
+
+Original post:
+:::
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp)
+
+Atlantis is an [open source](https://github.com/runatlantis/atlantis) platform for using Terraform in teams. I'm happy to announce that the [latest release](https://github.com/runatlantis/atlantis/releases) of Atlantis (0.4.4) now supports both Bitbucket Cloud (bitbucket.org) **and** Bitbucket Server (aka Stash).
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif)
+
+Atlantis now supports the three major Git hosts: GitHub, GitLab and Bitbucket. The rest of this post will talk about how to use Atlantis with Bitbucket.
+
+## What is Atlantis?
+
+Atlantis is a self-hosted application that listens for Terraform pull request events via webhooks. It runs `terraform plan` and `apply` remotely and comments back on the pull request with the output.
+
+With Atlantis, you collaborate on the Terraform pull request itself instead of running `terraform apply` from your own computers which can be dangerous:
+
+Check out for more information.
+
+## Getting Started
+
+The easiest way to try out Atlantis with Bitbucket is to run Atlantis locally on your own computer. Eventually you'll want to deploy it as a standalone app but this is the easiest way to try it out. Follow [these instructions](https://www.runatlantis.io/guide/getting-started.html) to get Atlantis running locally.
+
+### Create a Pull Request
+
+If you've got the Atlantis webhook configured for your repository and Atlantis is running, it's time to create a new pull request. I recommend adding a `null_resource` to one of your Terraform files for the test pull request. It won't actually create anything so it's safe to use as a test.
+
+Using the web editor, open up one of your Terraform files and add:
+
+```tf
+resource "null_resource" "example" {}
+```
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp)
+
+Click Commit and select **Create a pull request for this change**.
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp)
+
+Wait a few seconds and then refresh. Atlantis should have automatically run `terraform plan` and commented back on the pull request:
+
+![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp)
+
+Now it's easier for your colleagues to review the pull request because they can see the `terraform plan` output.
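+
+For reference, if you're running the server yourself (as in the getting-started instructions above), a local invocation for Bitbucket Cloud looks roughly like the sketch below. The flag names are an assumption based on the 0.4.x-era docs (`--repo-whitelist` has since been renamed `--repo-allowlist`), so check `atlantis server --help` for your version:
+
+```sh
+# Sketch only: flag names as documented around Atlantis 0.4.x; verify with
+# `atlantis server --help` for the version you're running.
+# URL must be publicly reachable so Bitbucket can deliver webhook events.
+atlantis server \
+  --atlantis-url="$URL" \
+  --bitbucket-user="$USERNAME" \
+  --bitbucket-token="$TOKEN" \
+  --repo-whitelist="bitbucket.org/yourorg/*"
+```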
+ +### Terraform Apply + +Since all we're doing is adding a null resource, I think it's safe to run `terraform apply`. To do so, I add a comment to the pull request: `atlantis apply`: + +![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp) + +Atlantis is listening for pull request comments and will run `terraform apply` remotely and comment back with the output: + +![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp) + +### Pull Request Approvals + +If you don't want anyone to be able to `terraform apply`, you can run Atlantis with `--require-approval` or add that setting to your [atlantis.yaml file](https://www.runatlantis.io/docs/command-requirements.html#approved). + +This will ensure that the pull request has been approved before someone can run `apply`. + +## Other Features + +### Customizable Commands + +Apart from being able to `plan` and `apply` from the pull request, Atlantis also enables you to customize the exact commands that are run via an `atlantis.yaml` config file. For example to use the `-var-file` flag: + +```yaml{14} +# atlantis.yaml +version: 2 +projects: +- name: staging + dir: "." + workflow: staging + +workflows: + staging: + plan: + steps: + - init + - plan: + extra_args: ["-var-file", "staging.tfvars"] +``` + +### Locking For Coordination + +![](/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp) + +Atlantis will prevent other pull requests from running against the same directory as an open pull request so that each plan is applied atomically. Once the first pull request is merged, other pull requests are unlocked. + +## Next Steps + +If you're interested in using Atlantis with Bitbucket, check out our Getting Started docs. Happy Terraforming! diff --git a/runatlantis.io/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform.md b/runatlantis.io/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform.md new file mode 100644 index 0000000000..a9506073f3 --- /dev/null +++ b/runatlantis.io/blog/2018/hosting-our-static-site-over-ssl-with-s3-acm-cloudfront-and-terraform.md @@ -0,0 +1,174 @@ +--- +title: Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform +lang: en-US +--- + +# Hosting Our Static Site over SSL with S3, ACM, CloudFront and Terraform + +::: info +This post was originally written on March 4, 2018 + +Original post: +::: + +In this post I cover how I hosted using + +- S3 — for storing the static site +- CloudFront — for serving the static site over SSL +- AWS Certificate Manager — for generating the SSL certificates +- Route53 — for routing the domain name to the correct location + +I chose Terraform in this case because Atlantis is a tool for automating and collaborating on Terraform in a team (see github.com/runatlantis/atlantis)–and so obviously it made sense to host our homepage using Terraform–but also because it's now much easier to manage. I don't have to go into the AWS console and click around to find what settings I want to change. Instead I can just look at ~100 lines of code, make a change, and run `terraform apply`. + +::: info +NOTE: 4 months after this writing, I moved the site to [Netlify](https://www.netlify.com/) because it automatically builds from my master branch on any change, updates faster since I don't need to wait for the Cloudfront cache to expire and gives me [deploy previews](https://www.netlify.com/blog/2016/07/20/introducing-deploy-previews-in-netlify/) of changes. The DNS records are still hosted on AWS. 
+:::
+
+# Overview
+
+There's a surprising number of components required to get all this working so I'm going to start with an overview of what they're all needed for. Here's what the final architecture looks like:
+
+![](/blog/hosting-our-static-site/pic1.webp)
+
+That's what the final product looks like, but let's start with the steps required to get there.
+
+## Step 1 — Generate The Site
+
+The first step is to have a site generated. Our site uses [Hugo](https://gohugo.io/), a Golang site generator. Once it's set up, you just need to run `hugo` and it will generate a directory with HTML and all your content ready to host.
+
+## Step 2 — Host The Content
+
+Once you've got a website, you need it to be accessible on the internet. I used S3 for this because it's dirt cheap and it integrates well with all the other necessary components. I simply upload my website folder to the S3 bucket.
+
+## Step 3 — Generate an SSL Certificate
+
+I needed to generate an SSL certificate for . I used the AWS Certificate Manager for this because it's free and is easily integrated with the rest of the system.
+
+## Step 4 — Set up DNS
+
+Because I'm going to host the site on AWS services, I need requests to to be routed to those services. Route53 is the obvious solution.
+
+## Step 5 — Host with CloudFront
+
+At this point, we've generated an SSL certificate for and our website is available on the internet via its S3 url, so can't we just CNAME to the S3 bucket and call it a day? Unfortunately not.
+
+Since we generated our own certificate, we would need S3 to sign its responses using our certificate. S3 doesn't support this and thus we need CloudFront. CloudFront supports using our own SSL cert and will just pull its data from the S3 bucket.
+
+# Terraform Time
+
+Now that we know what our architecture should look like, it's simply a matter of writing the Terraform.
+
+## Initial Setup
+
+Create a new file `main.tf`:
+
+<<< @/public/blog/hosting-our-static-site/code/main.tf
+
+## S3 Bucket
+
+Assuming we've generated our site content already, we need to create an S3 bucket to host the content.
+
+<<< @/public/blog/hosting-our-static-site/code/s3-bucket.tf
+
+We should be able to run Terraform now to create the S3 bucket:
+
+```sh
+terraform init
+terraform apply
+```
+
+![](/blog/hosting-our-static-site/pic2.webp)
+
+Now we want to upload our content to the S3 bucket:
+
+```sh
+$ cd dir/with/website
+# generate the HTML
+$ hugo -d generated
+$ cd generated
+# send it to our S3 bucket
+$ aws s3 sync . s3://www.runatlantis.io/ # change this to your bucket
+```
+
+Now we need the S3 url to see our content:
+
+```sh
+$ terraform state show aws_s3_bucket.www | grep website_endpoint
+website_endpoint = www.runatlantis.io.s3-website-us-east-1.amazonaws.com
+```
+
+You should see your site hosted at that url!
+
+## SSL Certificate
+
+Let's use the AWS Certificate Manager to create our SSL certificate.
+
+<<< @/public/blog/hosting-our-static-site/code/ssl-cert.tf
+
+Before you run `terraform apply`, ensure you're forwarding any of
+
+- `administrator@your_domain_name`
+- `hostmaster@your_domain_name`
+- `postmaster@your_domain_name`
+- `webmaster@your_domain_name`
+- `admin@your_domain_name`
+
+to an email address you can access. Then, run `terraform apply` and you should get an email from AWS asking you to confirm you own this domain; you'll need to click the link it contains.
+
+## CloudFront
+
+Now we're ready for CloudFront to host our website using the S3 bucket for the content and using our SSL certificate. Warning!
There's a lot of code ahead but most of it is just defaults.
+
+<<< @/public/blog/hosting-our-static-site/code/cloudfront.tf
+
+Apply the changes with `terraform apply` and then find the domain name that CloudFront gives us:
+
+```sh
+$ terraform state show aws_cloudfront_distribution.www_distribution | grep ^domain_name
+domain_name = d1l8j8yicxhafq.cloudfront.net
+```
+
+You'll probably get an error if you go to that URL right away. You need to wait a couple minutes for CloudFront to set itself up. It took me 10 minutes. You can view its progress in the console:
+
+## DNS
+
+We're almost done! We've got CloudFront hosting our site; now we need to point our DNS at it.
+
+<<< @/public/blog/hosting-our-static-site/code/dns.tf
+
+If you bought your domain from somewhere else like Namecheap, you'll need to point your DNS at the nameservers listed in the state for the Route53 zone you created. First `terraform apply` (which may take a while), then find out your nameservers.
+
+```sh
+$ terraform state show aws_route53_zone.zone
+id = Z2FNAJGFW912JG
+comment = Managed by Terraform
+force_destroy = false
+name = runatlantis.io
+name_servers.# = 4
+name_servers.0 = ns-1349.awsdns-40.org
+name_servers.1 = ns-1604.awsdns-08.co.uk
+name_servers.2 = ns-412.awsdns-51.com
+name_servers.3 = ns-938.awsdns-53.net
+tags.% = 0
+zone_id = Z2FNAJGFW912JG
+```
+
+Then look at your registrar's docs for how to change your nameservers to all 4 listed.
+
+## That's it...?
+
+Once the DNS propagates you should see your site at `https://www.yourdomain`! But what about `https://yourdomain`? i.e. without the `www.`? Shouldn't this redirect to `https://www.yourdomain`?
+
+## Root Domain
+
+It turns out, we need to create a whole new S3 bucket, CloudFront distribution and Route53 record just to get this to happen. That's because although S3 can serve up a redirect to the www version of your site, it can't host SSL certs and so you need CloudFront. I've included all the terraform necessary for that below.
+
+Congrats! You're done!
+
+
+
+If you're using Terraform in a team, check out Atlantis: for automation and collaboration to make your team happier!
+
+Here's the Terraform needed to redirect your root domain:
+
+<<< @/public/blog/hosting-our-static-site/code/full.tf
diff --git a/runatlantis.io/blog/2018/joining-hashicorp.md b/runatlantis.io/blog/2018/joining-hashicorp.md
new file mode 100644
index 0000000000..69ab04cda8
--- /dev/null
+++ b/runatlantis.io/blog/2018/joining-hashicorp.md
@@ -0,0 +1,50 @@
+---
+title: I'm Joining HashiCorp!
+lang: en-US
+---
+
+# I'm Joining HashiCorp
+
+::: info
+This post was originally written on October 23rd, 2018
+
+Original post:
+:::
+
+Dear Atlantis Community,
+
+My name is Luke and I'm the maintainer of [Atlantis](https://www.runatlantis.io/), an open source tool for Terraform collaboration. Today I'm excited to announce that I'm joining HashiCorp!
+
+![](/blog/joining-hashicorp/pic1.webp)
+
+## What Does This Mean For Atlantis?
+
+In the near term, nothing will change for Atlantis and its users. As a HashiCorp employee I will continue to maintain Atlantis, review pull requests, triage issues, and write code.
+
+In the long term, HashiCorp and I want to address collaboration workflows for all users of Terraform. We are still working out the details of how Atlantis will fit into the longer term plan, but whatever direction we take, we're committed to keeping Atlantis free and open source.
+
+## HashiCorp and Atlantis
+
+Why does HashiCorp want to support Atlantis?
+
+Today HashiCorp [announced their commitment to provide collaboration solutions to the whole Terraform community](https://www.hashicorp.com/blog/terraform-collaboration-for-everyone). They see the Atlantis project as one manifestation of this vision and understand its importance to many in the Terraform community. They believe that by working together, we can create a solution that will scale from a single user to hundreds of collaborators in a large organization.
+
+## Why am I joining?
+
+Those of you who know me may wonder why I made this decision. It came down to wanting to continue working on Atlantis–and the larger story of Terraform collaboration–and finding a way to support myself.
+
+In January, 9 months ago, I quit my job at Hootsuite to work **full time** on Atlantis (Atlantis was originally created at Hootsuite by my friend [Anubhav Mishra](https://twitter.com/anubhavm)). I left because I knew that the Terraform community was in need of a solution for collaboration and that with full time development, Atlantis could be that solution.
+
+During the last 9 months, Atlantis matured into a fully fledged collaboration solution and gained many new users. It has been an amazing time, but I've been working for free! I've always known that for Atlantis to be successful in the long term, I would need to find a way to support myself.
+
+A couple of weeks ago, as I was playing around with Atlantis monetization strategies, HashiCorp contacted me. I learned that they shared a vision of building Terraform collaboration solutions for the broader community and that they were interested in combining forces. They also assured me that they wanted to do right by the Atlantis community.
+
+This was a compelling offer versus solo-founding a company around Atlantis: I would be able to focus on coding and product instead of business and sales, and I could spend all of my time on Atlantis and the larger story of Terraform collaboration. As a result, I came to the conclusion that joining HashiCorp was the right decision for me and the community.
+
+## Conclusion
+
+Atlantis has been a passion of mine for almost two years now. I deeply care about the future of the project and its community and I know that this move will ensure that that future is bright.
+
+There are probably some questions I haven't answered in this post so please don't hesitate to reach out, either via [Twitter](https://twitter.com/lkysow) or on the [Atlantis Slack](https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw).
+
+I'm excited for the future of Atlantis and Terraform collaboration and I hope you are too.
diff --git a/runatlantis.io/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too.md b/runatlantis.io/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too.md
new file mode 100644
index 0000000000..88b28a1b16
--- /dev/null
+++ b/runatlantis.io/blog/2018/putting-the-dev-into-devops-why-your-developers-should-write-terraform-too.md
@@ -0,0 +1,244 @@
+---
+title: "Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too"
+lang: en-US
+---
+
+# Putting The Dev Into DevOps: Why Your Developers Should Write Terraform Too
+
+::: info
+This post was originally written on August 29th, 2018
+
+Original post:
+:::
+
+[Terraform](https://www.terraform.io/) is an amazing tool for provisioning infrastructure. Terraform enables your operators to perform their work faster and more reliably.
+ +**But if only your ops team is writing Terraform, you're missing out.** + +Terraform is not just a tool that makes ops teams more effective. Adopting Terraform is an opportunity to turn all of your developers into operators (at least for smaller tasks). This can make your entire engineering team more effective and create a better relationship between developers and operators. + +### Quick Aside — What is Terraform? + +Terraform is two things. It's a language for describing infrastructure: + +```tf +resource "aws_instance" "example" { + ami = "ami-2757f631" + instance_type = "t2.micro" +} +``` + +And it's a CLI tool that reads Terraform code and makes API calls to AWS (or any other cloud provider) to provision that infrastructure. + +In this example, we're using the CLI to run `terraform apply` which will create an EC2 instance: + +```sh +$ terraform apply + +Terraform will perform the following actions: + + # aws_instance.example + + aws_instance.example + ami: "ami-2757f631" + instance_type: "t2.micro" + ... + +Plan: 1 to add, 0 to change, 0 to destroy. + +Do you want to perform these actions? + Terraform will perform the actions described above. + Only 'yes' will be accepted to approve. + + Enter a value: yes + +aws_instance.example: Creating... + ami: "" => "ami-2757f631" + instance_type: "" => "t2.micro" + ... + +aws_instance.example: Still creating... (10s elapsed) +aws_instance.example: Creation complete + +Apply complete! Resources: 1 added, 0 changed, 0 destroyed. +``` + +## Terraform Adoption From A Dev's Perspective + +Adopting Terraform is great for your operations team's effectiveness but it doesn't change much for devs. Before Terraform adoption, devs typically interacted with an ops team like this: + +![](/blog/putting-the-dev-into-devops/pic1.webp) + +1. **Dev: Creates ticket asking for some ops work** +2. **Dev: Waits** +3. _Ops: Looks at ticket when in queue_ +4. _Ops: Does work_ +5. _Ops: Updates ticket_ +6. **Dev: Continues their work** + +After the Ops team adopts Terraform, the workflow from a dev's perspective is the same! + +![](/blog/putting-the-dev-into-devops/pic2.webp) + +1. **Dev: Creates ticket asking for some ops work** +2. **Dev: Waits** +3. _Ops: Looks at ticket when in queue_ +4. _Ops: Does work. This time using Terraform (TF)_ +5. _Ops: Updates ticket_ +6. **Dev: Continues their work** + +With Terraform, there's less of Step 2 (Dev: Waits) but apart from that, not much has changed. + +> If only ops is writing Terraform, your developers' experience is the same. + +## Devs Want To Help + +Developers would love to help out with operations work. They know that for small changes they should be able to do the work themselves (with a review from ops). For example: + +- Adding a new security group rule +- Increasing the size of an autoscaling group +- Using a larger instance because their app needs more memory + +Developers could make all of these changes because they're small and well defined. Also, previous examples of doing the same thing can guide them. + +## ...But Often They're Not Allowed + +In many organizations, devs are locked out of the cloud console. 
+ +![](/blog/putting-the-dev-into-devops/pic3.webp) + +They might be locked out for good reasons: + +- Security — You can do a lot of damage with full access to a cloud console +- Compliance — Maybe your compliance requires only certain groups to have access +- Cost — Devs might spin up some expensive resources and then forget about them + +Even if they have access, operations can be complicated: + +- It's often difficult to do seemingly simple things (think adding a security group rule that also requires peering VPCs). This means that just having access sometimes isn't enough. Devs might need help from an expert to get things done. + +## Enter Terraform + +With Terraform, everything changes. Or at least it can. + +Now Devs can see in code how infrastructure is built. They can see the exact spot where security group rules are configured: + +```tf +resource "aws_security_group_rule" "allow_all" { + type = "ingress" + from_port = 0 + to_port = 65535 + protocol = "tcp" + cidr_blocks = ["0.0.0.0/0"] + security_group_id = "sg-123456" +} + +resource "aws_security_group_rule" "allow_office" { + ... +} +``` + +Or where the size of the autoscaling group is set: + +```tf +resource "aws_autoscaling_group" "asg" { + name = "my-asg" + max_size = 5 + desired_capacity = 4 + min_size = 2 + ... +} +``` + +Devs understand code (surprise!) so it's a lot easier for them to make those small changes. + +Here's the new workflow: + +![](/blog/putting-the-dev-into-devops/pic4.webp) + +1. **Dev: Writes Terraform code** +2. **Dev: Creates pull request** +3. _Ops: Reviews pull request_ +4. **Dev: Applies the change with Terraform (TF)** +5. **Dev: Continues their work** + +Now: + +- Devs are making small changes themselves. This saves time and increases the speed of the whole engineering organization. +- Devs can see exactly what is required to make the change. This means there's less back and forth over a ticket: “Okay so I know you need the security group opened between server A and B, but on which ports and with which protocol?” +- Devs start to see how infrastructure is built. This increases cooperation between dev and ops because they can understand each other's work. + +Great! But there's another problem. + +## Devs Are Locked Out Of Terraform Too + +In order to execute Terraform you need to have cloud credentials! It's really hard to write Terraform without being able to run `terraform init` and `terraform plan`, for the same reason it would be hard to write code if you could never run it locally! + +So are we back at square one? + +## Enter Atlantis + +[Atlantis](https://www.runatlantis.io/) is an [open source](https://github.com/runatlantis/atlantis) tool for running Terraform from pull requests. With Atlantis, Terraform is run on a separate server (Atlantis is self-hosted) so you don't need to give out credentials to everyone. Access is controlled through pull request approvals. + +Here's what the workflow looks like: + +### Step 1 — Create a Pull Request + +A developer creates a pull request with their change to add a security group rule. + +![](/blog/putting-the-dev-into-devops/pic5.webp) + +### Step 2 — Atlantis Runs Terraform Plan + +Atlantis automatically runs `terraform plan` and comments back on the pull request with the output. Now developers can fix their Terraform errors before asking for a review. 
+
+![](/blog/putting-the-dev-into-devops/pic6.webp)
+
+### Step 3 — Fix The Terraform
+
+The developer pushes a new commit that fixes their error and Atlantis comments back with the valid `terraform plan` output. Now the developer can verify that the plan output looks good.
+
+![](/blog/putting-the-dev-into-devops/pic7.webp)
+
+### Step 4 — Get Approval
+
+You'll probably want to run Atlantis with the `--require-approval` flag that requires pull requests to be Approved before running `atlantis apply`.
+
+![](/blog/putting-the-dev-into-devops/pic8.webp)
+
+### Step 4a — Actually Get Approval
+
+An operator can now come along and review the changes and the output of `terraform plan`. This is much faster than doing the change themselves.
+
+![](/blog/putting-the-dev-into-devops/pic9.webp)
+
+### Step 5 — Apply
+
+To apply the changes, the developer or operator comments “atlantis apply”.
+
+![](/blog/putting-the-dev-into-devops/pic10.webp)
+
+## Success
+
+Now we've got a workflow that makes everyone happy:
+
+- Devs can write Terraform and iterate on the pull request until the `terraform plan` looks good
+- Operators can review pull requests and approve the changes before they're applied
+
+Now developers can make small operations changes and learn more about how infrastructure is built. Everyone can work more effectively and with a shared understanding that enhances collaboration.
+
+## Does It Work In Practice?
+
+Atlantis has been used by my previous company, Hootsuite, for over 2 years. It's used daily by 20 operators but it's also used occasionally by over 60 developers!
+Another company uses Atlantis to manage 600+ Terraform repos collaborated on by over 300 developers and operators.
+
+## Next Steps
+
+- If you'd like to learn more about Terraform, check out HashiCorp's [Introduction to Terraform](https://developer.hashicorp.com/terraform/intro)
+- If you'd like to try out Atlantis, go to
+- If you have any questions, reach out to me on Twitter ([at]lkysow) or in the comments below.
+
+## Credits
+
+- Thanks to [Seth Vargo](https://medium.com/@sethvargo) for his talk [Version-Controlled Infrastructure with GitHub](https://www.youtube.com/watch?v=2TWqi7dLSro) that inspired a lot of this post.
+- Thanks to Isha for reading drafts of this post.
+- Icons in graphics from made by [Freepik](http://freepik.com/) from [Flaticon](https://www.flaticon.com/) and licensed by [CC 3.0](https://creativecommons.org/licenses/by/3.0/)
diff --git a/runatlantis.io/blog/2018/terraform-and-the-dangers-of-applying-locally.md b/runatlantis.io/blog/2018/terraform-and-the-dangers-of-applying-locally.md
new file mode 100644
index 0000000000..081d2f1154
--- /dev/null
+++ b/runatlantis.io/blog/2018/terraform-and-the-dangers-of-applying-locally.md
@@ -0,0 +1,120 @@
+---
+title: Terraform And The Dangers Of Applying Locally
+lang: en-US
+---
+
+# Terraform And The Dangers Of Applying Locally
+
+::: info
+This post was originally written on July 13th, 2018
+
+Original post:
+:::
+
+If you're using Terraform then at some point you've likely run a `terraform apply` that reverted someone else's change!
+
+Here's how that tends to happen:
+
+## The Setup
+
+Say we have two developers: Alice and Bob. Alice needs to add a new security group rule. She checks out a new branch, adds her rule and creates a pull request:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic1.webp)
+
+When she runs `terraform plan` locally she sees what she expects.
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic2.webp)
+
+Meanwhile, Bob is working on an emergency fix. He checks out a new branch and adds a different security group rule called `emergency`:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic3.webp)
+
+And, because it's an emergency, he **immediately runs apply**:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic4.webp)
+
+Now back to Alice. She's just gotten approval on her pull request change and so she runs `terraform apply`:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic5.webp)
+
+Did you catch what happened? Did you notice that the `apply` deleted Bob's rule?
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic6.webp)
+
+In this example, it wasn't too hard to see. However, if the plan is much longer, or if the change is less obvious, then it can be easy to miss.
+
+## Possible Solutions
+
+There are some ways to avoid this:
+
+### Use `terraform plan -out`
+
+If Alice had run `terraform plan -out plan.tfplan` then when she ran `terraform apply plan.tfplan` she would see:
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic7.webp)
+
+The problem with this solution is that few people run `terraform plan` anymore, much less `terraform plan -out`!
+
+
+
+It's easier to just run `terraform apply` and humans will take the easier path most of the time.
+
+### Wrap `terraform apply` to ensure up to date with `master`
+
+Another possible solution is to write a wrapper script that ensures our branch is up to date with `master`. But this doesn't solve the problem of Bob running `apply` locally and not yet merging to `master`. In this case, Alice's branch would have been up to date with `master` but not the latest apply'd state.
+
+### Be more disciplined
+
+What if everyone:
+
+- ALWAYS created a branch, got a pull request review, merged to `master` and then ran apply. And also everyone
+- ALWAYS checked to ensure their branch was rebased from `master`. And also everyone
+- ALWAYS carefully inspected the `terraform plan` output and made sure it was exactly what they expected
+
+...then we wouldn't have a problem!
+
+Unfortunately this is not a real solution. We're all human and we're all going to make mistakes. Relying on people to follow a complicated process 100% of the time is not a solution because it doesn't work.
+
+## Core Problem
+
+The core problem is that everyone is applying from their own workstations and it's up to them to ensure that they're up to date and that they keep `master` up to date. This is like developers deploying to production from their laptops.
+
+### What if, instead of applying locally, a remote system did the apply's?
+
+This is why we built [Atlantis](https://www.runatlantis.io/) – an open source project for Terraform automation by pull request. You could also accomplish this with your own CI system or with [Terraform Enterprise](https://www.hashicorp.com/products/terraform). Here's how Atlantis solves this issue:
+
+When Alice makes her change, she creates a pull request and Atlantis automatically runs `terraform plan` and comments on the pull request.
+
+When Bob makes his change, he creates a pull request and Atlantis automatically runs `terraform plan` and comments on the pull request.
+
+![](/blog/terraform-and-the-dangers-of-applying-locally/pic8.webp)
+
+Atlantis also **locks the directory** to ensure that no one else can run `plan` or `apply` until Alice's plan has been intentionally deleted or she merges the pull request.
+ +If Bob creates a pull request for his emergency change he'd see this error: + +![](/blog/terraform-and-the-dangers-of-applying-locally/pic9.webp) + +Alice can then comment `atlantis apply` and Atlantis will run the apply itself: + +![](/blog/terraform-and-the-dangers-of-applying-locally/pic10.webp) + +Finally, she merges the pull request and unlocks Bob's branch: + +![](/blog/terraform-and-the-dangers-of-applying-locally/pic11.webp) + +### But what if Bob ran `apply` locally? + +In that case, Alice is still okay because when Atlantis ran `terraform plan` it used `-out`. If Alice tries to apply that plan, Terraform will give an error because the plan was generated against an old state. + +### Why does Atlantis run `apply` on the branch and not after a merge to `master`? + +We do this because `terraform apply` fails quite often, despite `terraform plan` succeeding. Usually it's because of a dependency issue between resources or because the cloud provider requires a certain format or a certain field to be set. Regardless, in practice we've found that `apply` fails a lot. + +By locking the directory, we're essentially ensuring that the branch being `apply`'d is `"master"` since no one else can modify that state. We then get the benefit of being able to iterate on the pull request and push small fixes until we're sure that the changeset is `apply`'d. If `apply` failed after merging to `master`, we'd have to open new pull requests over and over again. There is definitely a tradeoff here, however we believe it's the right tradeoff. + +## Conclusion + +In conclusion, running `terraform apply` when you're working with a team of operators can be dangerous. Look to solutions like your own CI, Atlantis or Terraform Enterprise to ensure you're always working off the latest code that was `apply`'d. + +If you'd like to try Atlantis, you can get started here: diff --git a/runatlantis.io/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage.md b/runatlantis.io/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage.md new file mode 100644 index 0000000000..07bfd8ccf9 --- /dev/null +++ b/runatlantis.io/blog/2019/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage.md @@ -0,0 +1,157 @@ +--- +title: 4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage +lang: en-US +--- + +# 4 Reasons To Try HashiCorp's (New) Free Terraform Remote State Storage + +::: info +This post was originally written on April 2nd, 2019 + +Original post: +::: + +Update (May 20/19) — Free State Storage is now called Terraform Cloud and is out of Beta, meaning anyone can sign up! + +HashiCorp is planning to offer free Terraform Remote State Storage and they have a beta version available now. In this article, I talk about 4 reasons you should try it (Disclosure: I work at HashiCorp). + +> _Sign up for Terraform Cloud [here](https://goo.gl/X5t5EM)._ + +## What is Terraform State? + +Before I get into why you should use the new remote state storage, let's talk about what exactly we mean by state in Terraform. + +Terraform uses _state_ to map your Terraform code to the real-world resources that it provisions. For example, if I have Terraform code to create an AWS EC2 instance: + +```tf +resource "aws_instance" "web" { + ami = "ami-e6d9d68c" + instance_type = "t2.micro" +} +``` + +When I run `terraform apply`, Terraform will make a “create EC2 instance” API call to AWS and AWS will return the unique ID of that instance (ex. `i-0ad17607e5ee026d0`). 
Terraform needs to record that ID somewhere so that later, it can make API calls to change or delete the instance. + +To store this information, Terraform uses a state file. For the above code, the state file will look something like: + +```json{4,7} +{ + ... + "resources": { + "aws_instance.web": { + "type": "aws_instance", + "primary": { + "id": "i-0ad17607e5ee026d0", + ... +} +``` + +Here you can see that the resource `aws_instance.web` from our Terraform code is mapped to the instance ID `i-0ad17607e5ee026d0`. + +So if Terraform state is just a file, then what is remote state? + +## Remote State + +By default, Terraform writes its state file to your local filesystem. This is okay for personal projects, but once you start working with a team, things get messy. In a team, you need to make sure everyone has an up to date version of the state file **and** ensure that two people aren't making concurrent changes. + +Enter remote state! Remote state is just storing the state file remotely, rather than on your filesystem. With remote state, there's only one copy so Terraform can ensure you're always up to date. To prevent team members from modifying state at the same time, Terraform can lock the remote state. + +> Remote state is just storing the state file remotely, rather than on your filesystem. + +Alright, so remote state is great, but unfortunately setting it up can be a bit tricky. In AWS, you can store it in an S3 bucket, but you need to create the bucket, configure it properly, set up its permissions properly, create a DynamoDB table for locking and then ensure everyone has proper credentials to write to it. It's much the same story in the other clouds. + +As a result, setting up remote state can be an annoying stumbling block as teams adopt Terraform. + +This brings us to the first reason to try HashiCorp's Free Remote State Storage... + +## Reason #1 — Easy To Set Up + +Unlike other remote state solutions that require complicated setup to get right, setting up free remote state storage is easy. + +> Setting up HashiCorp's free remote state storage is easy + +Step 1 — Sign up for your [free Terraform Cloud](https://app.terraform.io/signup) account + +Step 2 — When you log in, you'll land on this page where you'll create your organization: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp) + +Step 3 — Next, go into User Settings and generate a token: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp) + +Step 4 — Take this token and create a local ~/.terraformrc file: + +```tf +credentials "app.terraform.io" { + token = "mhVn15hHLylFvQ.atlasv1.jAH..." +} +``` + +Step 5 — That's it! Now you're ready to store your state. + +In your Terraform project, add a `terraform` block: + +```tf{3,5} +terraform { + backend "remote" { + organization = "my-org" # org name from step 2. + workspaces { + name = "my-app" # name for your app's state. + } + } +} +``` + +Run `terraform init` and tada! Your state is now being stored in Terraform Enterprise. You can see the state in the UI: + +![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp) + +Speaking of seeing state in a UI... 
+
+## Reason #2 — Fully Featured State Viewer
+
+The second reason to try Terraform Cloud is its fully featured state viewer:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp)
+
+If you've ever messed up your Terraform state and needed to download an old version or wanted an audit log to know who changed what, then you'll love this feature.
+
+You can view the full state file at each point in time:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp)
+
+You can also see the diff of what changed:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp)
+
+Of course, you can find a way to get this information from some of the other state backends, but it's difficult. With HashiCorp's remote state storage, you get it for free.
+
+## Reason #3 — Manual Locking
+
+The third reason to try Terraform Cloud is the ability to manually lock your state.
+
+Ever been working on a piece of infrastructure and wanted to ensure that no one could make any changes to it at the same time?
+
+Terraform Cloud comes with the ability to lock and unlock states from the UI:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp)
+
+While the state is locked, `terraform` operations will receive an error:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp)
+
+This saves you a lot of these:
+
+![](/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp)
+
+## Reason #4 — Works With Atlantis
+
+The final reason to try out Terraform Cloud is that it works flawlessly with [Atlantis](https://www.runatlantis.io/)!
+
+Set an `ATLANTIS_TFE_TOKEN` environment variable to a TFE token and you're ready to go. Head over to to learn more.
+
+## Conclusion
+
+I highly encourage you to try out the new free Remote State Storage backend. It's a compelling offering over other state backends thanks to its ease of setup, fully featured state viewer, and locking capabilities.
+
+If you're not on the waitlist, sign up here: . diff --git a/runatlantis.io/blog/2024/april-2024-survey-results.md b/runatlantis.io/blog/2024/april-2024-survey-results.md new file mode 100644 index 0000000000..d4c92fa9df --- /dev/null +++ b/runatlantis.io/blog/2024/april-2024-survey-results.md @@ -0,0 +1,56 @@ +--- +title: Atlantis User Survey Results +lang: en-US +---
+
+# Atlantis User Survey Results
+
+In April 2024, the Core Atlantis Team launched an anonymous survey of our users. Over the two months the survey was open, we received 354 responses, which we will use to better understand our community's needs and help prioritize our roadmap.
+
+Overall, the results below show that we have a diverse set of enthusiastic users, and that though many still run the classic Atlantis setup (a handful of repos running Terraform against AWS, hosted on GitHub), there are many different use cases and directions the community is pursuing and would like to see Atlantis support.
+
+We are grateful for everyone who took the time to share their experiences with Atlantis. We plan to run this kind of survey on a semi-regular basis, so stay tuned!
+
+## Anonymized Results
+
+### How do you interact with Atlantis?
+
+![](/blog/april-2024-survey-results/interact.webp)
+
+Unsurprisingly, most users of Atlantis wear multiple hats and are involved throughout the development process.
+
+### How do you/your organization deploy Atlantis?
+
+![](/blog/april-2024-survey-results/deploy.webp)
+
+Most users deploy Atlantis using Kubernetes and/or AWS. "Other Docker" respondents use Docker but do not use EKS or Helm directly, while a minority use some other combination of technologies.
+
+### What Infrastructure as Code (IaC) tool(s) do you use with Atlantis?
+
+![](/blog/april-2024-survey-results/iac.webp)
+
+The vast majority of Atlantis users are still using Terraform as some part of their deployment. About half of them also use Terragrunt, and OpenTofu seems to be gaining some ground.
+
+### How many repositories does your Atlantis manage?
+
+![](/blog/april-2024-survey-results/repos.webp)
+
+Most users have relatively modest footprints to manage with Atlantis (though a few large monorepos could be obscured in the numbers).
+
+### Which Version Control Systems (VCSs) do you use?
+
+![](/blog/april-2024-survey-results/vcs.webp)
+
+Most users of Atlantis are using GitHub, with a sizeable chunk on GitLab, followed by Bitbucket and others. This mirrors the support and feature requests that the maintainers see for the various VCSs in the codebase.
+
+### What is the most important feature you find missing from Atlantis?
+
+![](/blog/april-2024-survey-results/features.webp)
+
+This being a free-form question, there was a long tail of responses, so the chart above only shows normalized answers that had three or more instances.
+
+Drift Detection as well as infrastructure improvements were the obvious winners here. After that, users focused on various integrations and improvements to the UI.
+
+## Conclusion
+
+It is always interesting and exciting for the core team to see the breadth of Atlantis usage, and we look forward to using this information to understand the needs of the community. Atlantis has always been a community-led effort, and we hope to continue to carry that spirit forward! diff --git a/runatlantis.io/blog/2024/integrating-atlantis-with-opentofu.md b/runatlantis.io/blog/2024/integrating-atlantis-with-opentofu.md new file mode 100644 index 0000000000..574017fb71 --- /dev/null +++ b/runatlantis.io/blog/2024/integrating-atlantis-with-opentofu.md @@ -0,0 +1,200 @@ +--- +title: Integrating Atlantis with OpenTofu +lang: en-US +---
+
+# Integrating Atlantis with OpenTofu
+
+::: info
+This post was originally written on May 27th, 2024
+Original post:
+:::
+
+## What was our motivation?
+
+Due to the Terraform license change, many companies are migrating their IaC processes to OpenTofu. With this in mind, and knowing that many of them use Atlantis and Terraform for infrastructure delivery automation, I created this documentation showing how to integrate Atlantis with OpenTofu.
+
+Stack: Atlantis, Terragrunt, OpenTofu, GitHub, ALB, EKS.
+
+We will implement it with the Atlantis [Helm chart](https://www.runatlantis.io/docs/deployment.html#kubernetes-helm-chart):
+
+**1** - Add the runatlantis repository.
+
+```sh
+helm repo add runatlantis https://runatlantis.github.io/helm-charts
+```
+
+**2** - Create the file `values.yaml` by running:
+
+```sh
+helm inspect values runatlantis/atlantis > values.yaml
+```
+
+**3** - Edit the file `values.yaml` and add your access credentials and the secret that will be used in the Atlantis webhook configuration.
+See how to create a [GitHub App](https://docs.github.com/pt/apps/creating-github-apps/about-creating-github-apps).
+
+```yaml
+githubApp:
+  id: "CHANGE ME"
+  key: |
+    -----BEGIN RSA PRIVATE KEY-----
+    "CHANGE ME"
+    -----END RSA PRIVATE KEY-----
+  slug: atlantis
+  # Atlantis webhook secret
+  secret: "CHANGE ME"
+```
+
+**4** - Enter the GitHub org and the repositories that Atlantis will interact with in `orgAllowlist`:
+
+```yaml
+# All repositories in the org
+orgAllowlist: github.com/MY-ORG/*
+
+# or just one repository
+orgAllowlist: github.com/MY-ORG/MY-REPO-IAC
+
+# or all repositories that start with MY-REPO-IAC-
+orgAllowlist: github.com/MY-ORG/MY-REPO-IAC-*
+```
+
+**5** - Now let's configure the script that will be executed upon startup of the Atlantis init pod. In this step we download and install Terragrunt and OpenTofu, and include their binaries in the shared dir `/plugins`.
+
+```yaml
+initConfig:
+  enabled: true
+  image: alpine:latest
+  imagePullPolicy: IfNotPresent
+  # sharedDir is set as env var INIT_SHARED_DIR
+  sharedDir: /plugins
+  workDir: /tmp
+  sizeLimit: 250Mi
+  # example of how the script can be configured to install tools/providers required by the atlantis pod
+  script: |
+    #!/bin/sh
+    set -eoux pipefail
+
+    # terragrunt
+    TG_VERSION="0.55.10"
+    TG_SHA256_SUM="1ad609399352348a41bb5ea96fdff5c7a18ac223742f60603a557a54fc8c6cff"
+    TG_FILE="${INIT_SHARED_DIR}/terragrunt"
+    wget https://github.com/gruntwork-io/terragrunt/releases/download/v${TG_VERSION}/terragrunt_linux_amd64 -O "${TG_FILE}"
+    echo "${TG_SHA256_SUM}  ${TG_FILE}" | sha256sum -c
+    chmod 755 "${TG_FILE}"
+    terragrunt -v
+
+    # OpenTofu
+    TF_VERSION="1.6.2"
+    TF_FILE="${INIT_SHARED_DIR}/tofu"
+    wget https://github.com/opentofu/opentofu/releases/download/v${TF_VERSION}/tofu_${TF_VERSION}_linux_amd64.zip
+    unzip tofu_${TF_VERSION}_linux_amd64.zip
+    mv tofu ${INIT_SHARED_DIR}
+    chmod 755 "${TF_FILE}"
+    tofu -v
+```
+
+**6** - Here we configure the envs to avoid downloading alternative versions of Terraform and to tell Terragrunt where to find the OpenTofu binary.
+
+```yaml
+# envs
+environment:
+  ATLANTIS_TF_DOWNLOAD: false
+  TERRAGRUNT_TFPATH: /plugins/tofu
+```
+
+**7** - Last but not least, here we specify which Atlantis-side configurations we will have for the repositories.
+
+```yaml
+# repository config
+repoConfig: |
+  ---
+  repos:
+  - id: /.*/
+    apply_requirements: [approved, mergeable]
+    allow_custom_workflows: true
+    allowed_overrides: [workflow, apply_requirements, delete_source_branch_on_merge]
+```
+
+**8** - Configure the Atlantis webhook ingress. In the example below we are using the AWS ALB.
+
+```yaml
+# ingress config
+ingress:
+  annotations:
+    alb.ingress.kubernetes.io/backend-protocol: HTTP
+    alb.ingress.kubernetes.io/certificate-arn: arn:aws:acm:certificate
+    alb.ingress.kubernetes.io/group.name: external-atlantis
+    alb.ingress.kubernetes.io/healthcheck-path: /healthz
+    alb.ingress.kubernetes.io/healthcheck-port: "80"
+    alb.ingress.kubernetes.io/healthcheck-protocol: HTTP
+    alb.ingress.kubernetes.io/listen-ports: '[{"HTTPS":443}]'
+    alb.ingress.kubernetes.io/scheme: internet-facing
+    alb.ingress.kubernetes.io/ssl-redirect: "443"
+    alb.ingress.kubernetes.io/success-codes: "200"
+    alb.ingress.kubernetes.io/target-type: ip
+  apiVersion: networking.k8s.io/v1
+  enabled: true
+  host: atlantis.your.domain
+  ingressClassName: aws-ingress-class-name
+  path: /*
+  pathType: ImplementationSpecific
+```
+
+Save all changes made to `values.yaml`.
+
+**9** - Using the Atlantis custom workflows option, we can create an `atlantis.yaml` file in the root folder of your repository. The example below should cover most scenarios; adapt as needed.
+
+```yaml
+version: 3
+automerge: true
+parallel_plan: true
+parallel_apply: false
+projects:
+- name: terragrunt
+  dir: .
+  workspace: terragrunt
+  delete_source_branch_on_merge: true
+  autoplan:
+    enabled: false
+  apply_requirements: [mergeable, approved]
+  workflow: terragrunt
+workflows:
+  terragrunt:
+    plan:
+      steps:
+      - env:
+          name: TF_IN_AUTOMATION
+          value: 'true'
+      - run: find . -name '.terragrunt-cache' | xargs rm -rf
+      - run: terragrunt init -reconfigure
+      - run:
+          command: terragrunt plan -input=false -out=$PLANFILE
+          output: strip_refreshing
+    apply:
+      steps:
+      - run: terragrunt apply $PLANFILE
+```
+
+**10** - Now let's move on to the installation itself. Search for the available versions of Atlantis:
+
+```sh
+helm search repo runatlantis
+```
+
+Replace `CHART-VERSION` with the version you want to install and run the command below:
+
+```sh
+helm upgrade -i atlantis runatlantis/atlantis --version CHART-VERSION -f values.yaml -n atlantis --create-namespace
+```
+
+Now, see how to configure the Atlantis [webhook on your GitHub](../../docs/configuring-webhooks.md) repository.
+
+See how Atlantis [works](../../docs/using-atlantis.md).
+
+Find out more at:
+
+- .
+- .
+- .
+
+Share it with your friends =) diff --git a/runatlantis.io/contributing.md b/runatlantis.io/contributing.md new file mode 100644 index 0000000000..3d8e24de16 --- /dev/null +++ b/runatlantis.io/contributing.md @@ -0,0 +1,17 @@ +--- +aside: false +--- +# Atlantis Contributing Documentation
+
+These docs are for users who want to contribute to the Atlantis project. This
+can range from writing documentation, to helping the community on Slack, discussing
+issues, or writing code.
+
+:::tip Looking to get started or use Atlantis?
+If you're new, check out the [Guide](./guide.md) or the
+[Documentation](./docs.md).
+:::
+
+## Next Steps
+
+- [Events Controller](./contributing/events-controller.md)  –  How do the events work? diff --git a/runatlantis.io/contributing/events-controller.md b/runatlantis.io/contributing/events-controller.md new file mode 100644 index 0000000000..9827aa5cdb --- /dev/null +++ b/runatlantis.io/contributing/events-controller.md @@ -0,0 +1,108 @@ +# Events Controller
+
+Webhooks are the primary interaction between the Version Control System (VCS)
+and Atlantis. Each VCS sends its requests to the `/events` endpoint.
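+To make that concrete, a webhook delivery is just an HTTP POST. Below is a rough sketch of a GitHub comment event hitting Atlantis; the host is a placeholder and the payload is heavily abbreviated, since a real delivery carries the full event object and a valid HMAC signature computed with the webhook secret.
+
+```sh
+# Sketch only: hypothetical host, truncated body. The headers shown are the
+# ones GitHub attaches to its webhook deliveries.
+curl -X POST "https://atlantis.example.com/events" \
+  -H "Content-Type: application/json" \
+  -H "X-GitHub-Event: issue_comment" \
+  -H "X-Hub-Signature-256: sha256=<hmac-of-body>" \
+  -d '{"action": "created", "comment": {"body": "atlantis plan"}}'
+```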
The
+implementation of this endpoint can be found in the
+[events_controller.go](https://github.com/runatlantis/atlantis/blob/main/server/controllers/events/events_controller.go)
+file. This file contains the Post function
+`func (e *VCSEventsController) Post(w http.ResponseWriter, r *http.Request)`
+that parses the request according to the configured VCS.
+
+Atlantis currently handles the following events:
+
+- Comment Event
+- Pull Request Event
+
+All the other events are ignored.
+
+```mermaid
+---
+title: events controller flowchart
+---
+flowchart LR
+    events(/events - Endpoint) --> Comment_Event(Comment - Event)
+    events --> Pull_Request_Event(Pull Request - Event)
+
+    Comment_Event --> pre_workflow(pre-workflow - Hook)
+    pre_workflow --> plan(plan - command)
+    pre_workflow --> apply(apply - command)
+    pre_workflow --> approve_policies(approve policies - command)
+    pre_workflow --> unlock(unlock - command)
+    pre_workflow --> version(version - command)
+    pre_workflow --> import(import - command)
+    pre_workflow --> state(state - command)
+
+    plan --> post_workflow(post-workflow - Hook)
+    apply --> post_workflow
+    approve_policies --> post_workflow
+    unlock --> post_workflow
+    version --> post_workflow
+    import --> post_workflow
+    state --> post_workflow
+
+    Pull_Request_Event --> Open_Update_PR(Open / Update Pull Request)
+    Pull_Request_Event --> Close_PR(Close Pull Request)
+
+    Open_Update_PR --> pre_workflow(pre-workflow - Hook)
+    Close_PR --> plan(plan - command)
+
+    pre_workflow --> plan
+    plan --> post_workflow(post-workflow - Hook)
+
+    Close_PR --> CleanUpPull(CleanUpPull)
+    CleanUpPull --> post_workflow(post-workflow - Hook)
+```
+
+## Comment Event
+
+This event is triggered whenever a user enters a comment on the Pull Request,
+Merge Request, or whatever it's called for the respective VCS. After parsing the
+VCS-specific request, the code calls the `handleCommentEvent` function, which
+then passes the processing to the `handleCommentEvent` function in the
+[command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/command_runner.go)
+file. This function first calls the pre-workflow hooks, then executes one of the
+below-listed commands, and finally the post-workflow hooks.
+
+- [plan_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/plan_command_runner.go)
+- [apply_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/apply_command_runner.go)
+- [approve_policies_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/approve_policies_command_runner.go)
+- [unlock_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/unlock_command_runner.go)
+- [version_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/version_command_runner.go)
+- [import_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/import_command_runner.go)
+- [state_command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/state_command_runner.go)
+
+## Pull Request Event
+
+Before comment events on a Pull Request can be handled, the Pull Request must be
+created first. Atlantis also allows the running of commands for certain Pull
+Request events.
+
+<details>
+    <summary>Pull Request Webhooks</summary>
+
+The list below links to the supported VCSs and their Pull Request Webhook
+documentation.
+
+- [Azure DevOps Pull Request Created](https://learn.microsoft.com/en-us/azure/devops/service-hooks/events?view=azure-devops#pull-request-created)
+- [BitBucket Pull Request](https://support.atlassian.com/bitbucket-cloud/docs/event-payloads/#Pull-request-events)
+- [GitHub Pull Request](https://docs.github.com/en/webhooks/webhook-events-and-payloads#pull_request)
+- [GitLab Merge Request](https://docs.gitlab.com/ee/user/project/integrations/webhook_events.html#merge-request-events)
+- [Gitea Webhooks](https://docs.gitea.com/next/usage/webhooks)
+
+</details>
+
+The following list shows the supported events:
+
+- Opened Pull Request
+- Updated Pull Request
+- Closed Pull Request
+- Other Pull Request event
+
+The `RunAutoPlanCommand` function in the
+[command_runner.go](https://github.com/runatlantis/atlantis/blob/main/server/events/command_runner.go)
+file is called for the _Open_ and _Update_ Pull Request events. When enabled on
+the project, this automatically runs the `plan` for the specific repository.
+
+Whenever a Pull Request is closed, the `CleanUpPull` function in the
+[instrumented_pull_closed_executor.go](https://github.com/runatlantis/atlantis/blob/main/server/events/instrumented_pull_closed_executor.go)
+file is called. This function cleans up all the closed Pull Request files,
+locks, and other related information. diff --git a/runatlantis.io/contributing/glossary.md b/runatlantis.io/contributing/glossary.md new file mode 100644 index 0000000000..99c1e73287 --- /dev/null +++ b/runatlantis.io/contributing/glossary.md @@ -0,0 +1,26 @@ +# Glossary
+
+The Atlantis community uses many words and phrases to work more efficiently.
+You will find the most common ones and their meaning on this page.
+
+## Pull / Merge Request Event
+
+The different VCSs have different names for merging changes. Atlantis uses the
+name Pull Request as the abstraction. The VCS provider implements this
+abstraction and forwards the call to the respective function.
+
+## VCS
+
+VCS stands for Version Control System.
+
+Atlantis supports only git as a Version Control System. However, there is
+support for multiple VCS Providers. Currently, it supports the following
+providers:
+
+- [Azure DevOps](https://azure.microsoft.com/en-us/products/devops)
+- [BitBucket](https://bitbucket.org/)
+- [GitHub](https://github.com/)
+- [GitLab](https://gitlab.com/)
+- [Gitea](https://gitea.com/)
+
+The term VCS is used for both git and the different VCS providers. diff --git a/runatlantis.io/docs.md b/runatlantis.io/docs.md new file mode 100644 index 0000000000..23b27f1c32 --- /dev/null +++ b/runatlantis.io/docs.md @@ -0,0 +1,18 @@ +--- +aside: false +--- +# Atlantis Documentation
+
+These docs are for users that are ready to get Atlantis installed and start using it.
+
+:::tip Looking to get started?
+If you're new here, check out the [Guide](./guide.md)
+where you can try our [Test Drive](./guide/test-drive.md) or [Run Atlantis Locally](./guide/testing-locally.md).
+:::
+
+## Next Steps
+
+* [Installing Atlantis](./docs/installation-guide.md)  –  Get Atlantis up and running
+* [Configuring Atlantis](./docs/configuring-atlantis.md)  –  Configure how Atlantis works for your specific use-cases
+* [Using Atlantis](./docs/using-atlantis.md)  –  How do you use Atlantis?
+* [How Atlantis Works](./docs/how-atlantis-works.md)  –  Internals of what Atlantis is doing diff --git a/runatlantis.io/docs/README.md b/runatlantis.io/docs/README.md deleted file mode 100644 index 5527692cf5..0000000000 --- a/runatlantis.io/docs/README.md +++ /dev/null @@ -1,14 +0,0 @@ -# Atlantis Documentation
-
-These docs are for users that are ready to get Atlantis installed and start using it.
-
-:::tip Looking to get started?
-If you're new here, check out the [Guide](/guide/)
-where you can try our [Test Drive](/guide/test-drive.html) or [Run Atlantis Locally](/guide/testing-locally.html).
-:::
-
-### Next Steps
-* [Installing Atlantis](/docs/installation-guide.html)  –  Get Atlantis up and running
-* [Configuring Atlantis](configuring-atlantis.html)  –  Configure how Atlantis works for your specific use-cases
-* [Using Atlantis](using-atlantis.html)  –  How do you use Atlantis?
-* [How Atlantis Works](how-atlantis-works.html)  –  Internals of what Atlantis is doing diff --git a/runatlantis.io/docs/access-credentials.md b/runatlantis.io/docs/access-credentials.md index 9cd514fb70..7d7410ee03 100644 --- a/runatlantis.io/docs/access-credentials.md +++ b/runatlantis.io/docs/access-credentials.md @@ -1,10 +1,11 @@ # Git Host Access Credentials
-This page describes how to create credentials for your Git host (GitHub, GitLab, Bitbucket, or Azure DevOps)
+
+This page describes how to create credentials for your Git host (GitHub, GitLab, Gitea, Bitbucket, or Azure DevOps)
that Atlantis will use to make API calls.
-[[toc]]
## Create an Atlantis user (optional)
+
We recommend creating a new user named **@atlantis** (or something close) or using a dedicated CI user.
This isn't required (you can use an existing user or GitHub app credentials); however, all the comments that Atlantis writes
will come from that user so it might be confusing if it's coming from a personal account.

An example comment coming from the @atlantisbot user

## Generating an Access Token
+
Once you've created a new user (or decided to use an existing one), you need to
generate an access token. Read on for the instructions for your specific Git host:
+
* [GitHub](#github-user)
* [GitHub app](#github-app)
* [GitLab](#gitlab)
+* [Gitea](#gitea)
* [Bitbucket Cloud (bitbucket.org)](#bitbucket-cloud-bitbucket-org)
* [Bitbucket Server (aka Stash)](#bitbucket-server-aka-stash)
* [Azure DevOps](#azure-devops)

### GitHub user
-- Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token)
-- Create the token with **repo** scope
-- Record the access token
+
+* Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token)
+* Create the token with **repo** scope
+* Record the access token

::: warning
Your Atlantis user must also have "Write permissions" (for repos in an organization) or be a "Collaborator" (for repos in a user account) to be able to set commit statuses:
![Atlantis status](./images/status.png)
@@ -35,18 +40,18 @@ Your Atlantis user must also have "Write permissions" (for repos in an organizat

### GitHub app

#### Create the GitHub App Using Atlantis
+
::: warning
Available in Atlantis versions **newer** than 0.13.0.
:::

+* Start Atlantis with fake github username and token (`atlantis server --gh-user fake --gh-token fake --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`). If installing as an **Organization**, remember to add `--gh-org your-github-org` to this command.
+* Visit `https://$ATLANTIS_HOST/github-app/setup` and click on **Setup** to create the app on GitHub. You'll be redirected back to Atlantis
+* A link to install your app, along with its secrets, will be shown on the screen. Record your app's credentials and install your app for your user/org by following said link.
+* Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem`
+* Restart Atlantis with new flags: `atlantis server --gh-app-id <APP-ID> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <WEBHOOK-SECRET> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`.

-- Start Atlantis with fake github username and token (`atlantis server --gh-user fake --gh-token fake --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`). If installing as an **Organization**, remember to add `--gh-org your-github-org` to this command.
-- Visit `https://$ATLANTIS_HOST/github-app/setup` and click on **Setup** to create the app on GitHub. You'll be redirected back to Atlantis
-- A link to install your app, along with its secrets, will be shown on the screen. Record your app's credentials and install your app for your user/org by following said link.
-- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem`
-- Restart Atlantis with new flags: `atlantis server --gh-app-id <APP-ID> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <WEBHOOK-SECRET> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`.
-
- - NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file).
+ NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file).

::: warning
Only a single installation per GitHub App is supported at the moment.
:::

@@ -58,13 +63,13 @@ GitHub App handles the webhook calls by itself, hence there is no need to create

#### Manually Creating the GitHub app

-- Create the GitHub app as an Administrator
- - Ensure the app is registered / installed with the organization / user
- - See the GitHub app [documentation](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps)
-- Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem`
-- Start Atlantis with the following flags: `atlantis server --gh-app-id <APP-ID> --gh-installation-id <INSTALLATION-ID> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <WEBHOOK-SECRET> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`.
+* Create the GitHub app as an Administrator
+  * Ensure the app is registered / installed with the organization / user
+  * See the GitHub app [documentation](https://docs.github.com/en/apps/creating-github-apps/about-creating-github-apps/about-creating-github-apps)
+* Create a file with the contents of the GitHub App Key, e.g. `atlantis-app-key.pem`
+* Start Atlantis with the following flags: `atlantis server --gh-app-id <APP-ID> --gh-installation-id <INSTALLATION-ID> --gh-app-key-file atlantis-app-key.pem --gh-webhook-secret <WEBHOOK-SECRET> --write-git-creds --repo-allowlist 'github.com/your-org/*' --atlantis-url https://$ATLANTIS_HOST`.

- NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](/docs/server-configuration.html#config-file).
+ NOTE: Instead of using a file for the GitHub App Key you can also pass the key value directly using `--gh-app-key`. You can also create a config file instead of using flags. See [Server Configuration](server-configuration.md#config-file).

::: tip NOTE
Manually installing the GitHub app means that the credentials can be shared by many Atlantis installations. This has the benefit of centralizing repository access for shared modules / code.
@@ -92,47 +97,62 @@ Since v0.19.7, a new permission for `Administration` has been added. If you have
Since v0.22.3, a new permission for `Members` has been added, which is required for features that apply permissions to an organization's team members rather than individual users. Like the `Administration` permission above, updating Atlantis will not automatically add this permission, so if you wish to use features that rely on checking team membership you will need to add this manually.
::: -| Type | Access | -| --------------- | ------------------- | -| Administration | Read-only | -| Checks | Read and write | -| Commit statuses | Read and write | -| Contents | Read and write | -| Issues | Read and write | -| Metadata | Read-only (default) | -| Pull requests | Read and write | -| Webhooks | Read and write | -| Members | Read-only | +| Type | Access | +| --------------- | ------------------- | +| Administration | Read-only | +| Checks | Read and write | +| Commit statuses | Read and write | +| Contents | Read and write | +| Issues | Read and write | +| Metadata | Read-only (default) | +| Pull requests | Read and write | +| Webhooks | Read and write | +| Members | Read-only | ### GitLab -- Follow: [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) -- Create a token with **api** scope -- Record the access token + +* Follow: [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) +* Create a token with **api** scope +* Record the access token + +### Gitea + +* Go to "Profile and Settings" > "Settings" in Gitea (top-right) +* Go to "Applications" under "User Settings" in Gitea +* Create a token under the "Manage Access Tokens" with the following permissions: + * issue: Read and Write + * repository: Read and Write + * user: Read +* Record the access token ### Bitbucket Cloud (bitbucket.org) -- Create an App Password by following [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) -- Label the password "atlantis" -- Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them -- Record the access token + +* Create an App Password by following [BitBucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) +* Label the password "atlantis" +* Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them +* Record the access token ### Bitbucket Server (aka Stash) -- Click on your avatar in the top right and select **Manage account** -- Click **Personal access tokens** in the sidebar -- Click **Create a token** -- Name the token **atlantis** -- Give the token **Read** Project permissions and **Write** Pull request permissions -- Click **Create** and record the access token + +* Click on your avatar in the top right and select **Manage account** +* Click **Personal access tokens** in the sidebar +* Click **Create a token** +* Name the token **atlantis** +* Give the token **Read** Project permissions and **Write** Pull request permissions +* Click **Create** and record the access token NOTE: Atlantis will send the token as a [Bearer Auth to the Bitbucket API](https://confluence.atlassian.com/bitbucketserver/http-access-tokens-939515499.html#HTTPaccesstokens-UsingHTTPaccesstokens) instead of using Basic Auth. 
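+For illustration only, the header shape that the NOTE above describes looks like the sketch below. The host and token are hypothetical placeholders, and the endpoint is just an example Bitbucket Server REST call:

+```sh
+# Bearer auth (as Atlantis uses), rather than Basic auth
+curl -H "Authorization: Bearer $BITBUCKET_TOKEN" \
+  "https://bitbucket.example.com/rest/api/1.0/projects"
+```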
### Azure DevOps -- Create a Personal access token by following [https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops) -- Label the password "atlantis" -- The minimum scopes required for this token are: - - Code (Read & Write) - - Code (Status) - - Member Entitlement Management (Read) -- Record the access token + +* Create a Personal access token by following [Azure DevOps: Use personal access tokens to authenticate](https://docs.microsoft.com/en-us/azure/devops/organizations/accounts/use-personal-access-tokens-to-authenticate?view=azure-devops) +* Label the password "atlantis" +* The minimum scopes required for this token are: + * Code (Read & Write) + * Code (Status) + * Member Entitlement Management (Read) +* Record the access token ## Next Steps -Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.html). + +Once you've got your user and access token, you're ready to create a webhook secret. See [Creating a Webhook Secret](webhook-secrets.md). diff --git a/runatlantis.io/docs/api-endpoints.md b/runatlantis.io/docs/api-endpoints.md index 96dd6d0b51..ce622979da 100644 --- a/runatlantis.io/docs/api-endpoints.md +++ b/runatlantis.io/docs/api-endpoints.md @@ -9,7 +9,7 @@ To enable the API endpoints, `api-secret` should be configured. :::tip Prerequisites -* Set `api-secret` as part of the [Server Configuration](server-configuration.html#api-secret) +* Set `api-secret` as part of the [Server Configuration](server-configuration.md#api-secret) * Pass `X-Atlantis-Token` with the same secret in the request header ::: @@ -17,22 +17,22 @@ To enable the API endpoints, `api-secret` should be configured. #### Description -Execute [atlantis plan](using-atlantis.html#atlantis-plan) on the specified repository. +Execute [atlantis plan](using-atlantis.md#atlantis-plan) on the specified repository. #### Parameters -| Name | Type | Required | Description | -|------------|-------------------------------------|----------|------------------------------------------| -| Repository | string | Yes | Name of the Terraform repository | -| Ref | string | Yes | Git reference, like a branch name | -| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | -| Paths | [ [Path](api-endpoints.html#path) ] | Yes | Paths to the projects to run the plan | -| PR | int | No | Pull Request number | +| Name | Type | Required | Description | +|------------|---------|----------|------------------------------------------| +| Repository | string | Yes | Name of the Terraform repository | +| Ref | string | Yes | Git reference, like a branch name | +| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | +| Paths | Path | Yes | Paths to the projects to run the plan | +| PR | int | No | Pull Request number | -##### Path +#### Path -Similar to the [Options](using-atlantis.html#options) of `atlantis plan`. Path specifies which directory/workspace -within the repository to run the plan. +Similar to the [Options](using-atlantis.md#options) of `atlantis plan`. Path specifies which directory/workspace +within the repository to run the plan. At least one of `Directory` or `Workspace` should be specified. 
| Name | Type | Required | Description | @@ -92,22 +92,22 @@ curl --request POST 'https:///api/plan' \ #### Description -Execute [atlantis apply](using-atlantis.html#atlantis-apply) on the specified repository. +Execute [atlantis apply](using-atlantis.md#atlantis-apply) on the specified repository. #### Parameters -| Name | Type | Required | Description | -|------------|---------------------------------------|----------|------------------------------------------| -| Repository | string | Yes | Name of the Terraform repository | -| Ref | string | Yes | Git reference, like a branch name | -| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | -| Paths | [ [Path](api-endpoints.html#path-1) ] | Yes | Paths to the projects to run the apply | -| PR | int | No | Pull Request number | +| Name | Type | Required | Description | +|------------|--------|----------|------------------------------------------| +| Repository | string | Yes | Name of the Terraform repository | +| Ref | string | Yes | Git reference, like a branch name | +| Type | string | Yes | Type of the VCS provider (Github/Gitlab) | +| Paths | Path | Yes | Paths to the projects to run the apply | +| PR | int | No | Pull Request number | -##### Path +#### Path -Similar to the [Options](using-atlantis.html#options-1) of `atlantis apply`. Path specifies which directory/workspace -within the repository to run the apply. +Similar to the [Options](using-atlantis.md#options-1) of `atlantis apply`. Path specifies which directory/workspace +within the repository to run the apply. At least one of `Directory` or `Workspace` should be specified. | Name | Type | Required | Description | diff --git a/runatlantis.io/docs/apply-requirements.md b/runatlantis.io/docs/apply-requirements.md index 870ac4972e..166931851d 100644 --- a/runatlantis.io/docs/apply-requirements.md +++ b/runatlantis.io/docs/apply-requirements.md @@ -1,5 +1,5 @@ # Apply Requirements :::warning REDIRECT -This page is moved to [Command Requirements](/docs/command-requirements.html). +This page is moved to [Command Requirements](command-requirements.md). ::: diff --git a/runatlantis.io/docs/automerging.md b/runatlantis.io/docs/automerging.md index 1e0b21ba77..2716a572ee 100644 --- a/runatlantis.io/docs/automerging.md +++ b/runatlantis.io/docs/automerging.md @@ -1,44 +1,53 @@ # Automerging + Atlantis can be configured to automatically merge a pull request after all plans have been successfully applied. - ![Automerge](./images/automerge.png) ## How To Enable + Automerging can be enabled either by: + 1. Passing the `--automerge` flag to `atlantis server`. This sets the parameter globally; however, explicit declaration in the repo config will be respected and take priority. 1. Setting `automerge: true` in the repo's `atlantis.yaml` file: + ```yaml version: 3 automerge: true projects: - dir: . ``` + :::tip NOTE If a repo has an `atlantis.yaml` file, then each project in the repo needs to be configured under the `projects` key. ::: ## How to Disable + If automerge is enabled, you can disable it for a single `atlantis apply` command with the `--auto-merge-disabled` option. ## Requirements ### All Plans Must Succeed + When automerge is enabled, **all plans** in a pull request **must succeed** before **any** plans can be applied. For example, imagine this scenario: + 1. I open a pull request that makes changes to two Terraform projects, in `dir1/` and `dir2/`. 1. The plan for `dir2/` fails because my Terraform syntax is wrong. 
In this scenario, I can't run
-```
+
+```shell
 atlantis apply -d dir1
 ```
+
even though that plan succeeded, because **all** plans must succeed for **any**
plans to be saved.
@@ -47,8 +56,9 @@ autoplan. Then I will be able to apply both plans.

### All Plans must be applied

-If multiple projects/dirs/workspaces are configured to be planned automatically,
+If multiple projects/dirs/workspaces are configured to be planned automatically,
then they should all be applied before Atlantis automatically merges the PR.

## Permissions
+
The Atlantis VCS user must have the ability to merge pull requests. diff --git a/runatlantis.io/docs/autoplanning.md b/runatlantis.io/docs/autoplanning.md index 2183219703..b4657d801a 100644 --- a/runatlantis.io/docs/autoplanning.md +++ b/runatlantis.io/docs/autoplanning.md @@ -1,8 +1,10 @@ # Autoplanning
+
On any **new** pull request or **new commit** to an existing pull request, Atlantis will attempt to
run `terraform plan` in the directories it thinks hold modified Terraform projects.

The algorithm it uses is as follows:
+
1. Get list of all modified files in pull request
1. Filter to those containing `.tf`
1. Get the directories that those files are in
@@ -11,8 +13,10 @@ The algorithm it uses is as follows:
 contains a `main.tf` run plan in that directory, otherwise ignore the change (see below for exceptions).

## Example
+
Given the directory structure:
-```
+
+```plain
 .
 ├── modules
 │   └── module1
@@ -26,21 +30,25 @@ Given the directory structure:

* If `project1/main.tf` were modified, we would run `plan` in `project1`
* If `modules/module1/main.tf` were modified, we would not automatically run `plan` because we couldn't determine the location of the terraform project
-  * You could use an [atlantis.yaml](repo-level-atlantis-yaml.html#configuring-planning) file to specify which projects to plan when this module changed
-  * You could enable [module autoplanning](server-configuration.html#autoplan-modules) which indexes projects to their local module dependencies.
-  * Or you could manually plan with `atlantis plan -d <dir>`
+  * You could use an [atlantis.yaml](repo-level-atlantis-yaml.md#configuring-planning) file to specify which projects to plan when this module changed
+  * You could enable [module autoplanning](server-configuration.md#autoplan-modules) which indexes projects to their local module dependencies.
+  * Or you could manually plan with `atlantis plan -d <dir>`
* If `project1/modules/module1/main.tf` were modified, we would look one level above `project1/modules`
 into `project1/`, see that there was a `main.tf` file and so run plan in `project1/`

## Bitbucket-Specific Notes
+
Bitbucket does not have a webhook that triggers only upon a new PR or commit. To fix this we cache the last commit to see if it has changed. If the cache is emptied, Atlantis will think your commit is new and you may see extra plans.
This scenario can happen if:
+
* Atlantis restarts
* You are running multiple Atlantis instances behind a load balancer

## Customizing
+
If you would like to customize how Atlantis determines which directory to run in
or disable it altogether, you need to create an `atlantis.yaml` file.
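+For example, a minimal sketch of such a file, reusing the example tree above (the project and module names are illustrative; see the pages below for the full syntax):
+
+```sh
+# Write a repo-level atlantis.yaml that also re-plans project1 when module1 changes
+cat > atlantis.yaml <<'EOF'
+version: 3
+projects:
+- dir: project1
+  autoplan:
+    enabled: true
+    when_modified: ["*.tf", "../modules/module1/*.tf"]
+EOF
+```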
See
-* [Disabling Autoplanning](repo-level-atlantis-yaml.html#disabling-autoplanning)
-* [Configuring Planning](repo-level-atlantis-yaml.html#configuring-planning)
+
+* [Disabling Autoplanning](repo-level-atlantis-yaml.md#disabling-autoplanning)
+* [Configuring Planning](repo-level-atlantis-yaml.md#configuring-planning) diff --git a/runatlantis.io/docs/checkout-strategy.md b/runatlantis.io/docs/checkout-strategy.md index 066f7444f0..5c38586a4c 100644 --- a/runatlantis.io/docs/checkout-strategy.md +++ b/runatlantis.io/docs/checkout-strategy.md @@ -7,6 +7,7 @@ variable that get passed to the `atlantis server` command.
Atlantis supports `branch` and `merge` strategies.

## Branch
+
If set to `branch` (the default), Atlantis will check out the source branch
of the pull request.

@@ -17,6 +18,7 @@ If the pull request was asking to merge `branch` into `main`, Atlantis would
check out `branch` at commit `C3`.

## Merge
+
The problem with the `branch` strategy is that if users push branches that
are out of date with `main`, then their `terraform plan` could end up deleting some
resources that were configured in the main branch.

@@ -49,9 +51,9 @@ commit is pushed to `main` **after** Atlantis runs `plan`, nothing will happen.
To optimize cloning time, Atlantis can perform a shallow clone by specifying the `--checkout-depth` flag. The cloning is performed in the following manner:

-- Shallow clone of the default branch is performed with depth of `--checkout-depth` value of zero (full clone).
-- `branch` is retrieved, including the same amount of commits.
-- Merge base of the default branch and `branch` is checked for existence in the shallow clone.
-- If the merge base is not present, it means that either of the branches are ahead of the merge base by more than `--checkout-depth` commits. In this case full repo history is fetched.
+* Shallow clone of the default branch is performed with a depth of the `--checkout-depth` value (a value of zero means a full clone).
+* `branch` is retrieved, including the same number of commits.
+* Merge base of the default branch and `branch` is checked for existence in the shallow clone.
+* If the merge base is not present, it means that either of the branches is ahead of the merge base by more than `--checkout-depth` commits. In this case the full repo history is fetched.

-If the commit history often diverges by more than the default checkout depth then the `--checkout-depth` flag should be tuned to avoid full fetches. \ No newline at end of file
+If the commit history often diverges by more than the default checkout depth, then the `--checkout-depth` flag should be tuned to avoid full fetches. diff --git a/runatlantis.io/docs/command-requirements.md b/runatlantis.io/docs/command-requirements.md index e3aea4ea21..bbf12ede00 100644 --- a/runatlantis.io/docs/command-requirements.md +++ b/runatlantis.io/docs/command-requirements.md @@ -1,7 +1,7 @@ # Command Requirements
-[[toc]]

## Intro
+
Atlantis requires certain conditions be satisfied **before** `atlantis apply` and `atlantis import`
commands can be run:

@@ -10,31 +10,41 @@ commands can be run:
* [UnDiverged](#undiverged) - requires pull requests to be ahead of the base branch

## What Happens If The Requirement Is Not Met?
+
If the requirement is not met, users will see an error if they try to run `atlantis apply`:
![Mergeable Apply Requirement](./images/apply-requirement.png)

## Supported Requirements
+
### Approved
+
The `approved` requirement will prevent applies unless the pull request is approved
by at least one person other than the author.
#### Usage
+
Set the `approved` requirement by:
+
1. Creating a `repos.yaml` file with the `apply_requirements` key:

   ```yaml
   repos:
   - id: /.*/
     apply_requirements: [approved]
   ```

1. Or by allowing an `atlantis.yaml` file to specify the `apply_requirements` key in the `repos.yaml` config:

   **repos.yaml**

   ```yaml
   repos:
   - id: /.*/
     allowed_overrides: [apply_requirements]
   ```

   **atlantis.yaml**

   ```yaml
   version: 3
   projects:
@@ -43,7 +53,9 @@ The `approved` requirement by:
   ```

#### Meaning
+
Each VCS provider has different rules around who can approve:
+
* **GitHub** – **Any user with read permissions** to the repo can approve a pull request
* **GitLab** – The user who can approve can be set in the [repo settings](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/)
* **Bitbucket Cloud (bitbucket.org)** – A user can approve their own pull request but
@@ -57,11 +69,15 @@ To require **certain people** to approve the pull request, look at the
:::

### Mergeable
+
The `mergeable` requirement will prevent applies unless a pull request is able to be merged.

#### Usage
+
Set the `mergeable` requirement by:
+
1. Creating a `repos.yaml` file with the `apply_requirements` key:

   ```yaml
   repos:
   - id: /.*/
@@ -69,14 +85,17 @@ Set the `mergeable` requirement by:
   ```

1. Or by allowing an `atlantis.yaml` file to specify `plan_requirements`, `apply_requirements` and `import_requirements` keys in the `repos.yaml` config:

   **repos.yaml**

   ```yaml
   repos:
   - id: /.*/
     allowed_overrides: [plan_requirements, apply_requirements, import_requirements]
   ```

   **atlantis.yaml**

   ```yaml
   version: 3
   projects:
@@ -87,25 +106,28 @@ Set the `mergeable` requirement by:
   ```

#### Meaning
+
Each VCS provider has a different concept of "mergeability":

::: warning
Some VCS providers have a feature for branch protection to control "mergeability". To use it,
limit the base branch so as not to bypass the branch protection.
-See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.html#reference) for more details.
+See also the `branch` keyword in [Server Side Repo Config](server-side-repo-config.md#reference) for more details.
:::

#### GitHub
+
In GitHub, if you're not using [Protected Branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches) then
all pull requests are mergeable unless there is a conflict.

If you set up Protected Branches then you can enforce:
+
* Requiring certain status checks to be passing
* Requiring certain people to have reviewed and approved the pull request
* Requiring `CODEOWNERS` to have reviewed and approved the pull request
-* Requiring that the branch is up to date with `main`
+* Requiring that the branch is up-to-date with `main`

-See [https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches)
+See [GitHub: About protected branches](https://docs.github.com/en/repositories/configuring-branches-and-merges-in-your-repository/defining-the-mergeability-of-pull-requests/about-protected-branches)
::: warning @@ -119,6 +141,7 @@ If you set `atlantis/apply` to the mergeable requirement, use the `--gh-allow-me ::: #### GitLab + For GitLab, a merge request will be merged if there are no conflicts, no unresolved discussions if it is a project requirement and if all necessary approvers have approved the pull request. For pipelines, if the project requires that pipelines must succeed, all builds except the apply command status will be checked. @@ -126,6 +149,7 @@ For pipelines, if the project requires that pipelines must succeed, all builds e For Jobs with allow_failure setting set to true, will be ignored. If the pipeline has been skipped and the project allows merging, it will be marked as mergeable. #### Bitbucket.org (Bitbucket Cloud) and Bitbucket Server (Stash) + For Bitbucket, we just check if there is a conflict that is preventing a merge. We don't check anything else because Bitbucket's API doesn't support it. @@ -133,9 +157,11 @@ If you need a specific check, please [open an issue](https://github.com/runatlantis/atlantis/issues/new). #### Azure DevOps + In Azure DevOps, all pull requests are mergeable unless there is a conflict. You can set a pull request to "Complete" right away, or set "Auto-Complete", which will merge after all branch policies are met. See [Review code with pull requests](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops). [Branch policies](https://docs.microsoft.com/en-us/azure/devops/repos/git/branch-policies?view=azure-devops) can: + * Require a minimum number of reviewers * Allow users to approve their own changes * Allow completion even if some reviewers vote "Waiting" or "Reject" @@ -147,12 +173,16 @@ At this time, the Azure DevOps client only supports merging using the default 'n ::: ### UnDiverged + Prevent applies if there are any changes on the base branch since the most recent plan. Applies to `merge` checkout strategy only which you need to set via `--checkout-strategy` flag. #### Usage + You can set the `undiverged` requirement by: + 1. Creating a `repos.yaml` file with `plan_requirements`, `apply_requirements` and `import_requirements` keys: + ```yaml repos: - id: /.*/ @@ -160,15 +190,19 @@ You can set the `undiverged` requirement by: apply_requirements: [undiverged] import_requirements: [undiverged] ``` + 1. Or by allowing an `atlantis.yaml` file to specify the `plan_requirements`, `apply_requirements` and `import_requirements` keys in your `repos.yaml` config: - #### repos.yaml + + **repos.yaml** + ```yaml repos: - id: /.*/ allowed_overrides: [plan_requirements, apply_requirements, import_requirements] ``` - #### atlantis.yaml + **atlantis.yaml** + ```yaml version: 3 projects: @@ -177,7 +211,9 @@ You can set the `undiverged` requirement by: apply_requirements: [undiverged] import_requirements: [undiverged] ``` + #### Meaning + The `merge` checkout strategy creates a temporary merge commit and runs the `plan` on the Atlantis local version of the PR source and destination branch. The local destination branch can become out of date since changes to the destination branch are not fetched if there are no changes to the source branch. `undiverged` enforces that Atlantis local version of main is up to date @@ -185,16 +221,21 @@ with remote so that the state of the source during the `apply` is identical to t time. ## Setting Command Requirements + As mentioned above, you can set command requirements via flags, in `repos.yaml`, or in `atlantis.yaml` if `repos.yaml` allows the override. 
### Flags Override + Flags **override** any `repos.yaml` or `atlantis.yaml` settings so they are equivalent to always having that apply requirement set. ### Project-Specific Settings + If you only want some projects/repos to have apply requirements, then you must + 1. Specifying which repos have which requirements via the `repos.yaml` file. + ```yaml repos: - id: /.*/ @@ -221,7 +262,9 @@ If you only want some projects/repos to have apply requirements, then you must config. For example if I have two directories, `staging` and `production`, I might use: - #### repos.yaml + + **repos.yaml:** + ```yaml repos: - id: /.*/ @@ -229,7 +272,8 @@ If you only want some projects/repos to have apply requirements, then you must # Allow any repo to specify apply_requirements in atlantis.yaml ``` - #### atlantis.yaml + **atlantis.yaml:** + ```yaml version: 3 projects: @@ -248,14 +292,17 @@ If you only want some projects/repos to have apply requirements, then you must ``` ### Multiple Requirements + You can set any or all of `approved`, `mergeable`, and `undiverged` requirements. ## Who Can Apply? + Once the apply requirement is satisfied, **anyone** that can comment on the pull request can run the actual `atlantis apply` command. ## Next Steps -* For more information on GitHub pull request reviews and approvals see: [https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews) -* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [https://docs.gitlab.com/ee/user/project/merge_requests/approvals/](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/). -* For more information on Bitbucket pull request reviews and approvals see: [https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html](https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html) -* For more information on Azure DevOps pull request reviews and approvals see: [https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser) + +* For more information on GitHub pull request reviews and approvals see: [GitHub: About pull request reviews](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/reviewing-changes-in-pull-requests/about-pull-request-reviews) +* For more information on GitLab merge request reviews and approvals (only supported on GitLab Enterprise) see: [GitLab: Merge request approvals](https://docs.gitlab.com/ee/user/project/merge_requests/approvals/). 
+* For more information on Bitbucket pull request reviews and approvals see: [BitBucket: Use pull requests for code review](https://confluence.atlassian.com/bitbucket/pull-requests-and-code-review-223220593.html) +* For more information on Azure DevOps pull request reviews and approvals see: [Azure DevOps: Create pull requests](https://docs.microsoft.com/en-us/azure/devops/repos/git/pull-requests?view=azure-devops&tabs=browser) diff --git a/runatlantis.io/docs/configuring-atlantis.md b/runatlantis.io/docs/configuring-atlantis.md index 46edbbbc3c..c40e55560c 100644 --- a/runatlantis.io/docs/configuring-atlantis.md +++ b/runatlantis.io/docs/configuring-atlantis.md @@ -1,25 +1,29 @@ # Configuring Atlantis There are three methods for configuring Atlantis: + 1. Passing flags to the `atlantis server` command 1. Creating a server-side repo config file and using the `--repo-config` flag 1. Placing an `atlantis.yaml` file at the root of your Terraform repositories ## Flags + Flags to `atlantis server` are used to configure the global operation of Atlantis, for example setting credentials for your Git Host or configuring SSL certs. -See [Server Configuration](server-configuration.html) for more details. +See [Server Configuration](server-configuration.md) for more details. ## Server-Side Repo Config + A Server-Side Repo Config file is used to control per-repo behaviour and what users can do in repo-level `atlantis.yaml` files. -See [Server-Side Repo Config](server-side-repo-config.html) for more details. +See [Server-Side Repo Config](server-side-repo-config.md) for more details. ## Repo-Level `atlantis.yaml` Files + `atlantis.yaml` files placed at the root of your Terraform repos can be used to change the default Atlantis behaviour for each repo. -See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.html) for more details. +See [Repo-Level atlantis.yaml Files](repo-level-atlantis-yaml.md) for more details. diff --git a/runatlantis.io/docs/configuring-webhooks.md b/runatlantis.io/docs/configuring-webhooks.md index be285ef6bc..295b50f437 100644 --- a/runatlantis.io/docs/configuring-webhooks.md +++ b/runatlantis.io/docs/configuring-webhooks.md @@ -1,16 +1,18 @@ # Configuring Webhooks + Atlantis needs to receive Webhooks from your Git host so that it can respond to pull request events. :::tip Prerequisites + * You have created an [access credential](access-credentials.md) * You have created a [webhook secret](webhook-secrets.md) * You have [deployed](deployment.md) Atlantis and have a url for it ::: See the instructions for your specific provider below. -[[toc]] ## GitHub/GitHub Enterprise + You can install your webhook at the [organization](https://docs.github.com/en/get-started/learning-about-github/types-of-github-accounts) level, or for each individual repository. ::: tip NOTE @@ -22,101 +24,130 @@ When authenticating as a GitHub App, Webhooks are automatically created and need If you're installing on the organization, navigate to your organization's page and click **Settings**. If installing on a single repository, navigate to the repository home page and click **Settings**. -- Select **Webhooks** or **Hooks** in the sidebar -- Click **Add webhook** -- set **Payload URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. 
-- set **Content type** to `application/json`
-- set **Secret** to the Webhook Secret you generated previously
-  - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
-- select **Let me select individual events**
-- check the boxes
-  - **Pull request reviews**
-  - **Pushes**
-  - **Issue comments**
-  - **Pull requests**
-- leave **Active** checked
-- click **Add webhook**
-- See [Next Steps](#next-steps)
+
+* Select **Webhooks** or **Hooks** in the sidebar
+* Click **Add webhook**
+* set **Payload URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
+* double-check you added `/events` to the end of your URL.
+* set **Content type** to `application/json`
+* set **Secret** to the Webhook Secret you generated previously
+  * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
+* select **Let me select individual events**
+* check the boxes
+  * **Pull request reviews**
+  * **Pushes**
+  * **Issue comments**
+  * **Pull requests**
+* leave **Active** checked
+* click **Add webhook**
+* See [Next Steps](#next-steps)

## GitLab
+
If you're using GitLab, navigate to your project's home page in GitLab

-- Click **Settings > Webhooks** in the sidebar
-- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
-- double-check you added `/events` to the end of your URL.
-- set **Secret Token** to the Webhook Secret you generated previously
-  - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
-- check the boxes
-  - **Push events**
-  - **Comments**
-  - **Merge Request events**
-- leave **Enable SSL verification** checked
-- click **Add webhook**
-- See [Next Steps](#next-steps)
+
+* Click **Settings > Webhooks** in the sidebar
+* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
+* double-check you added `/events` to the end of your URL.
+* set **Secret Token** to the Webhook Secret you generated previously
+  * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
+* check the boxes
+  * **Push events**
+  * **Comments**
+  * **Merge Request events**
+* leave **Enable SSL verification** checked
+* click **Add webhook**
+* See [Next Steps](#next-steps)

+## Gitea
+
+If you're using Gitea, navigate to your project's home page in Gitea
+
+* Click **Settings** in the top bar and then **Webhooks** in the sidebar
+* Click **Add webhook > Gitea** (Gitea webhooks are service-specific, but this works)
+* set **Target URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`**
+* double-check you added `/events` to the end of your URL.
+* set **Secret** to the Webhook Secret you generated previously
+  * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret.
+* Select **Custom Events...** +* Check the boxes + * **Repository events > Push** + * **Issue events > Issue Comment** + * **Pull Request events > Pull Request** + * **Pull Request events > Pull Request Comment** + * **Pull Request events > Pull Request Reviewed** + * **Pull Request events > Pull Request Synchronized** +* Leave **Active** checked +* Click **Add Webhook** +* See [Next Steps](#next-steps) ## Bitbucket Cloud (bitbucket.org) -- Go to your repo's home page -- Click **Settings** in the sidebar -- Click **Webhooks** under the **WORKFLOW** section -- Click **Add webhook** -- Enter "Atlantis" for **Title** -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- double-check you added `/events` to the end of your URL. -- Keep **Status** as Active -- Don't check **Skip certificate validation** because NGROK has a valid cert. -- Select **Choose from a full list of triggers** -- Under **Repository** **un**check everything -- Under **Issues** leave everything **un**checked -- Under **Pull Request**, select: Created, Updated, Merged, Declined and Comment created -- Click **Save** + +* Go to your repo's home page +* Click **Settings** in the sidebar +* Click **Webhooks** under the **WORKFLOW** section +* Click **Add webhook** +* Enter "Atlantis" for **Title** +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* double-check you added `/events` to the end of your URL. +* Keep **Status** as Active +* Don't check **Skip certificate validation** because NGROK has a valid cert. +* Select **Choose from a full list of triggers** +* Under **Repository** **un**check everything +* Under **Issues** leave everything **un**checked +* Under **Pull Request**, select: Created, Updated, Merged, Declined and Comment created +* Click **Save** Bitbucket Webhook -- See [Next Steps](#next-steps) +* See [Next Steps](#next-steps) ## Bitbucket Server (aka Stash) -- Go to your repo's home page -- Click **Settings** in the sidebar -- Click **Webhooks** under the **WORKFLOW** section -- Click **Create webhook** -- Enter "Atlantis" for **Name** -- set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** -- Double-check you added `/events` to the end of your URL. -- Set **Secret** to the Webhook Secret you generated previously - - **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. -- Under **Pull Request**, select: Opened, Source branch updated, Merged, Declined, Deleted and Comment added -- Click **Save**Bitbucket Webhook -- See [Next Steps](#next-steps) + +* Go to your repo's home page +* Click **Settings** in the sidebar +* Click **Webhooks** under the **WORKFLOW** section +* Click **Create webhook** +* Enter "Atlantis" for **Name** +* set **URL** to `http://$URL/events` (or `https://$URL/events` if you're using SSL) where `$URL` is where Atlantis is hosted. **Be sure to add `/events`** +* Double-check you added `/events` to the end of your URL. +* Set **Secret** to the Webhook Secret you generated previously + * **NOTE** If you're adding a webhook to multiple repositories, each repository will need to use the **same** secret. 
+* Under **Pull Request**, select: Opened, Source branch updated, Merged, Declined, Deleted and Comment added +* Click **Save**Bitbucket Webhook +* See [Next Steps](#next-steps) ## Azure DevOps + Webhooks are installed at the [team project](https://docs.microsoft.com/en-us/azure/devops/organizations/projects/about-projects?view=azure-devops) level, but may be restricted to only fire based on events pertaining to [specific repos](https://docs.microsoft.com/en-us/azure/devops/service-hooks/services/webhooks?view=azure-devops) within the team project. -- Navigate anywhere within a team project, ie: `https://dev.azure.com/orgName/projectName/_git/repoName` -- Select **Project settings** in the lower-left corner -- Select **Service hooks** - - If you see the message "You do not have sufficient permissions to view or configure subscriptions." you need to ensure your user is a member of either the organization's "Project Collection Administrators" group or the project's "Project Administrators" group. - - To add your user to the Project Collection Build Administrators group, navigate to the organization level, click **Organization Settings** and then click **Permissions**. You should be at `https://dev.azure.com//_settings/groups`. Now click on the **\/Project Collection Administrators** group and add your user as a member. - - To add your user to the Project Administrators group, navigate to the project level, click **Project Settings** and then click **Permissions**. You should be at `https://dev.azure.com///_settings/permissions`. Now click on the **\/Project Administrators** group and add your user as a member. -- Click **Create subscription** or the green plus icon to add a new webhook -- Scroll to the bottom of the list and select **Web Hooks** -- Click **Next** -- Under "Trigger on this type of event", select **Pull request created** - - Optionally, select a repository under **Filters** to restrict the scope of this webhook subscription to a specific repository -- Click **Next** -- Set **URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. Note that SSL, or `https://$URL/events`, is required if you set a Basic username and password for the webhook). **Be sure to add `/events`** -- It is strongly recommended to set a Basic Username and Password for all webhooks -- Leave all three drop-down menus for `...to send` set to **All** -- Resource version should be set to **1.0** for `Pull request created` and `Pull request updated` event types and **2.0** for `Pull request commented on` -- **NOTE** If you're adding a webhook to multiple team projects or repositories (using filters), each repository will need to use the **same** basic username and password. -- Click **Finish** +* Navigate anywhere within a team project, ie: `https://dev.azure.com/orgName/projectName/_git/repoName` +* Select **Project settings** in the lower-left corner +* Select **Service hooks** + * If you see the message "You do not have sufficient permissions to view or configure subscriptions." you need to ensure your user is a member of either the organization's "Project Collection Administrators" group or the project's "Project Administrators" group. + * To add your user to the Project Collection Build Administrators group, navigate to the organization level, click **Organization Settings** and then click **Permissions**. You should be at `https://dev.azure.com//_settings/groups`. Now click on the **\/Project Collection Administrators** group and add your user as a member. 
+    * To add your user to the Project Administrators group, navigate to the project level, click **Project Settings** and then click **Permissions**. You should be at `https://dev.azure.com///_settings/permissions`. Now click on the **\/Project Administrators** group and add your user as a member.
+* Click **Create subscription** or the green plus icon to add a new webhook
+* Scroll to the bottom of the list and select **Web Hooks**
+* Click **Next**
+* Under "Trigger on this type of event", select **Pull request created**
+  * Optionally, select a repository under **Filters** to restrict the scope of this webhook subscription to a specific repository
+* Click **Next**
+* Set **URL** to `http://$URL/events` where `$URL` is where Atlantis is hosted. Note that SSL (i.e. `https://$URL/events`) is required if you set a Basic username and password for the webhook. **Be sure to add `/events`**
+* It is strongly recommended to set a Basic Username and Password for all webhooks
+* Leave all three drop-down menus for `...to send` set to **All**
+* Resource version should be set to **1.0** for `Pull request created` and `Pull request updated` event types and **2.0** for `Pull request commented on`
+* **NOTE** If you're adding a webhook to multiple team projects or repositories (using filters), each repository will need to use the **same** basic username and password.
+* Click **Finish**

Repeat the process above until you have webhook subscriptions for the following event types that will trigger on all repositories Atlantis will manage:

-- Pull request created (you just added this one)
-- Pull request updated
-- Pull request commented on
+* Pull request created (you just added this one)
+* Pull request updated
+* Pull request commented on

-- See [Next Steps](#next-steps)
+* See [Next Steps](#next-steps)

## Next Steps
+
* To verify that Atlantis is receiving your webhooks, create a test pull request to your repo.
* You should see the request show up in the Atlantis logs at an `INFO` level.
* You'll now need to configure Atlantis to add your [Provider Credentials](provider-credentials.md)

diff --git a/runatlantis.io/docs/custom-policy-checks.md b/runatlantis.io/docs/custom-policy-checks.md
index 9c6d362803..4c353335c7 100644
--- a/runatlantis.io/docs/custom-policy-checks.md
+++ b/runatlantis.io/docs/custom-policy-checks.md
@@ -1,9 +1,11 @@
# Custom Policy Checks
-If you want to run custom policy tools or scripts instead of the built-in Conftest integration, you can do so by setting the `custom_policy_check` option and running it in a custom workflow. Note: custom policy tool output is simply parsed for "fail" substrings to determine if the policy set passed.
-This option can be configured either at the server-level in a [repos.yaml config file](server-configuration.md) or at the repo-level in an [atlantis.yaml file.](repo-level-atlantis-yaml.md).
+If you want to run custom policy tools or scripts instead of the built-in Conftest integration, you can do so by setting the `custom_policy_check` option and running it in a custom workflow. Note: custom policy tool output is simply parsed for "fail" substrings to determine if the policy set passed.
+
+This option can be configured either at the server level in a [repos.yaml config file](server-configuration.md) or at the repo level in an [atlantis.yaml file](repo-level-atlantis-yaml.md).

## Server-side config example
+
Set the `policy_check` and `custom_policy_check` options to true, and run the custom tool in the policy check steps as seen below.
```yaml @@ -30,8 +32,8 @@ policies: source: local ``` - ## Repo-level atlantis.yaml example + First, you will need to ensure `custom_policy_check` is within the `allowed_overrides` field of the server-side config. Next, just set the custom option to true on the specific project you want as shown in the example `atlantis.yaml` below: ```yaml diff --git a/runatlantis.io/docs/custom-workflows.md b/runatlantis.io/docs/custom-workflows.md index 61f7ae78ef..2e26e74b07 100644 --- a/runatlantis.io/docs/custom-workflows.md +++ b/runatlantis.io/docs/custom-workflows.md @@ -3,23 +3,25 @@ Custom workflows can be defined to override the default commands that Atlantis runs. -[[toc]] - ## Usage + Custom workflows can be specified in the Server-Side Repo Config or in the Repo-Level `atlantis.yaml` files. -**Notes** +**Notes:** + * If you want to allow repos to select their own workflows, they must have the -`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-choose-a-server-side-workflow) for more details. +`allowed_overrides: [workflow]` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-choose-a-server-side-workflow) for more details. * If in addition you also want to allow repos to define their own workflows, they must have the -`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.html#allow-repos-to-define-their-own-workflows) for more details. - +`allow_custom_workflows: true` setting. See [server-side repo config use cases](server-side-repo-config.md#allow-repos-to-define-their-own-workflows) for more details. ## Use Cases + ### .tfvars files + Given the structure: -``` + +```plain . └── project1 ├── main.tf @@ -29,6 +31,7 @@ Given the structure: If you wanted Atlantis to automatically run plan with `-var-file staging.tfvars` and `-var-file production.tfvars` you could define two workflows: + ```yaml # repos.yaml or atlantis.yaml workflows: @@ -62,7 +65,9 @@ workflows: - state_rm: extra_args: ["-lock=false"] ``` + Then in your repo-level `atlantis.yaml` file, you would reference the workflows: + ```yaml # atlantis.yaml version: 3 @@ -80,20 +85,27 @@ workflows: # If you didn't define the workflows in your server-side repos.yaml config, # you would define them here instead. ``` + When you want to apply the plans, you can comment -``` + +```shell atlantis apply -p project1-staging ``` + and -``` + +```shell atlantis apply -p project1-production ``` + Where `-p` refers to the project name. ### Adding extra arguments to Terraform commands + If you need to append flags to `terraform plan` or `apply` temporarily, you can append flags on a comment following `--`, for example commenting: -``` + +```shell atlantis plan -- -lock=false ``` @@ -117,7 +129,7 @@ workflows: extra_args: ["-lock=false"] ``` -If [policy checking](/docs/policy-checking.html#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest. +If [policy checking](policy-checking.md#how-it-works) is enabled, `extra_args` can also be used to change the default behaviour of conftest. ```yaml workflows: @@ -130,6 +142,7 @@ workflows: ``` ### Custom init/plan/apply Commands + If you want to customize `terraform init`, `plan` or `apply` in ways that aren't supported by `extra_args`, you can completely override those commands. 
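As a quick illustration of such an override, here is a minimal sketch (the workflow name `myplanworkflow` and the extra flags are illustrative assumptions, not the page's canonical example) that swaps the built-in `plan` for a custom `run` step writing to the location Atlantis expects via the documented `$PLANFILE` variable:

```yaml
# repos.yaml or atlantis.yaml -- illustrative sketch
workflows:
  myplanworkflow:
    plan:
      steps:
      - init
      # $PLANFILE is where Atlantis expects the plan file to be written
      - run: terraform plan -input=false -refresh -out $PLANFILE
```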
@@ -162,14 +175,15 @@ workflows:
 ```

### CDKTF
+
Here are the requirements to enable [CDKTF](https://developer.hashicorp.com/terraform/cdktf)

-- A custom image with `CDKTF` installed
-- Add `**/cdk.tf.json` to the list of Atlantis autoplan files.
-- Set the `atlantis-include-git-untracked-files` flag so that the Terraform files dynamically generated
+* A custom image with `CDKTF` installed
+* Add `**/cdk.tf.json` to the list of Atlantis autoplan files.
+* Set the `atlantis-include-git-untracked-files` flag so that the Terraform files dynamically generated
by CDKTF will be added to the Atlantis modified file list.
-- Use `pre_workflow_hooks` to run `cdktf synth`
-- Optional: There isn't a requirement to use a repo `atlantis.yaml` but one can be leveraged if needed.
+* Use `pre_workflow_hooks` to run `cdktf synth`
+* Optional: There isn't a requirement to use a repo `atlantis.yaml`, but one can be leveraged if needed.

#### Custom Image

@@ -192,6 +206,7 @@ ATLANTIS_INCLUDE_GIT_UNTRACKED_FILES=true

OR

`atlantis server --config config.yaml`
+
```yaml
# config.yaml
autoplan-file-list: "**/*.tf,**/*.tfvars,**/*.tfvars.json,**/cdk.tf.json"
@@ -203,6 +218,7 @@ include-git-untracked-files: true

Use `pre_workflow_hooks`

`atlantis server --repo-config="repos.yaml"`
+
```yaml
# repos.yaml
repos:
@@ -243,6 +259,7 @@ generating the `cdk.tf.json` Terraform files.

1. Atlantis then runs `terraform` workflows in the respective directories as usual.

### Terragrunt
+
Atlantis supports running custom commands in place of the default Atlantis
commands. We can use this functionality to enable
[Terragrunt](https://github.com/gruntwork-io/terragrunt).

@@ -250,7 +267,8 @@ commands. We can use this functionality to enable
You can either use your repo's `atlantis.yaml` file or the Atlantis server's `repos.yaml` file.

Given a directory structure:
-```
+
+```plain
.
└── live
   ├── prod
@@ -315,6 +333,7 @@ workflows:
```

If using the repo's `atlantis.yaml` file you would use the following config:
+
```yaml
version: 3
projects:
@@ -353,10 +372,9 @@ workflows:

**NOTE:** If using the repo's `atlantis.yaml` file, you will need to specify each directory that is a Terragrunt project.

-
::: warning
Atlantis will need to have the `terragrunt` binary in its PATH.
-If you're using Docker you can build your own image, see [Customization](/docs/deployment.html#customization).
+If you're using Docker you can build your own image, see [Customization](deployment.md#customization).
:::

If you don't want to create/manage the repo's `atlantis.yaml` file yourself, you can use the tool [terragrunt-atlantis-config](https://github.com/transcend-io/terragrunt-atlantis-config) to generate it.
@@ -364,6 +382,7 @@ If you don't want to create/manage the repo's `atlantis.yaml` file yourself, you

The `terragrunt-atlantis-config` tool is a community project and not maintained by the Atlantis team.

### Running custom commands
+
Atlantis supports running completely custom commands. In this example, we want to run
a script after every `apply`:

@@ -378,6 +397,7 @@ workflows:
```

::: tip Notes
+
* We don't need to write a `plan` key under `myworkflow`. If `plan` isn't set, Atlantis will use the default plan workflow which is what we want in this case.
* A custom command will only terminate if all output file descriptors are closed.
@@ -389,6 +409,7 @@ the redirect, the script would block the Atlantis workflow.
:::

### Custom Backend Config
+
If you need to specify the `-backend-config` flag to `terraform init` you'll need to use a custom workflow.
In this example, we're using custom backend files to configure two remote states, one for each environment. We're then using `.tfvars` files to load different variables for each environment. @@ -413,12 +434,14 @@ workflows: - plan: extra_args: [-var-file=production.tfvars] ``` + ::: warning NOTE We have to use a custom `run` step to `rm -rf .terraform` because otherwise Terraform will complain in-between commands since the backend config has changed. ::: You would then reference the workflows in your repo-level `atlantis.yaml`: + ```yaml version: 3 projects: @@ -431,7 +454,9 @@ projects: ``` ## Reference + ### Workflow + ```yaml plan: apply: @@ -447,6 +472,7 @@ state_rm: | state_rm | [Stage](#stage) | `steps: [init, state_rm]` | no | How to run state rm for this project. | ### Stage + ```yaml steps: - run: custom-command @@ -460,8 +486,11 @@ steps: | steps | array[[Step](#step)] | `[]` | no | List of steps for this stage. If the steps key is empty, no steps will be run for this stage. | ### Step + #### Built-In Commands + Steps can be a single string for a built-in command. + ```yaml - init - plan @@ -469,12 +498,15 @@ Steps can be a single string for a built-in command. - import - state_rm ``` + | Key | Type | Default | Required | Description | |---------------------------------|--------|---------|----------|------------------------------------------------------------------------------------------------------------------------------| | init/plan/apply/import/state_rm | string | none | no | Use a built-in command without additional configuration. Only `init`, `plan`, `apply`, `import` and `state_rm` are supported | #### Built-In Command With Extra Args + A map from string to `extra_args` for a built-in command with extra arguments. + ```yaml - init: extra_args: [arg1, arg2] @@ -487,64 +519,71 @@ A map from string to `extra_args` for a built-in command with extra arguments. - state_rm: extra_args: [arg1, arg2] ``` + | Key | Type | Default | Required | Description | |---------------------------------|------------------------------------|---------|----------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| init/plan/apply/import/state_rm | map[`extra_args` -> array[string]] | none | no | Use a built-in command and append `extra_args`. Only `init`, `plan`, `apply`, `import` and `state_rm` are supported as keys and only `extra_args` is supported as a value | +| init/plan/apply/import/state_rm | map\[`extra_args` -> array\[string\]\] | none | no | Use a built-in command and append `extra_args`. Only `init`, `plan`, `apply`, `import` and `state_rm` are supported as keys and only `extra_args` is supported as a value | #### Custom `run` Command + A custom command can be written in 2 ways Compact: + ```yaml - run: custom-command arg1 arg2 ``` + | Key | Type | Default | Required | Description | |-----|--------|---------|----------|----------------------| | run | string | none | no | Run a custom command | Full + ```yaml - run: command: custom-command arg1 arg2 output: show custom_regex: .* ``` + | Key | Type | Default | Required | Description | |-------------------|-----------------------|---------|-----------|-----------------------| | run | map[string -> string] | none | no | Run a custom command | | run.command | string | none | yes | Shell command to run | -| run.output | string | "show" | no | How to post-process the output of this command when posted in the PR comment. 
The options are<br>* `show` - preserve the full output<br>* `hide` - hide output from comment (still visible in the real-time streaming output)<br>* `strip_refreshing` - hide all output up until and including the last line containing "Refreshing...". This matches the behavior of the built-in `plan` command<br>* `custom_regex` - filters the comment output based on the regex specified on `run.regex_filter` by replacing matched patterns with the text<br>* `strip_refreshing_with_custom_regex` - applies `strip_refreshing` and `custom_regex` to the output |
+| run.output | string | "show" | no | How to post-process the output of this command when posted in the PR comment. The options are<br>* `show` - preserve the full output<br>* `hide` - hide output from comment (still visible in the real-time streaming output)<br>* `strip_refreshing` - hide all output up until and including the last line containing "Refreshing...". This matches the behavior of the built-in `plan` command<br>
* `custom_regex` - filters the comment output based on the regex specified on `run.regex_filter` by replacing matched patterns with the text ` * `strip_refreshing_with_custom_regex` - applies `strip_refreshing` and `custom_regex` to the output | | run.custom_regex | string | none | no | Regex filter to be applied to output. Required when `run.output` is `custom_regex` or `strip_refreshing_with_custom_regex` | ::: tip Notes + * `run` steps in the main `workflow` are executed with the following environment variables: note: these variables are not available to `pre` or `post` workflows - * `WORKSPACE` - The Terraform workspace used for this project, ex. `default`. + * `WORKSPACE` - The Terraform workspace used for this project, ex. `default`. NOTE: if the step is executed before `init` then Atlantis won't have switched to this workspace yet. - * `ATLANTIS_TERRAFORM_VERSION` - The version of Terraform used for this project, ex. `0.11.0`. - * `DIR` - Absolute path to the current directory. - * `PLANFILE` - Absolute path to the location where Atlantis expects the plan to + * `ATLANTIS_TERRAFORM_VERSION` - The version of Terraform used for this project, ex. `0.11.0`. + * `DIR` - Absolute path to the current directory. + * `PLANFILE` - Absolute path to the location where Atlantis expects the plan to either be generated (by plan) or already exist (if running apply). Can be used to override the built-in `plan`/`apply` commands, ex. `run: terraform plan -out $PLANFILE`. - * `SHOWFILE` - Absolute path to the location where Atlantis expects the plan in json format to + * `SHOWFILE` - Absolute path to the location where Atlantis expects the plan in json format to either be generated (by show) or already exist (if running policy checks). Can be used to override the built-in `plan`/`apply` commands, ex. `run: terraform show -json $PLANFILE > $SHOWFILE`. - * `POLICYCHECKFILE` - Absolute path to the location of policy check output if Atlantis runs policy checks. - See [policy checking](/docs/policy-checking.html#data-for-custom-run-steps) for information of data structure. - * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. - * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. - * `HEAD_REPO_NAME` - Name of the repository that is getting merged into the base repository, ex. `atlantis`. - * `HEAD_REPO_OWNER` - Owner of the repository that is getting merged into the base repository, ex. `acme-corp`. - * `HEAD_BRANCH_NAME` - Name of the head branch of the pull request (the branch that is getting merged into the base) - * `HEAD_COMMIT` - The sha256 that points to the head of the branch that is being pull requested into the base. If the pull request is from Bitbucket Cloud the string will only be 12 characters long because Bitbucket Cloud truncates its commit IDs. - * `BASE_BRANCH_NAME` - Name of the base branch of the pull request (the branch that the pull request is getting merged into) - * `PROJECT_NAME` - Name of the project configured in `atlantis.yaml`. If no project name is configured this will be an empty string. - * `PULL_NUM` - Pull request number or ID, ex. `2`. - * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. - * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`. - * `REPO_REL_DIR` - The relative path of the project in the repository. For example if your project is in `dir1/dir2/` then this will be set to `"dir1/dir2"`. 
If your project is at the root this will be `"."`. - * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. - * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and + * `POLICYCHECKFILE` - Absolute path to the location of policy check output if Atlantis runs policy checks. + See [policy checking](policy-checking.md#data-for-custom-run-steps) for information of data structure. + * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. + * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. + * `HEAD_REPO_NAME` - Name of the repository that is getting merged into the base repository, ex. `atlantis`. + * `HEAD_REPO_OWNER` - Owner of the repository that is getting merged into the base repository, ex. `acme-corp`. + * `HEAD_BRANCH_NAME` - Name of the head branch of the pull request (the branch that is getting merged into the base) + * `HEAD_COMMIT` - The sha256 that points to the head of the branch that is being pull requested into the base. If the pull request is from Bitbucket Cloud the string will only be 12 characters long because Bitbucket Cloud truncates its commit IDs. + * `BASE_BRANCH_NAME` - Name of the base branch of the pull request (the branch that the pull request is getting merged into) + * `PROJECT_NAME` - Name of the project configured in `atlantis.yaml`. If no project name is configured this will be an empty string. + * `PULL_NUM` - Pull request number or ID, ex. `2`. + * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. + * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`. + * `REPO_REL_DIR` - The relative path of the project in the repository. For example if your project is in `dir1/dir2/` then this will be set to `"dir1/dir2"`. If your project is at the root this will be `"."`. + * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. + * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and every character is escaped, ex. `atlantis plan -- arg1 arg2` will result in `COMMENT_ARGS=\a\r\g\1,\a\r\g\2`. * A custom command will only terminate if all output file descriptors are closed. Therefore a custom command can only be sent to the background (e.g. for an SSH tunnel during @@ -556,12 +595,14 @@ the redirect, the script would block the Atlantis workflow. ::: #### Environment Variable `env` Command + The `env` command allows you to set environment variables that will be available to all steps defined **below** the `env` step. You can set hard coded values via the `value` key, or set dynamic values via the `command` key which allows you to run any command and uses the output as the environment variable value. + ```yaml - env: name: ENV_NAME @@ -570,34 +611,56 @@ as the environment variable value. 
    name: ENV_NAME_2
    command: 'echo "dynamic-value-$(date)"'
```
+
| Key | Type | Default | Required | Description |
|-------------|-------------------------|---------|----------|------------------------------------------------------------------------------------------------------------------|
-| env | map[string -> string] | none | no | Set environment variables for subsequent steps |
+| env | map\[string -> string\] | none | no | Set environment variables for subsequent steps |
| env.name | string | none | yes | Name of the environment variable |
| env.value | string | none | no | Set the value of the environment variable to a hard-coded string. Cannot be set at the same time as `command` |
| env.command | string | none | no | Set the value of the environment variable to the output of a command. Cannot be set at the same time as `value` |

::: tip Notes
+
* `env` `command`s can use any of the built-in environment variables available to `run` commands.
:::

#### Multiple Environment Variables `multienv` Command
+
The `multienv` command allows you to set a dynamic number of environment variables that will be available
to all steps defined **below** the `multienv` step.
+
+Compact:
+
```yaml
- multienv: custom-command
```
-| Key | Type | Default | Required | Description |
-|----------|--------|---------|----------|--------------------------------------------------------------------------------|
-| multienv | string | none | no | Run a custom command and add set environment variables according to the result |
-The result of the executed command must have a fixed format:
-EnvVar1Name=value1,EnvVar2Name=value2,EnvVar3Name=value3
-The name-value pairs in the result are added as environment variables if success is true otherwise the workflow execution stops with error and the errorMessage is getting displayed.
+| Key | Type | Default | Required | Description |
+|----------|--------|---------|----------|------------------------------------------------------------|
+| multienv | string | none | no | Run a custom command and add printed environment variables |
+
+Full:
+
+```yaml
+- multienv:
+    command: custom-command
+    output: show
+```
+
+| Key | Type | Default | Required | Description |
+|------------------|-----------------------|---------|----------|--------------------------------------------------------------------------------------|
+| multienv | map[string -> string] | none | no | Run a custom command and add printed environment variables |
+| multienv.command | string | none | yes | Name of the custom script to run |
+| multienv.output | string | "show" | no | Setting output to "hide" will suppress the message about added environment variables |
+
+The output of the command execution must have the following format:
+`EnvVar1Name=value1,EnvVar2Name=value2,EnvVar3Name=value3`
+
+The name-value pairs in the output are added as environment variables if command execution is successful; otherwise the workflow execution is interrupted with an error and the errorMessage is returned.

::: tip Notes
+
* `multienv` `command`s can use any of the built-in environment variables available to `run` commands.
:::

diff --git a/runatlantis.io/docs/deployment.md b/runatlantis.io/docs/deployment.md
index 05e91b5e70..dfe5ae27cc 100644
--- a/runatlantis.io/docs/deployment.md
+++ b/runatlantis.io/docs/deployment.md
@@ -1,29 +1,33 @@
# Deployment
+
This page covers getting Atlantis up and running in your infrastructure.
::: tip Prerequisites -* You have created [access credentials](access-credentials.html) for your Atlantis user -* You have created a [webhook secret](webhook-secrets.html) -::: -[[toc]] +* You have created [access credentials](access-credentials.md) for your Atlantis user +* You have created a [webhook secret](webhook-secrets.md) +::: ## Architecture Overview + ### Runtime + Atlantis is a simple [Go](https://golang.org/) app. It receives webhooks from your Git host and executes Terraform commands locally. There is an official Atlantis [Docker image](https://ghcr.io/runatlantis/atlantis). ### Routing + Atlantis and your Git host need to be able to route and communicate with one another. Your Git host needs to be able to send webhooks to Atlantis and Atlantis needs to be able to make API calls to your Git host. If you're using -a public Git host like github.com, gitlab.com, bitbucket.org, or dev.azure.com then you'll need to +a public Git host like github.com, gitlab.com, gitea.com, bitbucket.org, or dev.azure.com then you'll need to expose Atlantis to the internet. -If you're using a private Git host like GitHub Enterprise, GitLab Enterprise or +If you're using a private Git host like GitHub Enterprise, GitLab Enterprise, self-hosted Gitea or Bitbucket Server, then Atlantis needs to be routable from the private host and Atlantis will need to be able to route to the private host. ### Data + Atlantis has no external database. Atlantis stores Terraform plan files on disk. If Atlantis loses that data in between a `plan` and `apply` cycle, then users will have to re-run `plan`. Because of this, you may want to provision a persistent disk @@ -32,6 +36,7 @@ for Atlantis. ## Deployment Pick your deployment type: + * [Kubernetes Helm Chart](#kubernetes-helm-chart) * [Kubernetes Manifests](#kubernetes-manifests) * [Kubernetes Kustomize](#kubernetes-kustomize) @@ -41,21 +46,27 @@ Pick your deployment type: * [Docker](#docker) * [Roll Your Own](#roll-your-own) - ### Kubernetes Helm Chart + Atlantis has an [official Helm chart](https://github.com/runatlantis/helm-charts/tree/main/charts/atlantis) To install: + 1. Add the runatlantis helm chart repository to helm + ```bash helm repo add runatlantis https://runatlantis.github.io/helm-charts ``` + 1. `cd` into a directory where you're going to configure your Atlantis Helm chart 1. Create a `values.yaml` file by running + ```bash helm inspect values runatlantis/atlantis > values.yaml ``` + 1. Edit `values.yaml` and add your access credentials and webhook secret + ```yaml # for example github: @@ -63,28 +74,33 @@ To install: token: bar secret: baz ``` + 1. Edit `values.yaml` and set your `orgAllowlist` (see [Repo Allowlist](server-configuration.md#repo-allowlist) for more information) + ```yaml orgAllowlist: github.com/runatlantis/* ``` - **Note**: For helm chart version < `4.0.2`, `orgWhitelist` must be used instead. -1. Configure any other variables (see [https://github.com/runatlantis/helm-charts#customization](https://github.com/runatlantis/helm-charts#customization) + + **Note**: For helm chart version < `4.0.2`, `orgWhitelist` must be used instead. +1. Configure any other variables (see [Atlantis Helm Chart: Customization](https://github.com/runatlantis/helm-charts#customization) for documentation) 1. Run + ```sh helm install atlantis runatlantis/atlantis -f values.yaml ``` If you are using helm v2, run: + ```sh helm install -f values.yaml runatlantis/atlantis ``` - Atlantis should be up and running in minutes! 
See [Next Steps](#next-steps) for what to do next. ### Kubernetes Manifests + If you'd like to use a raw Kubernetes manifest, we offer either a [Deployment](https://kubernetes.io/docs/concepts/workloads/controllers/deployment/) or a [Statefulset](https://kubernetes.io/docs/concepts/workloads/controllers/statefulset/) with persistent storage. @@ -94,35 +110,42 @@ or you upgrade Atlantis, you won't lose plans that haven't been applied. If you do lose that data, you just need to run `atlantis plan` again so it's not the end of the world. Regardless of whether you choose a Deployment or StatefulSet, first create a Secret with the webhook secret and access token: + ```bash echo -n "yourtoken" > token echo -n "yoursecret" > webhook-secret kubectl create secret generic atlantis-vcs --from-file=token --from-file=webhook-secret ``` + ::: tip Note If you're using Bitbucket Cloud then there is no webhook secret since it's not supported. ::: Next, edit the manifests below as follows: -1. Replace `` in `image: ghcr.io/runatlantis/atlantis:` with the most recent version from [https://github.com/runatlantis/atlantis/releases/latest](https://github.com/runatlantis/atlantis/releases/latest). + +1. Replace `` in `image: ghcr.io/runatlantis/atlantis:` with the most recent version from [GitHub: Atlantis latest release](https://github.com/runatlantis/atlantis/releases/latest). * NOTE: You never want to run with `:latest` because if your Pod moves to a new node, Kubernetes will pull the latest image and you might end up upgrading Atlantis by accident! 2. Replace `value: github.com/yourorg/*` under `name: ATLANTIS_REPO_ALLOWLIST` with the allowlist pattern -for your Terraform repos. See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details. +for your Terraform repos. See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details. 3. If you're using GitHub: 1. Replace `` with the username of your Atlantis GitHub user without the `@`. - 2. Delete all the `ATLANTIS_GITLAB_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. + 2. Delete all the `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. 4. If you're using GitLab: 1. Replace `` with the username of your Atlantis GitLab user without the `@`. - 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. -5. If you're using Bitbucket: + 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITEA_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. +5. If you're using Gitea: + 1. Replace `` with the username of your Atlantis Gitea user without the `@`. + 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_BITBUCKET_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. +6. If you're using Bitbucket: 1. Replace `` with the username of your Atlantis Bitbucket user without the `@`. - 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. -6. If you're using Azure DevOps: + 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, and `ATLANTIS_AZUREDEVOPS_*` environment variables. +7. If you're using Azure DevOps: 1. Replace `` with the username of your Atlantis Azure DevOps user without the `@`. - 2. Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, and `ATLANTIS_BITBUCKET_*` environment variables. + 2. 
Delete all the `ATLANTIS_GH_*`, `ATLANTIS_GITLAB_*`, `ATLANTIS_GITEA_*`, and `ATLANTIS_BITBUCKET_*` environment variables. #### StatefulSet Manifest +
Show... @@ -185,6 +208,21 @@ spec: key: webhook-secret ### End GitLab Config ### + ### Gitea Config ### + - name: ATLANTIS_GITEA_USER + value: # 4i. If you're using Gitea replace with the username of your Atlantis Gitea user without the `@`. + - name: ATLANTIS_GITEA_TOKEN + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: token + - name: ATLANTIS_GITEA_WEBHOOK_SECRET + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: webhook-secret + ### End Gitea Config ### + ### Bitbucket Config ### - name: ATLANTIS_BITBUCKET_USER value: # 5i. If you're using Bitbucket replace with the username of your Atlantis Bitbucket user without the `@`. @@ -272,10 +310,11 @@ spec: selector: app.kubernetes.io/name: atlantis ``` -
+ #### Deployment Manifest +
Show... @@ -333,6 +372,21 @@ spec: key: webhook-secret ### End GitLab Config ### + ### Gitea Config ### + - name: ATLANTIS_GITEA_USER + value: # 4i. If you're using Gitea replace with the username of your Atlantis Gitea user without the `@`. + - name: ATLANTIS_GITEA_TOKEN + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: token + - name: ATLANTIS_GITEA_WEBHOOK_SECRET + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: webhook-secret + ### End Gitea Config ### + ### Bitbucket Config ### - name: ATLANTIS_BITBUCKET_USER value: # 5i. If you're using Bitbucket replace with the username of your Atlantis Bitbucket user without the `@`. @@ -405,14 +459,16 @@ spec: selector: app.kubernetes.io/name: atlantis ``` +
#### Routing and SSL + The manifests above create a Kubernetes `Service` of `type: ClusterIP` which isn't accessible outside your cluster. Depending on how you're doing routing into Kubernetes, you may want to use a Service of `type: LoadBalancer` so that Atlantis is accessible to GitHub/GitLab and your internal users. -If you want to add SSL you can use something like [https://github.com/jetstack/cert-manager](https://github.com/jetstack/cert-manager) to generate SSL +If you want to add SSL you can use something like [cert-manager](https://github.com/cert-manager/cert-manager) to generate SSL certs and mount them into the Pod. Then set the `ATLANTIS_SSL_CERT_FILE` and `ATLANTIS_SSL_KEY_FILE` environment variables to enable SSL. You could also set up SSL at your LoadBalancer. @@ -425,6 +481,7 @@ A `kustomization.yaml` file is provided in the directory `kustomize/`, so you ma You will need to provide a secret (with the default name of `atlantis-vcs`) to configure Atlantis with access credentials for your remote repositories. Example: + ```yaml bases: - github.com/runatlantis/atlantis//kustomize @@ -450,7 +507,6 @@ patchesStrategicMerge: #### Required - ```yaml ... containers: @@ -481,6 +537,26 @@ containers: key: webhook-secret ``` +#### Gitea + +```yaml +containers: +- name: atlantis + env: + - name: ATLANTIS_GITEA_USER + value: # 4i. If you're using Gitea replace with the username of your Atlantis Gitea user without the `@`. + - name: ATLANTIS_GITEA_TOKEN + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: token + - name: ATLANTIS_GITEA_WEBHOOK_SECRET + valueFrom: + secretKeyRef: + name: atlantis-vcs + key: webhook-secret +``` + #### GitHub ```yaml @@ -519,37 +595,44 @@ containers: ``` ### OpenShift + The Helm chart and Kubernetes manifests above are compatible with OpenShift, however you need to run with an additional environment variable: `HOME=/home/atlantis`. This is required because OpenShift runs Docker images with random user id's that use `/` as their home directory. ### AWS Fargate + If you'd like to run Atlantis on [AWS Fargate](https://aws.amazon.com/fargate/) check out the Atlantis module on the [Terraform Module Registry](https://registry.terraform.io/modules/terraform-aws-modules/atlantis/aws/latest) and then check out the [Next Steps](#next-steps). ### Google Kubernetes Engine (GKE) + You can run Atlantis on GKE using the [Helm chart](#kubernetes-helm-chart) or the [manifests](#kubernetes-manifests). There is also a set of full Terraform configurations that create a GKE Cluster, -Cloud Storage Backend and TLS certs: [https://github.com/sethvargo/atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke). +Cloud Storage Backend and TLS certs: [sethvargo atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke). Once you're done, see [Next Steps](#next-steps). ### Google Compute Engine (GCE) -Atlantis can be run on Google Compute Engine using a Terraform module that deploys it as a Docker container on a managed Compute Engine instance. -This [Terraform module](https://registry.terraform.io/modules/bschaatsbergen/atlantis/gce/latest) features the creation of a Cloud load balancer, a Container-Optimized OS-based VM, a persistent data disk, and a managed instance group. +Atlantis can be run on Google Compute Engine using a Terraform module that deploys it as a Docker container on a managed Compute Engine instance. 
+ +This [Terraform module](https://registry.terraform.io/modules/runatlantis/atlantis/gce/latest) features the creation of a Cloud load balancer, a Container-Optimized OS-based VM, a persistent data disk, and a managed instance group. After it is deployed, see [Next Steps](#next-steps). ### Docker + Atlantis has an [official](https://ghcr.io/runatlantis/atlantis) Docker image: `ghcr.io/runatlantis/atlantis`. #### Customization + If you need to modify the Docker image that we provide, for instance to add the terragrunt binary, you can do something like this: 1. Create a custom docker file + ```dockerfile FROM ghcr.io/runatlantis/atlantis:{latest version} @@ -563,32 +646,37 @@ Additionally, the /docker-entrypoint.d/ directory offers a flexible option for i **Important Notice**: There is a critical update regarding the data directory in Atlantis. In versions prior to 0.26.0, the directory was configured to be accessible by the root user. However, with the transition to the atlantis user in newer versions, it is imperative to update the directory permissions accordingly in your current deployment when upgrading to a version later than 0.26.0. This step ensures seamless access and functionality for the atlantis user. 1. Build your Docker image + ```bash docker build -t {YOUR_DOCKER_ORG}/atlantis-custom . ``` 1. Run your image + ```bash docker run {YOUR_DOCKER_ORG}/atlantis-custom server --gh-user=GITHUB_USERNAME --gh-token=GITHUB_TOKEN ``` ### Microsoft Azure -The standard [Kubernetes Helm Chart](#kubernetes-helm-chart) should work fine on [Azure Kubernetes Service](https://docs.microsoft.com/en-us/azure/aks/intro-kubernetes). +The standard [Kubernetes Helm Chart](#kubernetes-helm-chart) should work fine on [Azure Kubernetes Service](https://docs.microsoft.com/en-us/azure/aks/intro-kubernetes). -Another option is [Azure Container Instances](https://docs.microsoft.com/en-us/azure/container-instances/). See this community member's [repo](https://github.com/jplane/atlantis-on-aci) or the new and more up-to-date [Terraform module](https://github.com/getindata/terraform-azurerm-atlantis) for install scripts and more information on running Atlantis on ACI. +Another option is [Azure Container Instances](https://docs.microsoft.com/en-us/azure/container-instances/). See this community member's [repo](https://github.com/jplane/atlantis-on-aci) or the new and more up-to-date [Terraform module](https://github.com/getindata/terraform-azurerm-atlantis) for install scripts and more information on running Atlantis on ACI. **Note on ACI Deployment:** Due to a bug in earlier Docker releases, Docker v23.0.0 or later is required for straightforward deployment. Alternatively, the Atlantis Docker image can be pushed to a private registry such as ACR and then used. ### Roll Your Own + If you want to roll your own Atlantis installation, you can get the `atlantis` -binary from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) +binary from [GitHub](https://github.com/runatlantis/atlantis/releases) or use the [official Docker image](https://ghcr.io/runatlantis/atlantis). #### Startup Command + The exact flags to `atlantis server` depends on your Git host: ##### GitHub + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -599,6 +687,7 @@ atlantis server \ ``` ##### GitHub Enterprise + ```bash HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. 
github.runatlantis.io atlantis server \ @@ -611,6 +700,7 @@ atlantis server \ ``` ##### GitLab + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -621,6 +711,7 @@ atlantis server \ ``` ##### GitLab Enterprise + ```bash HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. gitlab.runatlantis.io atlantis server \ @@ -632,7 +723,20 @@ atlantis server \ --repo-allowlist="$REPO_ALLOWLIST" ``` +##### Gitea + +```bash +atlantis server \ +--atlantis-url="$URL" \ +--gitea-user="$USERNAME" \ +--gitea-token="$TOKEN" \ +--gitea-webhook-secret="$SECRET" \ +--gitea-page-size=30 \ +--repo-allowlist="$REPO_ALLOWLIST" +``` + ##### Bitbucket Cloud (bitbucket.org) + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -642,6 +746,7 @@ atlantis server \ ``` ##### Bitbucket Server (aka Stash) + ```bash BASE_URL=YOUR_BITBUCKET_SERVER_URL # ex. http://bitbucket.mycorp:7990 atlantis server \ @@ -670,21 +775,23 @@ atlantis server \ ``` Where -- `$URL` is the URL that Atlantis can be reached at -- `$USERNAME` is the GitHub/GitLab/Bitbucket/AzureDevops username you generated the token for -- `$TOKEN` is the access token you created. If you don't want this to be passed + +* `$URL` is the URL that Atlantis can be reached at +* `$USERNAME` is the GitHub/GitLab/Gitea/Bitbucket/AzureDevops username you generated the token for +* `$TOKEN` is the access token you created. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file - (see [Configuration](/docs/server-configuration.html#environment-variables)) - or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN` + (see [Configuration](server-configuration.md#environment-variables)) + or as an environment variable: `ATLANTIS_GH_TOKEN` or `ATLANTIS_GITLAB_TOKEN` or `ATLANTIS_GITEA_TOKEN` or `ATLANTIS_BITBUCKET_TOKEN` or `ATLANTIS_AZUREDEVOPS_TOKEN` -- `$SECRET` is the random key you used for the webhook secret. +* `$SECRET` is the random key you used for the webhook secret. If you don't want this to be passed in as an argument for security reasons you can specify it in a config file - (see [Configuration](/docs/server-configuration.html#environment-variables)) - or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET` -- `$REPO_ALLOWLIST` is which repos Atlantis can run on, ex. + (see [Configuration](server-configuration.md#environment-variables)) + or as an environment variable: `ATLANTIS_GH_WEBHOOK_SECRET` or `ATLANTIS_GITLAB_WEBHOOK_SECRET` or + `ATLANTIS_GITEA_WEBHOOK_SECRET` +* `$REPO_ALLOWLIST` is which repos Atlantis can run on, ex. `github.com/runatlantis/*` or `github.enterprise.corp.com/*`. - See [Repo Allowlist](server-configuration.html#repo-allowlist) for more details. + See [--repo-allowlist](server-configuration.md#repo-allowlist) for more details. Atlantis is now running! ::: tip @@ -693,5 +800,6 @@ restart it in case of failure. ::: ## Next Steps + * To ensure Atlantis is running, load its UI. By default Atlantis runs on port `4141`. -* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.html). +* Now you're ready to add Webhooks to your repos. See [Configuring Webhooks](configuring-webhooks.md). 
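As a consolidating illustration of the startup flags above, a minimal Docker Compose sketch for the official image might look like this (the URL, username, allowlist, and the choice to pass secrets via shell environment variables are placeholder assumptions; pin a released image tag in practice):

```yaml
# docker-compose.yml -- illustrative sketch only
services:
  atlantis:
    image: ghcr.io/runatlantis/atlantis:latest # pin a specific release instead of :latest
    command:
      - server
      - --atlantis-url=https://atlantis.example.com
      - --gh-user=GITHUB_USERNAME
      - --repo-allowlist=github.com/yourorg/*
    environment:
      # documented env-var alternatives to --gh-token / --gh-webhook-secret
      ATLANTIS_GH_TOKEN: ${ATLANTIS_GH_TOKEN}
      ATLANTIS_GH_WEBHOOK_SECRET: ${ATLANTIS_GH_WEBHOOK_SECRET}
    ports:
      - "4141:4141" # Atlantis serves its UI and /events endpoint on 4141 by default
```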
diff --git a/runatlantis.io/docs/faq.md b/runatlantis.io/docs/faq.md
index 2cea8e8c92..1764719d97 100644
--- a/runatlantis.io/docs/faq.md
+++ b/runatlantis.io/docs/faq.md
@@ -1,4 +1,5 @@
# FAQ
+
**Q: Does Atlantis affect Terraform [remote state](https://developer.hashicorp.com/terraform/language/state/remote)?**

A: No. Atlantis does not interfere with Terraform remote state in any way. Under the hood, Atlantis is simply executing `terraform plan` and `terraform apply`.

diff --git a/runatlantis.io/docs/how-atlantis-works.md b/runatlantis.io/docs/how-atlantis-works.md
index ed57d988f5..f486091b3b 100644
--- a/runatlantis.io/docs/how-atlantis-works.md
+++ b/runatlantis.io/docs/how-atlantis-works.md
@@ -1,7 +1,8 @@
# How Atlantis Works
+
This section of the docs describes how Atlantis works at a deeper level.

-* [Locking](locking.html)
-* [Autoplanning](autoplanning.html)
-* [Automerging](automerging.html)
-* [Security](security.html)
+* [Locking](locking.md)
+* [Autoplanning](autoplanning.md)
+* [Automerging](automerging.md)
+* [Security](security.md)

diff --git a/runatlantis.io/docs/installation-guide.md b/runatlantis.io/docs/installation-guide.md
index fafa5d5b90..f5f1bd71d1 100644
--- a/runatlantis.io/docs/installation-guide.md
+++ b/runatlantis.io/docs/installation-guide.md
@@ -1,20 +1,22 @@
# Installation Guide
+
This guide is for installing a **production-ready** instance of Atlantis onto your infrastructure:
+
1. First, ensure your Terraform setup meets the Atlantis **requirements**
-    * See [Requirements](requirements.html)
-1. Create **access credentials** for your Git host (GitHub, GitLab, Bitbucket, Azure DevOps)
-    * See [Generating Git Host Access Credentials](access-credentials.html)
+    * See [Requirements](requirements.md)
+1. Create **access credentials** for your Git host (GitHub, GitLab, Gitea, Bitbucket, Azure DevOps)
+    * See [Generating Git Host Access Credentials](access-credentials.md)
1. Create a **webhook secret** so Atlantis can validate webhooks
-    * See [Creating a Webhook Secret](webhook-secrets.html)
+    * See [Creating a Webhook Secret](webhook-secrets.md)
1. **Deploy** Atlantis into your infrastructure
-    * See [Deployment](deployment.html)
+    * See [Deployment](deployment.md)
1. Configure **Webhooks** on your Git host so Atlantis can respond to your pull requests
-    * See [Configuring Webhooks](configuring-webhooks.html)
+    * See [Configuring Webhooks](configuring-webhooks.md)
1. Configure **provider credentials** so Atlantis can actually run Terraform commands
-    * See [Provider Credentials](provider-credentials.html)
+    * See [Provider Credentials](provider-credentials.md)

:::tip
-If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.html)
-and [Testing Locally](../guide/testing-locally.html).
+If you want to test out Atlantis first, check out [Test Drive](../guide/test-drive.md)
+and [Testing Locally](../guide/testing-locally.md).
:::

diff --git a/runatlantis.io/docs/locking.md b/runatlantis.io/docs/locking.md
index 65836d3b70..c75e2b3fce 100644
--- a/runatlantis.io/docs/locking.md
+++ b/runatlantis.io/docs/locking.md
@@ -1,4 +1,5 @@
# Locking
+
When `plan` is run, the directory and Terraform workspace are **Locked** until the pull request is merged or closed, or the plan is manually deleted.

If another user attempts to `plan` for the same directory and workspace in a different pull request
@@ -12,9 +13,8 @@ Which links them to the pull request that holds the lock.

Only the directory in the repo and Terraform workspace are locked, not the whole repo.
::: -[[toc]] - ## Why + 1. Because `atlantis apply` is being done before the pull request is merged, after an apply your `main` branch does not represent the most up to date version of your infrastructure anymore. With locking, you can ensure that no other changes will be made until the @@ -30,6 +30,7 @@ but with the added ability to re-plan/apply multiple times if things don't work. will be made invalid after the in-progress plan is applied. ## Viewing Locks + To view locks, go to the URL that Atlantis is hosted at: ![Locks View](./images/locks-ui.png) @@ -41,6 +42,7 @@ You can click on a lock to view its details:

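If taking the lock at `plan` time is too aggressive for a given project, the repo-level `repo_locks` setting (see the `atlantis.yaml` reference further down in this diff) can defer it. A minimal sketch, assuming a hypothetical `project1` directory:

```yaml
# atlantis.yaml sketch: defer the repository lock until apply
version: 3
projects:
- dir: project1
  repo_locks:
    mode: on_apply  # valid modes: disabled, on_plan (default), on_apply
```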
## Unlocking + The project and workspace will be automatically unlocked when the PR is merged or closed. To unlock the project and workspace without completing an `apply` and merging, comment `atlantis unlock` on the PR, @@ -59,6 +61,7 @@ to delete the lock. Once a plan is discarded, you'll need to run `plan` again prior to running `apply` when you go back to that pull request. ## Relationship to Terraform State Locking + Atlantis does not conflict with [Terraform State Locking](https://developer.hashicorp.com/terraform/language/state/locking). Under the hood, all Atlantis is doing is running `terraform plan` and `apply` and so all of the locking built in to those commands by Terraform isn't affected. diff --git a/runatlantis.io/docs/policy-checking.md b/runatlantis.io/docs/policy-checking.md index c996ef7ee0..60d5b306e0 100644 --- a/runatlantis.io/docs/policy-checking.md +++ b/runatlantis.io/docs/policy-checking.md @@ -10,7 +10,7 @@ for using this step include: ## How it works? -Enabling "policy checking" in addition to the [mergeable apply requirement](/docs/command-requirements.html#supported-requirements) blocks applies on plans that fail any of the defined conftest policies. +Enabling "policy checking" in addition to the [mergeable apply requirement](command-requirements.md#supported-requirements) blocks applies on plans that fail any of the defined conftest policies. ![Policy Check Apply Failure](./images/policy-check-apply-failure.png) @@ -20,9 +20,9 @@ Any failures need to either be addressed in a successive commit, or approved by ![Policy Check Approval](./images/policy-check-approval.png) - Policy approvals may be cleared either by re-planing, or by issuing the following command: -``` + +```shell atlantis approve_policies --clear-policy-approval ``` @@ -44,11 +44,11 @@ All repositories will have policy checking enabled. ### Step 2: Define the policy configuration -Policy Configuration is defined in the [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html#reference). +Policy Configuration is defined in the [server-side repo configuration](server-side-repo-config.md#reference). In this example we will define one policy set with one owner: -``` +```yaml policies: owners: users: @@ -72,11 +72,11 @@ policies: - `owners` - Defines the users/teams which are able to approve a specific policy set. - `approve_count` - Defines the number of approvals needed to bypass policy checks. Defaults to the top-level policies configuration, if not specified. -By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example. +By default conftest is configured to only run the `main` package. If you wish to run specific/multiple policies consider passing `--namespace` or `--all-namespaces` to conftest with [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) via a custom workflow as shown in the below example. Example Server Side Repo configuration using `--all-namespaces` and a local src dir. 
-``` +```yaml repos: - id: github.com/myorg/example-repo workflow: custom @@ -104,7 +104,7 @@ workflows: Conftest policies are based on [Open Policy Agent (OPA)](https://www.openpolicyagent.org/) and written in [rego](https://www.openpolicyagent.org/docs/latest/policy-language/#what-is-rego). Following our example, simply create a `rego` file in `null_resource_warning` folder with following code, the code below a simple policy that will fail for plans containing newly created `null_resource`s. -``` +```rego package main resource_types = {"null_resource"} @@ -144,7 +144,7 @@ That's it! Now your Atlantis instance is configured to run policies on your Terr ### Pulling policies from a remote location -Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](https://www.runatlantis.io/docs/custom-workflows.html#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check. +Conftest supports [pulling policies](https://www.conftest.dev/sharing/#pulling) from remote locations such as S3, git, OCI, and other protocols supported by the [go-getter](https://github.com/hashicorp/go-getter) library. The key [`extra_args`](custom-workflows.md#adding-extra-arguments-to-terraform-commands) can be used to pass in the [`--update`](https://www.conftest.dev/sharing/#-update-flag) flag to tell `conftest` to pull the policies into the project folder before running the policy check. ```yaml workflows: @@ -163,7 +163,7 @@ Note that authentication may need to be configured separately if pulling policie ### Running policy check against Terraform source code -By default, Atlantis runs the policy check against the [`SHOWFILE`](https://www.runatlantis.io/docs/custom-workflows.html#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`. +By default, Atlantis runs the policy check against the [`SHOWFILE`](custom-workflows.md#custom-run-command). In order to run the policy test against Terraform files directly, override the default `conftest` command used and pass in `*.tf` as one of the inputs to `conftest`. The `show` step is required so that Atlantis will generate the `SHOWFILE`. ```yaml workflows: @@ -171,13 +171,12 @@ workflows: policy_check: steps: - show - - run: conftest test $SHOWFILE *.tf + - run: conftest test $SHOWFILE *.tf --no-fail ``` ### Quiet policy checks -By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.html#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed. - +By default, Atlantis will add a comment to all pull requests with the policy check result - both successes and failures. 
Version 0.21.0 added the [`--quiet-policy-checks`](server-configuration.md#quiet-policy-checks) option, which will instead only add comments when policy checks fail, significantly reducing the number of comments when most policy check results succeed. ### Data for custom run steps @@ -198,9 +197,10 @@ When the policy check workflow runs, a file is created in the working directory ## Running policy check only on some repositories -When policy checking is enabled it will be enforced on all repositories, in order to disable policy checking on some repositories first [enable policy checks](https://www.runatlantis.io/docs/policy-checking.html#getting-started) and then disable it explicitly on each repository with the `policy_check` flag. +When policy checking is enabled it will be enforced on all repositories, in order to disable policy checking on some repositories first [enable policy checks](policy-checking.md#getting-started) and then disable it explicitly on each repository with the `policy_check` flag. For server side config: + ```yml # repos.yaml repos: @@ -216,6 +216,7 @@ repos: ``` For repo level `atlantis.yaml` config: + ```yml version: 3 projects: diff --git a/runatlantis.io/docs/post-workflow-hooks.md b/runatlantis.io/docs/post-workflow-hooks.md index a9f1e05e94..91ba0b7aa7 100644 --- a/runatlantis.io/docs/post-workflow-hooks.md +++ b/runatlantis.io/docs/post-workflow-hooks.md @@ -2,12 +2,10 @@ Post workflow hooks can be defined to run scripts after default or custom workflows are executed. Post workflow hooks differ from [custom -workflows](custom-workflows.html#custom-run-command) in that they are run +workflows](custom-workflows.md#custom-run-command) in that they are run outside of Atlantis commands. Which means they do not surface their output back to the PR as a comment. -[[toc]] - ## Usage Post workflow hooks can only be specified in the Server-Side Repo Config under @@ -44,7 +42,6 @@ have finished. In this example we use a custom workflow to generate cost estimates for each workflow using [Infracost](https://www.infracost.io/docs/integrations/cicd/#cicd-integrations), then create a summary report after all workflows have completed. - ```yaml # repos.yaml workflows: @@ -88,7 +85,7 @@ repos: ### Custom `run` Command This is very similar to [custom workflow run -command](custom-workflows.html#custom-run-command). +command](custom-workflows.md#custom-run-command). ```yaml - run: custom-command @@ -102,6 +99,7 @@ command](custom-workflows.html#custom-run-command). | shellArgs | string | '-c' | no | The shell arguments to use for running the command | ::: tip Notes + * `run` commands are executed with the following environment variables: * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. diff --git a/runatlantis.io/docs/pre-workflow-hooks.md b/runatlantis.io/docs/pre-workflow-hooks.md index 9087be24c7..dce3f2fe7d 100644 --- a/runatlantis.io/docs/pre-workflow-hooks.md +++ b/runatlantis.io/docs/pre-workflow-hooks.md @@ -2,15 +2,13 @@ Pre workflow hooks can be defined to run scripts before default or custom workflows are executed. Pre workflow hooks differ from [custom -workflows](custom-workflows.html#custom-run-command) in several ways. +workflows](custom-workflows.md#custom-run-command) in several ways. 1. Pre workflow hooks do not require the repository configuration to be - present. 
This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.html#dynamic-repo-config-generation). + present. This can be utilized to [dynamically generate repo configs](pre-workflow-hooks.md#dynamic-repo-config-generation). 2. Pre workflow hooks are run outside of Atlantis commands. Which means they do not surface their output back to the PR as a comment. -[[toc]] - ## Usage Pre workflow hooks can only be specified in the Server-Side Repo Config under the @@ -19,9 +17,9 @@ Pre workflow hooks can only be specified in the Server-Side Repo Config under th ::: tip Note By default, `pre-workflow-hooks` do not prevent Atlantis from executing its workflows(`plan`, `apply`) even if a `run` command exits with an error. This -behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.html#fail-on-pre-workflow-hook-error) +behavior can be changed by setting the [fail-on-pre-workflow-hook-error](server-configuration.md#fail-on-pre-workflow-hook-error) flag in the Atlantis server configuration. -::: +::: ## Atlantis Command Targetting @@ -84,7 +82,7 @@ repos: ### Custom `run` Command This is very similar to the [custom workflow run -command](custom-workflows.html#custom-run-command). +command](custom-workflows.md#custom-run-command). ```yaml - run: custom-command @@ -98,6 +96,7 @@ command](custom-workflows.html#custom-run-command). | shellArgs | string | '-c' | no | The shell arguments to use for running the command | ::: tip Notes + * `run` commands are executed with the following environment variables: * `BASE_REPO_NAME` - Name of the repository that the pull request will be merged into, ex. `atlantis`. * `BASE_REPO_OWNER` - Owner of the repository that the pull request will be merged into, ex. `runatlantis`. @@ -109,7 +108,7 @@ command](custom-workflows.html#custom-run-command). * `PULL_NUM` - Pull request number or ID, ex. `2`. * `PULL_URL` - Pull request URL, ex. `https://github.com/runatlantis/atlantis/pull/2`. * `PULL_AUTHOR` - Username of the pull request author, ex. `acme-user`. - * `DIR` - The absolute path to the root of the cloned repository. + * `DIR` - The absolute path to the root of the cloned repository. * `USER_NAME` - Username of the VCS user running command, ex. `acme-user`. During an autoplan, the user will be the Atlantis API user, ex. `atlantis`. * `COMMENT_ARGS` - Any additional flags passed in the comment on the pull request. Flags are separated by commas and every character is escaped, ex. `atlantis plan -- arg1 arg2` will result in `COMMENT_ARGS=\a\r\g\1,\a\r\g\2`. diff --git a/runatlantis.io/docs/provider-credentials.md b/runatlantis.io/docs/provider-credentials.md index 793c082e94..09dd289759 100644 --- a/runatlantis.io/docs/provider-credentials.md +++ b/runatlantis.io/docs/provider-credentials.md @@ -1,17 +1,19 @@ # Provider Credentials + Atlantis runs Terraform by simply executing `terraform plan` and `apply` commands on the server Atlantis is hosted on. Just like when you run Terraform locally, Atlantis needs credentials for your specific provider. It's up to you how you provide credentials for your specific provider to Atlantis: -* The Atlantis [Helm Chart](deployment.html#kubernetes-helm-chart) and - [AWS Fargate Module](deployment.html#aws-fargate) have their own mechanisms for provider + +* The Atlantis [Helm Chart](deployment.md#kubernetes-helm-chart) and + [AWS Fargate Module](deployment.md#aws-fargate) have their own mechanisms for provider credentials. Read their docs. 
* If you're running Atlantis in a cloud then many clouds have ways to give cloud API access to applications running on them, ex: - * [AWS EC2 Roles](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "EC2 Role") - * [GCE Instance Service Accounts](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference) + * [AWS EC2 Roles](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "EC2 Role") + * [GCE Instance Service Accounts](https://registry.terraform.io/providers/hashicorp/google/latest/docs/guides/provider_reference) * Many users set environment variables, ex. `AWS_ACCESS_KEY`, where Atlantis is running. * Others create the necessary config files, ex. `~/.aws/credentials`, where Atlantis is running. * Use the [HashiCorp Vault Provider](https://registry.terraform.io/providers/hashicorp/vault/latest/docs) @@ -22,10 +24,10 @@ As a general rule, if you can `ssh` or `exec` into the server where Atlantis is running and run `terraform` commands like you would locally, then Atlantis will work. ::: - ## AWS Specific Info ### Multiple AWS Accounts + Atlantis supports multiple AWS accounts through the use of Terraform's [AWS Authentication](https://registry.terraform.io/providers/hashicorp/aws/latest/docs) (Search for "Authentication"). @@ -41,6 +43,7 @@ won't work for multiple accounts since Atlantis wouldn't know which environment Terraform with. ### Assume Role Session Names + If you're using Terraform < 0.12, Atlantis injects 5 Terraform variables that can be used to dynamically name the assume role session name. Setting the `session_name` allows you to trace API calls made through Atlantis back to a specific user and repo via CloudWatch: @@ -89,5 +92,6 @@ You can still set these variables yourself using the `extra_args` configuration. ::: ## Next Steps -* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.html) -* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.html) + +* If you want to configure Atlantis further, read [Configuring Atlantis](configuring-atlantis.md) +* If you're ready to use Atlantis, read [Using Atlantis](using-atlantis.md) diff --git a/runatlantis.io/docs/repo-level-atlantis-yaml.md b/runatlantis.io/docs/repo-level-atlantis-yaml.md index c4c6ed3792..11feb31224 100644 --- a/runatlantis.io/docs/repo-level-atlantis-yaml.md +++ b/runatlantis.io/docs/repo-level-atlantis-yaml.md @@ -1,31 +1,33 @@ # Repo Level atlantis.yaml Config + An `atlantis.yaml` file specified at the root of a Terraform repo allows you to instruct Atlantis on the structure of your repo and set custom workflows. -[[toc]] - ## Do I need an atlantis.yaml file? + `atlantis.yaml` files are only required if you wish to customize some aspect of Atlantis. The default Atlantis config works for many users without changes. Read through the [use-cases](#use-cases) to determine if you need it. ## Enabling atlantis.yaml + By default, all repos are allowed to have an `atlantis.yaml` file, but some of the keys are restricted by default. Restricted keys can be set in the server-side `repos.yaml` repo config file. You can enable `atlantis.yaml` to override restricted -keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.html) for +keys by setting the `allowed_overrides` key there. See the [Server Side Repo Config](server-side-repo-config.md) for more details. 
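As a sketch, a server-side `repos.yaml` that unlocks the `workflow` and `apply_requirements` keys for a single repo (the repo id below is a placeholder) might look like:

```yaml
# repos.yaml server-side sketch: allow one repo to override restricted keys
repos:
- id: github.com/myorg/example-repo
  allowed_overrides: [workflow, apply_requirements]
  allow_custom_workflows: true
```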
-**Notes** +**Notes:** + * By default, repo root `atlantis.yaml` file is used. -* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.html) +* You can change this behaviour by setting [Server Side Repo Config](server-side-repo-config.md) ::: danger DANGER Atlantis uses the `atlantis.yaml` version from the pull request, similar to other -CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.html#allow-repos-to-define-their-own-workflows) +CI/CD systems. If you're allowing users to [create custom workflows](server-side-repo-config.md#allow-repos-to-define-their-own-workflows) then this means anyone that can create a pull request to your repo can run arbitrary code on the Atlantis server. @@ -66,7 +68,9 @@ projects: workspace: default terraform_version: v0.11.0 delete_source_branch_on_merge: true - repo_locking: true + repo_locking: true # deprecated: use repo_locks instead + repo_locks: + mode: on_plan custom_policy_check: false autoplan: when_modified: ["*.tf", "../modules/**/*.tf", ".terraform.lock.hcl"] @@ -74,6 +78,7 @@ projects: plan_requirements: [mergeable, approved, undiverged] apply_requirements: [mergeable, approved, undiverged] import_requirements: [mergeable, approved, undiverged] + silence_pr_comments: ["apply"] execution_order_group: 1 depends_on: - project-1 @@ -147,7 +152,9 @@ grep -P 'backend[\s]+"s3"' **/*.tf | ``` ## Use Cases + ### Disabling Autoplanning + ```yaml version: 3 projects: @@ -155,6 +162,7 @@ projects: autoplan: enabled: false ``` + This will stop Atlantis automatically running plan when `project1/` is updated in a pull request. @@ -178,7 +186,7 @@ Parallel plans and applies work across both multiple directories and multiple wo Given the directory structure: -``` +```plain . ├── modules │   └── module1 @@ -193,7 +201,6 @@ Given the directory structure: If you want Atlantis to plan `project1/` whenever any `.tf` files under `module1/` change or any `.tf` or `.tfvars` files under `project1/` change you could use the following configuration: - ```yaml version: 3 projects: @@ -203,12 +210,14 @@ projects: ``` Note: + * `when_modified` uses the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) * The paths are relative to the project's directory. * `when_modified` will be used by both automatic and manually run plans. * `when_modified` will continue to work for manually run plans even when autoplan is disabled. ### Supporting Terraform Workspaces + ```yaml version: 3 projects: @@ -217,34 +226,44 @@ projects: - dir: project1 workspace: production ``` + With the above config, when Atlantis determines that the configuration for the `project1` dir has changed, it will run plan for both the `staging` and `production` workspaces. 
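Each workspace project can also be given an explicit `name` (hypothetical names below) so that it can be targeted with the `-p` flag instead of a `-d`/`-w` pair:

```yaml
# atlantis.yaml sketch: one named project per workspace
version: 3
projects:
- name: project1-staging
  dir: project1
  workspace: staging
- name: project1-production
  dir: project1
  workspace: production
```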
If you want to `plan` or `apply` for a specific workspace you can use -``` + +```shell atlantis plan -w staging -d project1 ``` + and -``` + +```shell atlantis apply -w staging -d project1 ``` ### Using .tfvars files -See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.html#tfvars-files) + +See [Custom Workflow Use Cases: Using .tfvars files](custom-workflows.md#tfvars-files) ### Adding extra arguments to Terraform commands -See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.html#adding-extra-arguments-to-terraform-commands) + +See [Custom Workflow Use Cases: Adding extra arguments to Terraform commands](custom-workflows.md#adding-extra-arguments-to-terraform-commands) ### Custom init/plan/apply Commands -See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.html#custom-init-plan-apply-commands) + +See [Custom Workflow Use Cases: Custom init/plan/apply Commands](custom-workflows.md#custom-init-plan-apply-commands) ### Terragrunt -See [Custom Workflow Use Cases: Terragrunt](custom-workflows.html#terragrunt) + +See [Custom Workflow Use Cases: Terragrunt](custom-workflows.md#terragrunt) ### Running custom commands -See [Custom Workflow Use Cases: Running custom commands](custom-workflows.html#running-custom-commands) + +See [Custom Workflow Use Cases: Running custom commands](custom-workflows.md#running-custom-commands) ### Terraform Versions + If you'd like to use a different version of Terraform than what is in Atlantis' `PATH` or is set by the `--default-tf-version` flag, then set the `terraform_version` key: @@ -258,7 +277,9 @@ projects: Atlantis will automatically download and use this version. ### Requiring Approvals For Production + In this example, we only want to require `apply` approvals for the `production` directory. + ```yaml version: 3 projects: @@ -268,12 +289,14 @@ projects: apply_requirements: [approved] import_requirements: [approved] ``` + :::warning `plan_requirements`, `apply_requirements` and `import_requirements` are restricted keys so this repo will need to be configured -to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.html#repos-can-set-their-own-apply-an-applicable-subcommand). +to be allowed to set this key. See [Server-Side Repo Config Use Cases](server-side-repo-config.md#repos-can-set-their-own-apply-an-applicable-subcommand). ::: ### Order of planning/applying + ```yaml version: 3 abort_on_execution_order_fail: true @@ -283,12 +306,13 @@ projects: - dir: project2 execution_order_group: 1 ``` + With this config above, Atlantis runs planning/applying for project2 first, then for project1. Several projects can have same `execution_order_group`. Any order in one group isn't guaranteed. `parallel_plan` and `parallel_apply` respect these order groups, so parallel planning/applying works -in each group one by one. +in each group one by one. -If any plan/apply fails and `abort_on_execution_order_fail` is set to true on a repo level, all the +If any plan/apply fails and `abort_on_execution_order_fail` is set to true on a repo level, all the following groups will be aborted. For this example, if project2 fails then project1 will not run. Execution order groups are useful when you have dependencies between projects. However, they are only applicable in the case where @@ -296,6 +320,7 @@ you initiate a global apply for all of your projects, i.e `atlantis apply`. If y Thus, the `depends_on` key is more useful in this case. 
and can be used in conjunction with execution order groups. The following configuration is an example of how to use execution order groups and depends_on together to enforce dependencies between projects. + ```yaml version: 3 projects: @@ -323,6 +348,7 @@ projects: workspace: production workflow: infra ``` + the `depends_on` feature will make sure that `production` is not applied before `staging` for example. ::: tip @@ -331,11 +357,14 @@ What Happens if one or more project's dependencies are not applied? If there's one or more projects in the dependency list which is not in applied status, users will see an error message like this: `Can't apply your project unless you apply its dependencies` ::: + ### Autodiscovery Config + ```yaml autodiscover: mode: "auto" ``` + The above is the default configuration for `autodiscover.mode`. When `autodiscover.mode` is auto, projects will be discovered only if the repo has no `projects` configured. @@ -343,14 +372,16 @@ projects will be discovered only if the repo has no `projects` configured. autodiscover: mode: "disabled" ``` + With the config above, Atlantis will never try to discover projects, even when there are no `projects` configured. This is useful if dynamically generating Atlantis config in pre_workflow hooks. -See [Dynamic Repo Config Generation](pre-workflow-hooks.html#dynamic-repo-config-generation). +See [Dynamic Repo Config Generation](pre-workflow-hooks.md#dynamic-repo-config-generation). ```yaml autodiscover: mode: "enabled" ``` + With the config above, Atlantis will unconditionally try to discover projects based on modified_files, even when the directory of the project is missing from the configured `projects` in the repo configuration. If a discovered project has the same directory as a project which was manually configured in `projects`, @@ -360,10 +391,13 @@ Use this feature when some projects require specific configuration in a repo wit it's still desirable for Atlantis to plan/apply for projects not enumerated in the config. ### Custom Backend Config -See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.html#custom-backend-config) + +See [Custom Workflow Use Cases: Custom Backend Config](custom-workflows.md#custom-backend-config) ## Reference + ### Top-Level Keys + ```yaml version: 3 automerge: false @@ -372,16 +406,18 @@ projects: workflows: allowed_regexp_prefixes: ``` -| Key | Type | Default | Required | Description | -|-------------------------------|----------------------------------------------------------|---------|----------|--------------------------------------------------------------------------------------------------------------------------------------| -| version | int | none | **yes** | This key is required and must be set to `3`. | -| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. | -| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | -| projects | array[[Project](repo-level-atlantis-yaml.html#project)] | `[]` | no | Lists the projects in this repo. | -| workflows
*(restricted)* | map[string: [Workflow](custom-workflows.html#reference)] | `{}` | no | Custom workflows. | -| allowed_regexp_prefixes | array[string] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.html#enable-regexp-cmd) flag is used. | + +| Key | Type | Default | Required | Description | +|-------------------------------|--------------------------------------------------------|---------|----------|------------------------------------------------------------------------------------------------------------------------------------| +| version | int | none | **yes** | This key is required and must be set to `3`. | +| automerge | bool | `false` | no | Automatically merges pull request when all plans are applied. | +| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | +| projects | array[[Project](repo-level-atlantis-yaml.md#project)] | `[]` | no | Lists the projects in this repo. | +| workflows
*(restricted)* | map[string: [Workflow](custom-workflows.md#reference)] | `{}` | no | Custom workflows. | +| allowed_regexp_prefixes | array\[string\] | `[]` | no | Lists the allowed regexp prefixes to use when the [`--enable-regexp-cmd`](server-configuration.md#enable-regexp-cmd) flag is used. | ### Project + ```yaml name: myname branch: /mybranch/ @@ -389,45 +425,62 @@ dir: mydir workspace: myworkspace execution_order_group: 0 delete_source_branch_on_merge: false -repo_locking: true +repo_locking: true # deprecated: use repo_locks instead +repo_locks: + mode: on_plan custom_policy_check: false autoplan: terraform_version: 0.11.0 plan_requirements: ["approved"] apply_requirements: ["approved"] import_requirements: ["approved"] +silence_pr_comments: ["apply"] workflow: myworkflow ``` -| Key | Type | Default | Required | Description | -|------------------------------------------|-----------------------|-------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. | -| branch | string | none | no | Regex matching projects by the base branch of pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. | -| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. Use `.` to indicate the repo root. | -| workspace | string | `"default"` | no | The [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) for this project. Atlantis will switch to this workplace when planning/applying and will create it if it doesn't exist. | -| execution_order_group | int | `0` | no | Index of execution order group. Projects will be sort by this field before planning/applying. | -| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | -| repo_locking | bool | `true` | no | Get a repository lock in this project when plan. | -| custom_policy_check | bool | `false` | no | Enable using policy check tools other than Conftest | -| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, will use the autoplan config. See [Autoplanning](autoplanning.html). | -| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. | -| plan_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| apply_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| import_requirements
*(restricted)* | array[string] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| workflow
*(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. | +| Key | Type | Default | Required | Description | +|-------------------------------|-------------------------|-----------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| name | string | none | maybe | Required if there is more than one project with the same `dir` and `workspace`. This project name can be used with the `-p` flag. | +| branch | string | none | no | Regex matching projects by the base branch of the pull request (the branch the pull request is getting merged into). Only projects that match the PR's branch will be considered. By default, all branches are matched. | +| dir | string | none | **yes** | The directory of this project relative to the repo root. For example if the project was under `./project1` then use `project1`. Use `.` to indicate the repo root. | +| workspace | string | `"default"` | no | The [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) for this project. Atlantis will switch to this workspace when planning/applying and will create it if it doesn't exist. | +| execution_order_group | int | `0` | no | Index of execution order group. Projects will be sorted by this field before planning/applying. | +| delete_source_branch_on_merge | bool | `false` | no | Automatically deletes the source branch on merge. | +| repo_locking | bool | `true` | no | (deprecated) Get a repository lock in this project when running plan. | +| repo_locks | [RepoLocks](#repolocks) | `mode: on_plan` | no | Get a repository lock in this project on plan or apply. See [RepoLocks](#repolocks) for more details. | +| custom_policy_check | bool | `false` | no | Enable using policy check tools other than Conftest. | +| autoplan | [Autoplan](#autoplan) | none | no | A custom autoplan configuration. If not specified, the default autoplan config will be used. See [Autoplanning](autoplanning.md). | +| terraform_version | string | none | no | A specific Terraform version to use when running commands for this project. Must be [Semver compatible](https://semver.org/), ex. `v0.11.0`, `0.12.0-beta1`. | +| plan_requirements
*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| apply_requirements
*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| import_requirements
*(restricted)* | array\[string\] | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| silence_pr_comments | array\[string\] | none | no | Silence PR comments from defined stages while preserving PR status checks. Supported values are: `plan`, `apply`. | +| workflow
*(restricted)* | string | none | no | A custom workflow. If not specified, Atlantis will use its default workflow. | ::: tip A project represents a Terraform state. Typically, there is one state per directory and workspace however it's possible to have multiple states in the same directory using `terraform init -backend-config=custom-config.tfvars`. -Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.html#custom-backend-config) for more details. +Atlantis supports this but requires the `name` key to be specified. See [Custom Backend Config](custom-workflows.md#custom-backend-config) for more details. ::: ### Autoplan + ```yaml enabled: true when_modified: ["*.tf", "terragrunt.hcl", ".terraform.lock.hcl"] ``` -| Key | Type | Default | Required | Description | -|-----------------------|---------------|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. | -| when_modified | array[string] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.html). Paths are relative to the project's dir. | + +| Key | Type | Default | Required | Description | +|-----------------------|-----------------|----------------|----------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| enabled | boolean | `true` | no | Whether autoplanning is enabled for this project. | +| when_modified | array\[string\] | `["**/*.tf*"]` | no | Uses [.dockerignore](https://docs.docker.com/engine/reference/builder/#dockerignore-file) syntax. If any modified file in the pull request matches, this project will be planned. See [Autoplanning](autoplanning.md). Paths are relative to the project's dir. | + +### RepoLocks + +```yaml +mode: on_apply +``` + +| Key | Type | Default | Required | Description | +|------|--------|-----------|----------|---------------------------------------------------------------------------------------------------------------------------------------| +| mode | `Mode` | `on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. Valid values are `disabled`, `on_plan` and `on_apply`. | diff --git a/runatlantis.io/docs/requirements.md b/runatlantis.io/docs/requirements.md index e300e63fe7..a661ac3508 100644 --- a/runatlantis.io/docs/requirements.md +++ b/runatlantis.io/docs/requirements.md @@ -1,19 +1,21 @@ # Requirements + Atlantis works with most Git hosts and Terraform setups. Read on to confirm it works with yours. -[[toc]] - ## Git Host + Atlantis integrates with the following Git hosts: * GitHub (public, private or enterprise) * GitLab (public, private or enterprise) +* Gitea (public, private and compatible forks like Forgejo) * Bitbucket Cloud aka bitbucket.org (public or private) * Bitbucket Server aka Stash * Azure DevOps ## Terraform State + Atlantis supports all backend types **except for local state**. 
We don't support local state because Atlantis does not have permanent storage and it doesn't commit the new statefile back to version control. @@ -24,17 +26,20 @@ storage from Terraform Cloud. This is fully supported by Atlantis. ::: ## Repository Structure + Atlantis supports any Terraform repository structure, for example: ### Single Terraform Project At Repo Root -``` + +```plain . ├── main.tf └── ... ``` ### Multiple Project Folders -``` + +```plain . ├── project1 │   ├── main.tf @@ -45,7 +50,8 @@ Atlantis supports any Terraform repository structure, for example: ``` ### Modules -``` + +```plain . ├── project1 │   ├── main.tf @@ -55,35 +61,42 @@ Atlantis supports any Terraform repository structure, for example:    ├── main.tf └── ... ``` + With modules, if you want `project1` automatically planned when `module1` is modified -you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#configuring-planning) for more details. +you need to create an `atlantis.yaml` file. See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#configuring-planning) for more details. + +### Terraform Workspaces -### Terraform Workspaces *See [Terraform's docs](https://developer.hashicorp.com/terraform/language/state/workspaces) if you are unfamiliar with workspaces.* If you're using Terraform `>= 0.9.0`, Atlantis supports workspaces through an `atlantis.yaml` file that tells Atlantis the names of your workspaces -(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#supporting-terraform-workspaces) for more details) +(see [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#supporting-terraform-workspaces) for more details) ### .tfvars Files -``` + +```plain . ├── production.tfvars │── staging.tfvars └── main.tf ``` + For Atlantis to be able to plan automatically with `.tfvars files`, you need to create an `atlantis.yaml` file to tell it to use `-var-file={YOUR_FILE}`. -See [atlantis.yaml Use Cases](custom-workflows.html#tfvars-files) for more details. +See [atlantis.yaml Use Cases](custom-workflows.md#tfvars-files) for more details. ### Multiple Repos + Atlantis supports multiple repos as well–as long as there is a webhook configured for each repo. ## Terraform Versions + Atlantis supports all Terraform versions (including 0.12) and can be configured -to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.html). +to use different versions for different repositories/projects. See [Terraform Versions](terraform-versions.md). ## Next Steps + * If your Terraform setup meets the Atlantis requirements, continue the installation - guide and set up your [Git Host Access Credentials](access-credentials.html) + guide and set up your [Git Host Access Credentials](access-credentials.md) diff --git a/runatlantis.io/docs/security.md b/runatlantis.io/docs/security.md index a6bafda5a8..0f5d8df4c6 100644 --- a/runatlantis.io/docs/security.md +++ b/runatlantis.io/docs/security.md @@ -1,13 +1,16 @@ # Security -[[toc]] + ## Exploits + Because you usually run Atlantis on a server with credentials that allow access to your infrastructure it's important that you deploy Atlantis securely. 
Atlantis could be exploited by + * An attacker submitting a pull request that contains a malicious Terraform file that uses a malicious provider or an [`external` data source](https://registry.terraform.io/providers/hashicorp/external/latest/docs/data-sources/data_source) that Atlantis then runs `terraform plan` on (which it does automatically unless you've turned off automatic plans). * Running `terraform apply` on a malicious Terraform file with [local-exec](https://developer.hashicorp.com/terraform/language/resources/provisioners/local-exec) + ```tf resource "null_resource" "null" { provisioner "local-exec" { @@ -15,10 +18,12 @@ Atlantis could be exploited by } } ``` + * Running malicious custom build commands specified in an `atlantis.yaml` file. Atlantis uses the `atlantis.yaml` file from the pull request branch, **not** `main`. * Someone adding `atlantis plan/apply` comments on your valid pull requests causing terraform to run when you don't want it to. ## Bitbucket Cloud (bitbucket.org) + ::: danger Bitbucket Cloud does not support webhook secrets. This could allow attackers to spoof requests from Bitbucket. Ensure you are allowing only Bitbucket IPs. ::: @@ -33,16 +38,21 @@ To prevent this, allowlist [Bitbucket's IP addresses](https://confluence.atlassi (see Outbound IPv4 addresses). ## Mitigations + ### Don't Use On Public Repos + Because anyone can comment on public pull requests, even with all the security mitigations available, it's still dangerous to run Atlantis on public repos without proper configuration of the security settings. ### Don't Use `--allow-fork-prs` + If you're running on a public repo (which isn't recommended, see above) you shouldn't set `--allow-fork-prs` (defaults to false) because anyone can open up a pull request from their fork to your repo. ### `--repo-allowlist` + Atlantis requires you to specify a allowlist of repositories it will accept webhooks from via the `--repo-allowlist` flag. For example: + * Specific repositories: `--repo-allowlist=github.com/runatlantis/atlantis,github.com/runatlantis/atlantis-tests` * Your whole organization: `--repo-allowlist=github.com/runatlantis/*` * Every repository in your GitHub Enterprise install: `--repo-allowlist=github.yourcompany.com/*` @@ -52,19 +62,22 @@ For example: This flag ensures your Atlantis install isn't being used with repositories you don't control. See `atlantis server --help` for more details. ### Protect Terraform Planning + If attackers submitting pull requests with malicious Terraform code is in your threat model then you must be aware that `terraform apply` approvals are not enough. It is possible to run malicious code in a `terraform plan` using the [`external` data source](https://registry.terraform.io/providers/hashicorp/external/latest/docs/data-sources/data_source) or by specifying a malicious provider. This code could then exfiltrate your credentials. To prevent this, you could: + 1. Bake providers into the Atlantis image or host and deny egress in production. 1. Implement the provider registry protocol internally and deny public egress, that way you control who has write access to the registry. -1. Modify your [server-side repo configuration](https://www.runatlantis.io/docs/server-side-repo-config.html)'s `plan` step to validate against the +1. Modify your [server-side repo configuration](server-side-repo-config.md)'s `plan` step to validate against the use of disallowed providers or data sources or PRs from not allowed users. You could also add in extra validation at this point, e.g. 
requiring a "thumbs-up" on the PR before allowing the `plan` to continue. Conftest could be of use here. ### `--var-file-allowlist` + The files on your Atlantis install may be accessible as [variable definition files](https://developer.hashicorp.com/terraform/language/values/variables#variable-definitions-tfvars-files) from pull requests by adding `atlantis plan -- -var-file=/path/to/file` comments. To mitigate this security risk, Atlantis has limited such access @@ -72,6 +85,7 @@ only to the files allowlisted by the `--var-file-allowlist` flag. If this argume Atlantis' data directory. ### Webhook Secrets + Atlantis should be run with Webhook secrets set via the `$ATLANTIS_GH_WEBHOOK_SECRET`/`$ATLANTIS_GITLAB_WEBHOOK_SECRET` environment variables. Even with the `--repo-allowlist` flag set, without a webhook secret, attackers could make requests to Atlantis posing as a repository that is allowlisted. Webhook secrets ensure that the webhook requests are actually coming from your VCS provider (GitHub or GitLab). @@ -81,17 +95,20 @@ If you are using Azure DevOps, instead of webhook secrets add a [basic username ::: ### Azure DevOps Basic Authentication + Azure DevOps supports sending a basic authentication header in all webhook events. This requires using an HTTPS URL for your webhook location. ### SSL/HTTPS + If you're using webhook secrets but your traffic is over HTTP then the webhook secrets could be stolen. Enable SSL/HTTPS using the `--ssl-cert-file` and `--ssl-key-file` flags. ### Enable Authentication on Atlantis Web Server + It is very recommended to enable authentication in the web service. Enable BasicAuth using the `--web-basic-auth=true` and setup a username and a password using `--web-username=yourUsername` and `--web-password=yourPassword` flags. -You can also pass these as environment variables `ATLANTIS_WEB_BASIC_AUTH=true` `ATLANTIS_WEB_USERNAME=yourUsername` and `ATLANTIS_WEB_PASSWORD=yourPassword`. +You can also pass these as environment variables `ATLANTIS_WEB_BASIC_AUTH=true` `ATLANTIS_WEB_USERNAME=yourUsername` and `ATLANTIS_WEB_PASSWORD=yourPassword`. :::tip Tip We do encourage the usage of complex passwords in order to prevent basic bruteforcing attacks. diff --git a/runatlantis.io/docs/server-configuration.md b/runatlantis.io/docs/server-configuration.md index e2722f5478..6e421c0f3d 100644 --- a/runatlantis.io/docs/server-configuration.md +++ b/runatlantis.io/docs/server-configuration.md @@ -1,12 +1,12 @@ # Server Configuration + This page explains how to configure the `atlantis server` command. Configuration to `atlantis server` can be specified via command line flags, environment variables, a config file or a mix of the three. -[[toc]] - ## Environment Variables + All flags can be specified as environment variables. 1. Take the flag name, ex. `--gh-user` @@ -24,11 +24,13 @@ The flag `--atlantis-url` is set by the environment variable `ATLANTIS_ATLANTIS_ ::: ## Config File + All flags can also be specified via a YAML config file. To use a YAML config file, run `atlantis server --config /path/to/config.yaml`. The keys of your config file should be the same as the flag names, ex. + ```yaml gh-token: ... log-level: ... @@ -40,40 +42,49 @@ The `--config` config file is only used as an alternate way of setting `atlantis ::: ## Precedence + Values are chosen in this order: + 1. Flags 1. Environment Variables 1. 
Config File - ## Flags + ### `--allow-commands` + ```bash atlantis server --allow-commands=version,plan,apply,unlock,approve_policies # or ATLANTIS_ALLOW_COMMANDS='version,plan,apply,unlock,approve_policies' ``` + List of allowed commands to be run on the Atlantis server. Defaults to `version,plan,apply,unlock,approve_policies`. Notes: - * Accepts a comma separated list, ex. `command1,command2`. - * `version`, `plan`, `apply`, `unlock`, `approve_policies`, `import`, `state` and `all` are available. - * `all` is a special keyword that allows all commands. If pass `all` then all other commands will be ignored. + +* Accepts a comma separated list, ex. `command1,command2`. +* `version`, `plan`, `apply`, `unlock`, `approve_policies`, `import`, `state` and `all` are available. +* `all` is a special keyword that allows all commands. If `all` is passed, all other commands will be ignored. ### `--allow-draft-prs` + ```bash atlantis server --allow-draft-prs # or ATLANTIS_ALLOW_DRAFT_PRS=true ``` + Respond to pull requests from draft PRs. Defaults to `false`. ### `--allow-fork-prs` + ```bash atlantis server --allow-fork-prs # or ATLANTIS_ALLOW_FORK_PRS=true ``` + Respond to pull requests from forks. Defaults to `false`. :::warning SECURITY WARNING @@ -85,33 +96,40 @@ Values are chosen in this order: ::: ### `--api-secret` + ```bash atlantis server --api-secret="secret" # or (recommended) ATLANTIS_API_SECRET="secret" ``` - Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.html). + + Required secret used to validate requests made to the [`/api/*` endpoints](api-endpoints.md). ### `--atlantis-url` + ```bash atlantis server --atlantis-url="https://my-domain.com:9090/basepath" # or ATLANTIS_ATLANTIS_URL=https://my-domain.com:9090/basepath ``` + Specify the URL that Atlantis is accessible from. Used in the Atlantis UI and in links from pull request comments. Defaults to `http://$(hostname):$port` where `$port` is from the [`--port`](#port) flag. Supports a basepath if you're hosting Atlantis under a path. Notes: - * If a load balancer with a non http/https port (not the one defined in the `--port` flag) is used, update the URL to include the port like in the example above. - * This URL is used as the `details` link next to each atlantis job to view the job's logs. + +* If a load balancer with a non http/https port (not the one defined in the `--port` flag) is used, update the URL to include the port like in the example above. +* This URL is used as the `details` link next to each atlantis job to view the job's logs. ### `--autodiscover-mode` + ```bash atlantis server --autodiscover-mode="" # or ATLANTIS_AUTODISCOVER_MODE="" ``` + Sets auto discover mode; the default is `auto`. When set to `auto`, projects in a repo will be discovered by Atlantis when there are no projects configured in the repo config. If one or more projects are defined in the repo config then auto discovery will be completely disabled. @@ -123,40 +141,45 @@ Values are chosen in this order: When set to `disabled` projects will never be discovered, even if there are no projects configured in the repo config. ### `--automerge` + ```bash atlantis server --automerge # or ATLANTIS_AUTOMERGE=true ``` + Automatically merge pull requests after all plans have been successfully applied. - Defaults to `false`. See [Automerging](automerging.html) for more details. + Defaults to `false`. See [Automerging](automerging.md) for more details.
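Like any other flag, the options above can equally live in the YAML config file passed to `atlantis server --config`. A sketch with illustrative values:

```yaml
# config.yaml sketch: flag names become config keys
allow-commands: version,plan,apply,unlock,approve_policies
allow-draft-prs: false
atlantis-url: https://atlantis.example.com
automerge: true
```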
### `--autoplan-file-list` + ```bash # NOTE: Use single quotes to avoid shell expansion of *. atlantis server --autoplan-file-list='**/*.tf,project1/*.pkr.hcl' # or ATLANTIS_AUTOPLAN_FILE_LIST='**/*.tf,project1/*.pkr.hcl' ``` + List of file patterns that Atlantis will use to check if a directory contains modified files that should trigger project planning. Notes: - * Accepts a comma separated list, ex. `pattern1,pattern2`. - * Patterns use the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) - * List of file patterns will be used by both automatic and manually run plans. - * When not set, defaults to all `.tf`, `.tfvars`, `.tfvars.json`, `terragrunt.hcl` and `.terraform.lock.hcl` files + +* Accepts a comma separated list, ex. `pattern1,pattern2`. +* Patterns use the [`.dockerignore` syntax](https://docs.docker.com/engine/reference/builder/#dockerignore-file) +* List of file patterns will be used by both automatic and manually run plans. +* When not set, defaults to all `.tf`, `.tfvars`, `.tfvars.json`, `terragrunt.hcl` and `.terraform.lock.hcl` files (`--autoplan-file-list='**/*.tf,**/*.tfvars,**/*.tfvars.json,**/terragrunt.hcl,**/.terraform.lock.hcl'`). - * Setting `--autoplan-file-list` will override the defaults. You **must** add `**/*.tf` and other defaults if you want to include them. - * A custom [Workflow](repo-level-atlantis-yaml.html#configuring-planning) that uses autoplan `when_modified` will ignore this value. +* Setting `--autoplan-file-list` will override the defaults. You **must** add `**/*.tf` and other defaults if you want to include them. +* A custom [Workflow](repo-level-atlantis-yaml.md#configuring-planning) that uses autoplan `when_modified` will ignore this value. Examples: - * Autoplan when any `*.tf` or `*.tfvars` file is modified. - * `--autoplan-file-list='**/*.tf,**/*.tfvars'` - * Autoplan when any `*.tf` file is modified except in `project2/` directory - * `--autoplan-file-list='**/*.tf,!project2'` - * Autoplan when any `*.tf` files or `.yml` files in subfolder of `project1` is modified. - * `--autoplan-file-list='**/*.tf,project2/**/*.yml'` +* Autoplan when any `*.tf` or `*.tfvars` file is modified. + * `--autoplan-file-list='**/*.tf,**/*.tfvars'` +* Autoplan when any `*.tf` file is modified except in `project2/` directory + * `--autoplan-file-list='**/*.tf,!project2'` +* Autoplan when any `*.tf` files or `.yml` files in subfolder of `project1` is modified. + * `--autoplan-file-list='**/*.tf,project2/**/*.yml'` ::: warning NOTE By default, changes to modules will not trigger autoplanning. See the flags below. @@ -202,8 +225,8 @@ Current default is "" (disabled). Examples: - * `**/*.tf` - will index all projects that have a `.tf` file in their directory, and plan them whenever an in-repo module dependency has changed. - * `**/*.tf,!foo,!bar` - will index all projects containing `.tf` except `foo` and `bar` and plan them whenever an in-repo module dependency has changed. +* `**/*.tf` - will index all projects that have a `.tf` file in their directory, and plan them whenever an in-repo module dependency has changed. +* `**/*.tf,!foo,!bar` - will index all projects containing `.tf` except `foo` and `bar` and plan them whenever an in-repo module dependency has changed. This allows projects to opt-out of auto-planning when a module dependency changes. ::: warning NOTE @@ -217,35 +240,43 @@ and set `--autoplan-modules` to `false`. 
::: ### `--azuredevops-hostname` + ```bash atlantis server --azuredevops-hostname="dev.azure.com" # or ATLANTIS_AZUREDEVOPS_HOSTNAME="dev.azure.com" ``` + Azure DevOps hostname to support cloud and self hosted instances. Defaults to `dev.azure.com`. ### `--azuredevops-token` + ```bash atlantis server --azuredevops-token="RandomStringProducedByAzureDevOps" # or (recommended) ATLANTIS_AZUREDEVOPS_TOKEN="RandomStringProducedByAzureDevOps" ``` + Azure DevOps token of API user. ### `--azuredevops-user` + ```bash atlantis server --azuredevops-user="username@example.com" # or ATLANTIS_AZUREDEVOPS_USER="username@example.com" ``` + Azure DevOps username of API user. ### `--azuredevops-webhook-password` + ```bash atlantis server --azuredevops-webhook-password="password123" # or (recommended) ATLANTIS_AZUREDEVOPS_WEBHOOK_PASSWORD="password123" ``` + Azure DevOps basic authentication password for inbound webhooks (see [docs](https://docs.microsoft.com/en-us/azure/devops/service-hooks/authorize?view=azure-devops)). @@ -258,47 +289,57 @@ and set `--autoplan-modules` to `false`. ::: ### `--azuredevops-webhook-user` + ```bash atlantis server --azuredevops-webhook-user="username@example.com" # or ATLANTIS_AZUREDEVOPS_WEBHOOK_USER="username@example.com" ``` + Azure DevOps basic authentication username for inbound webhooks. ### `--bitbucket-base-url` + ```bash atlantis server --bitbucket-base-url="http://bitbucket.corp:7990/basepath" # or ATLANTIS_BITBUCKET_BASE_URL="http://bitbucket.corp:7990/basepath" ``` + Base URL of Bitbucket Server (aka Stash) installation. Must include `http://` or `https://`. If using Bitbucket Cloud (bitbucket.org), do not set. Defaults to `https://api.bitbucket.org`. ### `--bitbucket-token` + ```bash atlantis server --bitbucket-token="token" # or (recommended) ATLANTIS_BITBUCKET_TOKEN="token" ``` + Bitbucket app password of API user. ### `--bitbucket-user` + ```bash atlantis server --bitbucket-user="myuser" # or ATLANTIS_BITBUCKET_USER="myuser" ``` + Bitbucket username of API user. ### `--bitbucket-webhook-secret` + ```bash atlantis server --bitbucket-webhook-secret="secret" # or (recommended) ATLANTIS_BITBUCKET_WEBHOOK_SECRET="secret" ``` + Secret used to validate Bitbucket webhooks. Only Bitbucket Server supports webhook secrets. - For Bitbucket.org, see [Security](security.html#bitbucket-cloud-bitbucket-org) for mitigations. + For Bitbucket.org, see [Security](security.md#bitbucket-cloud-bitbucket-org) for mitigations. ::: warning SECURITY WARNING If not specified, Atlantis won't be able to validate that the incoming webhook call came from Bitbucket. @@ -306,143 +347,173 @@ and set `--autoplan-modules` to `false`. ::: ### `--checkout-depth` + ```bash atlantis server --checkout-depth=0 # or ATLANTIS_CHECKOUT_DEPTH=0 ``` + The number of commits to fetch from the branch. Used if `--checkout-strategy=merge` since the `--checkout-strategy=branch` (default) checkout strategy always defaults to a shallow clone using a depth of 1. - Defaults to `0`. See [Checkout Strategy](checkout-strategy.html) for more details. + Defaults to `0`. See [Checkout Strategy](checkout-strategy.md) for more details. ### `--checkout-strategy` + ```bash atlantis server --checkout-strategy="" # or ATLANTIS_CHECKOUT_STRATEGY="" ``` + How to check out pull requests. Use either `branch` or `merge`. - Defaults to `branch`. See [Checkout Strategy](checkout-strategy.html) for more details. + Defaults to `branch`. See [Checkout Strategy](checkout-strategy.md) for more details. 
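Since `--checkout-depth` only takes effect with the `merge` strategy, the two flags are usually set together. A config-file sketch (the depth value is arbitrary):

```yaml
# config.yaml sketch: merge checkouts with a deeper fetch
checkout-strategy: merge
checkout-depth: 50
```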
### `--config`
+
```bash
atlantis server --config="my/config/file.yaml"
# or
ATLANTIS_CONFIG="my/config/file.yaml"
```
+
YAML config file where flags can also be set. See [Config File](#config-file) for more details.

### `--data-dir`
+
```bash
atlantis server --data-dir="path/to/data/dir"
# or
ATLANTIS_DATA_DIR="path/to/data/dir"
```
+
Directory where Atlantis will store its data. Will be created if it doesn't exist.
Defaults to `~/.atlantis`. Atlantis will store its database, checked out repos, Terraform plans and downloaded
- Terraform binaries here. If Atlantis loses this directory, [locks](locking.html)
+ Terraform binaries here. If Atlantis loses this directory, [locks](locking.md)
will be lost and unapplied plans will be lost.

Note that the atlantis user is restricted to `~/.atlantis`.
If you set the `--data-dir` flag to a path outside of the Atlantis user's home directory, ensure that you grant the atlantis user the correct permissions.

### `--default-tf-version`
+
```bash
atlantis server --default-tf-version="v0.12.31"
# or
ATLANTIS_DEFAULT_TF_VERSION="v0.12.31"
```
+
Terraform version to default to. Will download to `/bin/terraform`
- if not in `PATH`. See [Terraform Versions](terraform-versions.html) for more details.
+ if not in `PATH`. See [Terraform Versions](terraform-versions.md) for more details.

### `--disable-apply-all`
+
```bash
atlantis server --disable-apply-all
# or
ATLANTIS_DISABLE_APPLY_ALL=true
```
+
Disable the `atlantis apply` command so a specific project/workspace/directory has to be specified for applies.

### `--disable-autoplan`
+
```bash
atlantis server --disable-autoplan
# or
ATLANTIS_DISABLE_AUTOPLAN=true
```
+
Disable atlantis auto planning.

### `--disable-autoplan-label`
+
```bash
atlantis server --disable-autoplan-label="no-autoplan"
# or
ATLANTIS_DISABLE_AUTOPLAN_LABEL="no-autoplan"
```
+
Disable atlantis auto planning only on pull requests with the specified label.
If the `disable-autoplan` property is `true`, this flag has no effect.

### `--disable-markdown-folding`
+
```bash
atlantis server --disable-markdown-folding
# or
- ATLANTIS_DISABLE_MARKDOWN_FOLDER=true
+ ATLANTIS_DISABLE_MARKDOWN_FOLDING=true
```
+
Disable folding in markdown output using the `<details>` html tag.

### `--disable-repo-locking`
+
```bash
atlantis server --disable-repo-locking
# or
ATLANTIS_DISABLE_REPO_LOCKING=true
```
+
Stops atlantis from locking projects and/or workspaces when running terraform.

### `--disable-unlock-label`
+
```bash
atlantis server --disable-unlock-label do-not-unlock
# or
ATLANTIS_DISABLE_UNLOCK_LABEL="do-not-unlock"
```
+
Stops atlantis from unlocking a pull request with this label. Defaults to "" (feature disabled).

### `--emoji-reaction`
+
```bash
atlantis server --emoji-reaction thumbsup
# or
ATLANTIS_EMOJI_REACTION=thumbsup
```
- The emoji reaction to use for marking processed comments. Currently supported on Azure DevOps, GitHub and GitLab.
- Defaults to `eyes`.
+
+ The emoji reaction to use for marking processed comments. Currently supported on Azure DevOps, GitHub and GitLab. If not specified, Atlantis will not use an emoji reaction.
+ Defaults to "" (empty string).

### `--enable-diff-markdown-format`
+
```bash
atlantis server --enable-diff-markdown-format
# or
ATLANTIS_ENABLE_DIFF_MARKDOWN_FORMAT=true
```
+
Enable Atlantis to format Terraform plan output into a markdown-diff friendly format for color-coding purposes.
Useful to enable for use with GitHub.

### `--enable-policy-checks`
+
```bash
atlantis server --enable-policy-checks
# or
ATLANTIS_ENABLE_POLICY_CHECKS=true
```
- Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](https://www.runatlantis.io/docs/server-side-repo-config.html#reference).
+
+ Enables atlantis to run server side policies on the result of a terraform plan. Policies are defined in [server side repo config](server-side-repo-config.md#reference).

### `--enable-regexp-cmd`
+
```bash
atlantis server --enable-regexp-cmd
# or
ATLANTIS_ENABLE_REGEXP_CMD=true
```
+
Enable Atlantis to use regular expressions to run plan/apply commands against defined project names when the `-p` flag is passed with it.

This can be used to run all defined projects (with the `name` key) in `atlantis.yaml` using `atlantis plan -p .*`.

- The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](https://www.runatlantis.io/docs/repo-level-atlantis-yaml.html#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex.
+ The flag will only allow the regexes listed in the [`allowed_regexp_prefixes`](repo-level-atlantis-yaml.md#reference) key defined in the repo `atlantis.yaml` file. If the key is undefined, its value defaults to `[]` which will allow any regex.

This does not yet work with `-d`; to use `-p`, the repo's projects must be defined in the repo's `atlantis.yaml` file.

@@ -454,16 +525,19 @@ and set `--autoplan-modules` to `false`.
:::

### `--executable-name`
+
```bash
atlantis server --executable-name="atlantis"
# or
ATLANTIS_EXECUTABLE_NAME="atlantis"
```
+
Comment command trigger executable name. Defaults to `atlantis`.

This is useful when running multiple Atlantis servers against a single repository.

### `--fail-on-pre-workflow-hook-error`
+
```bash
atlantis server --fail-on-pre-workflow-hook-error
# or
@@ -472,32 +546,96 @@

Fail and do not run the requested Atlantis command if any of the pre workflow hooks error.

+### `--gitea-base-url`
+
+ ```bash
+ atlantis server --gitea-base-url="http://your-gitea.corp:7990/basepath"
+ # or
+ ATLANTIS_GITEA_BASE_URL="http://your-gitea.corp:7990/basepath"
+ ```
+
+ Base URL of Gitea installation.
Must include `http://` or `https://`. Defaults to `https://gitea.com` if left empty/absent. + +### `--gitea-token` + + ```bash + atlantis server --gitea-token="token" + # or (recommended) + ATLANTIS_GITEA_TOKEN="token" + ``` + + Gitea app password of API user. + +### `--gitea-user` + + ```bash + atlantis server --gitea-user="myuser" + # or + ATLANTIS_GITEA_USER="myuser" + ``` + + Gitea username of API user. + +### `--gitea-webhook-secret` + + ```bash + atlantis server --gitea-webhook-secret="secret" + # or (recommended) + ATLANTIS_GITEA_WEBHOOK_SECRET="secret" + ``` + + Secret used to validate Gitea webhooks. + + ::: warning SECURITY WARNING + If not specified, Atlantis won't be able to validate that the incoming webhook call came from Gitea. + This means that an attacker could spoof calls to Atlantis and cause it to perform malicious actions. + ::: + +### `--gitea-page-size` + + ```bash + atlantis server --gitea-page-size=30 + # or (recommended) + ATLANTIS_GITEA_PAGE_SIZE=30 + ``` + + Number of items on a single page in Gitea paged responses. + + ::: warning Configuration dependent + The default value conforms to the Gitea server's standard config setting: DEFAULT_PAGING_NUM + The highest valid value depends on the Gitea server's config setting: MAX_RESPONSE_ITEMS + ::: + ### `--gh-allow-mergeable-bypass-apply` + ```bash atlantis server --gh-allow-mergeable-bypass-apply # or ATLANTIS_GH_ALLOW_MERGEABLE_BYPASS_APPLY=true ``` + Feature flag to enable ability to use `mergeable` mode with required apply status check. ### `--gh-app-id` + ```bash atlantis server --gh-app-id="00000" # or ATLANTIS_GH_APP_ID="00000" ``` + GitHub app ID. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/apps/installations). ::: tip A GitHub app can be created by starting Atlantis first, then pointing your browser at - ``` + ```shell $(hostname)/github-app/setup ``` You'll be redirected to GitHub to create a new app, and will then be redirected to - ``` + ```shell $(hostname)/github-app/exchange-code?code=some-code ``` @@ -505,11 +643,13 @@ and set `--autoplan-modules` to `false`. ::: ### `--gh-app-key` + ```bash atlantis server --gh-app-key="-----BEGIN RSA PRIVATE KEY-----(...)" # or ATLANTIS_GH_APP_KEY="-----BEGIN RSA PRIVATE KEY-----(...)" ``` + The PEM encoded private key for the GitHub App. ::: warning SECURITY WARNING @@ -517,44 +657,69 @@ and set `--autoplan-modules` to `false`. ::: ### `--gh-app-key-file` + ```bash atlantis server --gh-app-key-file="path/to/app-key.pem" # or ATLANTIS_GH_APP_KEY_FILE="path/to/app-key.pem" ``` + Path to a GitHub App PEM encoded private key file. If set, GitHub authentication will be performed as [an installation](https://docs.github.com/en/rest/apps/installations). ### `--gh-app-slug` + ```bash atlantis server --gh-app-slug="myappslug" # or ATLANTIS_GH_APP_SLUG="myappslug" ``` + A slugged version of GitHub app name shown in pull requests comments, etc (not `Atlantis App` but something like `atlantis-app`). Atlantis uses the value of this parameter to identify the comments it has left on GitHub pull requests. This is used for functions such as `--hide-prev-plan-comments`. You need to obtain this value from your GitHub app, one way is to go to your App settings and open "Public page" from the left sidebar. Your `--gh-app-slug` value will be the last part of the URL, e.g `https://github.com/apps/`. 
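To show how the GitHub App flags above fit together, here is a hedged sketch; the ID, key path, slug, and secret below are placeholders for your own app's values:

```bash
# Sketch: authenticate as a GitHub App instead of a user account.
# All values below are placeholders.
atlantis server \
  --gh-app-id="00000" \
  --gh-app-key-file="/etc/atlantis/app-key.pem" \
  --gh-app-slug="my-atlantis-app" \
  --gh-webhook-secret="$WEBHOOK_SECRET"
```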
### `--gh-hostname`
+
```bash
atlantis server --gh-hostname="my.github.enterprise.com"
# or
ATLANTIS_GH_HOSTNAME="my.github.enterprise.com"
```
+
Hostname of your GitHub Enterprise installation. If using [GitHub.com](https://github.com),
don't set. Defaults to `github.com`.

+### `--gh-app-installation-id`
+
+ ```bash
+ atlantis server --gh-app-installation-id="123"
+ # or
+ ATLANTIS_GH_APP_INSTALLATION_ID="123"
+ ```
+
+The installation ID of a specific instance of a GitHub application. Normally this value is
+derived by querying GitHub for the list of installations of the app ID supplied via `--gh-app-id` and selecting
+the first one found; multiple installations result in an error. Use this flag if you have multiple
+instances of Atlantis but want to use a single already-installed GitHub app for all of them. You would normally do this if
+you are running a proxy as your single GitHub application that forwards webhooks to the appropriate Atlantis instance
+based on the organization or user that triggered the webhook.

### `--gh-org`
+
```bash
atlantis server --gh-org="myorgname"
# or
ATLANTIS_GH_ORG="myorgname"
```
+
GitHub organization name. Set to enable creating a private GitHub app for this organization.

### `--gh-team-allowlist`
+
```bash
atlantis server --gh-team-allowlist="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import"
# or
ATLANTIS_GH_TEAM_ALLOWLIST="myteam:plan, secteam:apply, DevOps Team:apply, DevOps Team:import"
```
+
In versions v0.21.0 and later, the GitHub team name can be a name or a slug.

In versions v0.20.1 and below, the GitHub team name had to be the case-sensitive team name.
@@ -569,28 +734,34 @@ and set `--autoplan-modules` to `false`.
:::

### `--gh-token`
+
```bash
atlantis server --gh-token="token"
# or (recommended)
ATLANTIS_GH_TOKEN="token"
```
+
GitHub token of API user.

### `--gh-user`
+
```bash
atlantis server --gh-user="myuser"
# or
ATLANTIS_GH_USER="myuser"
```
+
GitHub username of API user.

### `--gh-webhook-secret`
+
```bash
atlantis server --gh-webhook-secret="secret"
# or (recommended)
ATLANTIS_GH_WEBHOOK_SECRET="secret"
```
- Secret used to validate GitHub webhooks (see [https://developer.github.com/webhooks/securing/](https://docs.github.com/en/developers/webhooks-and-events/webhooks/securing-your-webhooks)).
+
+ Secret used to validate GitHub webhooks (see [GitHub: Validating webhook deliveries](https://docs.github.com/en/webhooks/using-webhooks/validating-webhook-deliveries)).

::: warning SECURITY WARNING
If not specified, Atlantis won't be able to validate that the incoming webhook call came from GitHub.
@@ -598,36 +769,44 @@ and set `--autoplan-modules` to `false`.
:::

### `--gitlab-hostname`
+
```bash
atlantis server --gitlab-hostname="my.gitlab.enterprise.com"
# or
ATLANTIS_GITLAB_HOSTNAME="my.gitlab.enterprise.com"
```
+
Hostname of your GitLab Enterprise installation. If using [GitLab.com](https://gitlab.com),
don't set. Defaults to `gitlab.com`.

### `--gitlab-token`
+
```bash
atlantis server --gitlab-token="token"
# or (recommended)
ATLANTIS_GITLAB_TOKEN="token"
```
+
GitLab token of API user.

### `--gitlab-user`
+
```bash
atlantis server --gitlab-user="myuser"
# or
ATLANTIS_GITLAB_USER="myuser"
```
+
GitLab username of API user.

### `--gitlab-webhook-secret`
+
```bash
atlantis server --gitlab-webhook-secret="secret"
# or (recommended)
ATLANTIS_GITLAB_WEBHOOK_SECRET="secret"
```
+
Secret used to validate GitLab webhooks.

::: warning SECURITY WARNING
@@ -636,71 +815,86 @@ and set `--autoplan-modules` to `false`.
::: ### `--help` + ```bash atlantis server --help ``` + View help. ### `--hide-prev-plan-comments` + ```bash atlantis server --hide-prev-plan-comments # or ATLANTIS_HIDE_PREV_PLAN_COMMENTS=true ``` + Hide previous plan comments to declutter PRs. This is only supported in GitHub and GitLab currently. This is not enabled by default. When using Github App, you need to set `--gh-app-slug` to enable this feature. ### `--hide-unchanged-plan-comments` + ```bash atlantis server --hide-unchanged-plan-comments # or ATLANTIS_HIDE_UNCHANGED_PLAN_COMMENTS=true ``` + Remove no-changes plan comments from the pull request. This is useful when you have many projects and want to keep the pull request clean from useless comments. ### `--include-git-untracked-files` + ```bash atlantis server --include-git-untracked-files # or ATLANTIS_INCLUDE_GIT_UNTRACKED_FILES=true ``` + Include git untracked files in the Atlantis modified file list. Used for example with CDKTF pre-workflow hooks that dynamically generate Terraform files. ### `--locking-db-type` + ```bash atlantis server --locking-db-type="" # or ATLANTIS_LOCKING_DB_TYPE="" ``` + The locking database type to use for storing plan and apply locks. Defaults to `boltdb`. Notes: - * If set to `boltdb`, only one process may have access to the boltdb instance. - * If set to `redis`, then `--redis-host`, `--redis-port`, and `--redis-password` must be set. + +* If set to `boltdb`, only one process may have access to the boltdb instance. +* If set to `redis`, then `--redis-host`, `--redis-port`, and `--redis-password` must be set. ### `--log-level` + ```bash atlantis server --log-level="" # or ATLANTIS_LOG_LEVEL="" ``` + Log level. Defaults to `info`. ### `--markdown-template-overrides-dir` + ```bash atlantis server --markdown-template-overrides-dir="path/to/templates/" # or ATLANTIS_MARKDOWN_TEMPLATE_OVERRIDES_DIR="path/to/templates/" ``` + This will be available in v0.21.0. Directory where Atlantis will read in overrides for markdown templates used to render comments on pull requests. Markdown template overrides may be specified either in individual files, or all together in a single file. All template - override files _must_ have the `.tmpl` extension, otherwise they will not be parsed. + override files *must* have the `.tmpl` extension, otherwise they will not be parsed. Markdown templates which may have overrides can be found [here](https://github.com/runatlantis/atlantis/tree/main/server/events/templates) @@ -710,67 +904,83 @@ This is useful when you have many projects and want to keep the pull request cle Defaults to the atlantis home directory `/home/atlantis/.markdown_templates/` in `/$HOME/.markdown_templates`. ### `--parallel-apply` + ```bash atlantis server --parallel-apply # or ATLANTIS_PARALLEL_APPLY=true ``` - Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence. + + Whether to run apply operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence. ### `--parallel-plan` + ```bash atlantis server --parallel-plan # or ATLANTIS_PARALLEL_PLAN=true ``` - Whether to run plan operations in parallel. Defaults to `false`. Explicit declaration in [repo config](repo-level-atlantis-yaml.html#run-plans-and-applies-in-parallel) takes precedence. + + Whether to run plan operations in parallel. Defaults to `false`. 
Explicit declaration in [repo config](repo-level-atlantis-yaml.md#run-plans-and-applies-in-parallel) takes precedence. ### `--parallel-pool-size` + ```bash atlantis server --parallel-pool-size=100 # or ATLANTIS_PARALLEL_POOL_SIZE=100 ``` + Max size of the wait group that runs parallel plans and applies (if enabled). Defaults to `15` ### `--port` + ```bash atlantis server --port=4141 # or ATLANTIS_PORT=4141 ``` + Port to bind to. Defaults to `4141`. ### `--quiet-policy-checks` + ```bash atlantis server --quiet-policy-checks # or ATLANTIS_QUIET_POLICY_CHECKS=true ``` + Exclude policy check comments from pull requests unless there's an actual error from conftest. This also excludes warnings. Defaults to `false`. ### `--redis-db` + ```bash atlantis server --redis-db=0 # or ATLANTIS_REDIS_DB=0 ``` + The Redis Database to use when using a Locking DB type of `redis`. Defaults to `0`. ### `--redis-host` + ```bash atlantis server --redis-host="localhost" # or ATLANTIS_REDIS_HOST="localhost" ``` + The Redis Hostname for when using a Locking DB type of `redis`. ### `--redis-insecure-skip-verify` + ```bash atlantis server --redis-insecure-skip-verify=false # or ATLANTIS_REDIS_INSECURE_SKIP_VERIFY=false ``` + Controls whether the Redis client verifies the Redis server's certificate chain and host name. If true, accepts any certificate presented by the server and any host name in that certificate. Defaults to `false`. ::: warning SECURITY WARNING @@ -778,82 +988,97 @@ This is useful when you have many projects and want to keep the pull request cle ::: ### `--redis-password` + ```bash atlantis server --redis-password="password123" # or (recommended) ATLANTIS_REDIS_PASSWORD="password123" ``` + The Redis Password for when using a Locking DB type of `redis`. ### `--redis-port` + ```bash atlantis server --redis-port=6379 # or ATLANTIS_REDIS_PORT=6379 ``` + The Redis Port for when using a Locking DB type of `redis`. Defaults to `6379`. ### `--redis-tls-enabled` + ```bash atlantis server --redis-tls-enabled=false # or ATLANTIS_REDIS_TLS_ENABLED=false ``` + Enables a TLS connection, with min version of 1.2, to Redis when using a Locking DB type of `redis`. Defaults to `false`. ### `--repo-allowlist` + ```bash # NOTE: Use single quotes to avoid shell expansion of *. atlantis server --repo-allowlist='github.com/myorg/*' # or ATLANTIS_REPO_ALLOWLIST='github.com/myorg/*' ``` + Atlantis requires you to specify an allowlist of repositories it will accept webhooks from. Notes: - * Accepts a comma separated list, ex. `definition1,definition2` - * Format is `{hostname}/{owner}/{repo}`, ex. `github.com/runatlantis/atlantis` - * `*` matches any characters, ex. `github.com/runatlantis/*` will match all repos in the runatlantis organization - * An entry beginning with `!` negates it, ex. `github.com/foo/*,!github.com/foo/bar` will match all github repos in the `foo` owner *except* `bar`. 
- * For Bitbucket Server: `{hostname}` is the domain without scheme and port, `{owner}` is the name of the project (not the key), and `{repo}` is the repo name - * User (not project) repositories take on the format: `{hostname}/{full name}/{repo}` (e.g., `bitbucket.example.com/Jane Doe/myatlantis` for username `jdoe` and full name `Jane Doe`, which is not very intuitive) - * For Azure DevOps the allowlist takes one of two forms: `{owner}.visualstudio.com/{project}/{repo}` or `dev.azure.com/{owner}/{project}/{repo}` - * Microsoft is in the process of changing Azure DevOps to the latter form, so it may be safest to always specify both formats in your repo allowlist for each repository until the change is complete. + +* Accepts a comma separated list, ex. `definition1,definition2` +* Format is `{hostname}/{owner}/{repo}`, ex. `github.com/runatlantis/atlantis` +* `*` matches any characters, ex. `github.com/runatlantis/*` will match all repos in the runatlantis organization +* An entry beginning with `!` negates it, ex. `github.com/foo/*,!github.com/foo/bar` will match all github repos in the `foo` owner *except* `bar`. +* For Bitbucket Server: `{hostname}` is the domain without scheme and port, `{owner}` is the name of the project (not the key), and `{repo}` is the repo name + * User (not project) repositories take on the format: `{hostname}/{full name}/{repo}` (e.g., `bitbucket.example.com/Jane Doe/myatlantis` for username `jdoe` and full name `Jane Doe`, which is not very intuitive) +* For Azure DevOps the allowlist takes one of two forms: `{owner}.visualstudio.com/{project}/{repo}` or `dev.azure.com/{owner}/{project}/{repo}` +* Microsoft is in the process of changing Azure DevOps to the latter form, so it may be safest to always specify both formats in your repo allowlist for each repository until the change is complete. Examples: - * Allowlist `myorg/repo1` and `myorg/repo2` on `github.com` - * `--repo-allowlist=github.com/myorg/repo1,github.com/myorg/repo2` - * Allowlist all repos under `myorg` on `github.com` - * `--repo-allowlist='github.com/myorg/*'` - * Allowlist all repos under `myorg` on `github.com`, excluding `myorg/untrusted-repo` - * `--repo-allowlist='github.com/myorg/*,!github.com/myorg/untrusted-repo'` - * Allowlist all repos in my GitHub Enterprise installation - * `--repo-allowlist='github.yourcompany.com/*'` - * Allowlist all repos under `myorg` project `myproject` on Azure DevOps - * `--repo-allowlist='myorg.visualstudio.com/myproject/*,dev.azure.com/myorg/myproject/*'` - * Allowlist all repositories - * `--repo-allowlist='*'` + +* Allowlist `myorg/repo1` and `myorg/repo2` on `github.com` + * `--repo-allowlist=github.com/myorg/repo1,github.com/myorg/repo2` +* Allowlist all repos under `myorg` on `github.com` + * `--repo-allowlist='github.com/myorg/*'` +* Allowlist all repos under `myorg` on `github.com`, excluding `myorg/untrusted-repo` + * `--repo-allowlist='github.com/myorg/*,!github.com/myorg/untrusted-repo'` +* Allowlist all repos in my GitHub Enterprise installation + * `--repo-allowlist='github.yourcompany.com/*'` +* Allowlist all repos under `myorg` project `myproject` on Azure DevOps + * `--repo-allowlist='myorg.visualstudio.com/myproject/*,dev.azure.com/myorg/myproject/*'` +* Allowlist all repositories + * `--repo-allowlist='*'` ### `--repo-config` + ```bash atlantis server --repo-config="path/to/repos.yaml" # or ATLANTIS_REPO_CONFIG="path/to/repos.yaml" ``` - Path to a YAML server-side repo config file. 
See [Server Side Repo Config](server-side-repo-config.html).
+
+ Path to a YAML server-side repo config file. See [Server Side Repo Config](server-side-repo-config.md).

### `--repo-config-json`
+
```bash
atlantis server --repo-config-json='{"repos":[{"id":"/.*/", "apply_requirements":["mergeable"]}]}'
# or
ATLANTIS_REPO_CONFIG_JSON='{"repos":[{"id":"/.*/", "apply_requirements":["mergeable"]}]}'
```
+
Specify server-side repo config as a JSON string. Useful if you don't want to write a config file to disk.
- See [Server Side Repo Config](server-side-repo-config.html) for more details.
+ See [Server Side Repo Config](server-side-repo-config.md) for more details.

::: tip
- If specifying a [Workflow](custom-workflows.html#reference), [step](custom-workflows.html#step)'s
+ If specifying a [Workflow](custom-workflows.md#reference), [step](custom-workflows.md#step)'s
can be specified as follows:
+
```json
{
  "repos": [],
  "workflows": {
@@ -876,25 +1101,30 @@ This is useful when you have many projects and want to keep the pull request cle
}
}
```
+
:::

### `--restrict-file-list`
+
```bash
atlantis server --restrict-file-list
# or (recommended)
ATLANTIS_RESTRICT_FILE_LIST=true
```
+
`--restrict-file-list` will block plan requests from projects outside the files modified in the pull request.
This will not block plan requests with regex if using the `--enable-regexp-cmd` flag; in these cases, commands like `atlantis plan -p .*` will still work if used. Normal commands will still be blocked if necessary.
Defaults to `false`.

### `--silence-allowlist-errors`
+
```bash
atlantis server --silence-allowlist-errors
# or
ATLANTIS_SILENCE_ALLOWLIST_ERRORS=true
```
+
Some users use the `--repo-allowlist` flag to control which repos Atlantis
responds to. Normally, if Atlantis receives a pull request webhook from a repo not listed in the allowlist,
it will comment back with an error. This flag disables that commenting.
@@ -903,20 +1133,24 @@ This is useful when you have many projects and want to keep the pull request cle
at an organization level rather than on each repo.

### `--silence-fork-pr-errors`
+
```bash
atlantis server --silence-fork-pr-errors
# or
ATLANTIS_SILENCE_FORK_PR_ERRORS=true
```
+
Normally, if Atlantis receives a pull request webhook from a fork and `--allow-fork-prs` is not set, it will comment back with an error. This flag disables that commenting.

### `--silence-no-projects`
+
```bash
atlantis server --silence-no-projects
# or
ATLANTIS_SILENCE_NO_PROJECTS=true
```
+
`--silence-no-projects` will tell Atlantis to ignore PRs if none of the modified files are part of a project defined in the `atlantis.yaml` file.
This flag ensures an Atlantis server only responds to its explicitly declared projects.
This has no effect if projects are undefined in the repo level `atlantis.yaml`.
@@ -926,78 +1160,96 @@ This is useful when you have many projects and want to keep the pull request cle
delegate work to each Atlantis server. Also useful when used with pre_workflow_hooks to dynamically generate an `atlantis.yaml` file.

### `--silence-vcs-status-no-plans`
+
```bash
atlantis server --silence-vcs-status-no-plans
# or
ATLANTIS_SILENCE_VCS_STATUS_NO_PLANS=true
```
+
`--silence-vcs-status-no-plans` will tell Atlantis to ignore setting VCS status on plans if none of the modified files are part of a project defined in the `atlantis.yaml` file.
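As a hedged sketch of how these silencing flags combine in a multi-server setup, the example below runs a second server that stays quiet unless one of its own declared projects is modified; all names are placeholders:

```bash
# Sketch: a second Atlantis server that only reacts to its own projects.
# The trigger word and status name below are placeholders.
atlantis server \
  --executable-name="atlantis-staging" \
  --vcs-status-name="atlantis-staging" \
  --silence-no-projects \
  --silence-vcs-status-no-plans
```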
### `--silence-vcs-status-no-projects` + ```bash atlantis server --silence-vcs-status-no-projects # or ATLANTIS_SILENCE_VCS_STATUS_NO_PROJECTS=true ``` + `--silence-vcs-status-no-projects` will tell Atlantis to ignore setting VCS status on any command if none of the modified files are part of a project defined in the `atlantis.yaml` file. ### `--skip-clone-no-changes` + ```bash atlantis server --skip-clone-no-changes # or ATLANTIS_SKIP_CLONE_NO_CHANGES=true ``` + `--skip-clone-no-changes` will skip cloning the repo during autoplan if there are no changes to Terraform projects. This will only apply for GitHub and GitLab and only for repos that have `atlantis.yaml` file. Defaults to `false`. ### `--slack-token` + ```bash atlantis server --slack-token=token # or (recommended) ATLANTIS_SLACK_TOKEN='token' ``` - API token for Slack notifications. Slack is not fully supported. TODO: Slack docs. + + API token for Slack notifications. See [Using Slack hooks](using-slack-hooks.md). ### `--ssl-cert-file` + ```bash atlantis server --ssl-cert-file="/etc/ssl/certs/my-cert.crt" # or ATLANTIS_SSL_CERT_FILE="/etc/ssl/certs/my-cert.crt" ``` + File containing x509 Certificate used for serving HTTPS. If the cert is signed by a CA, the file should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. ### `--ssl-key-file` + ```bash atlantis server --ssl-key-file="/etc/ssl/private/my-cert.key" # or ATLANTIS_SSL_KEY_FILE="/etc/ssl/private/my-cert.key" ``` + File containing x509 private key matching `--ssl-cert-file`. ### `--stats-namespace` + ```bash atlantis server --stats-namespace="myatlantis" # or ATLANTIS_STATS_NAMESPACE="myatlantis" ``` - Namespace for emitting stats/metrics. See [stats](stats.html) section. + + Namespace for emitting stats/metrics. See [stats](stats.md) section. ### `--tf-download` + ```bash atlantis server --tf-download=false # or ATLANTIS_TF_DOWNLOAD=false ``` + Defaults to `true`. Allow Atlantis to list and download additional versions of Terraform. Setting this to `false` can be useful in an air-gapped environment where a download mirror is not available. ### `--tf-download-url` + ```bash atlantis server --tf-download-url="https://releases.company.com" # or ATLANTIS_TF_DOWNLOAD_URL="https://releases.company.com" ``` + An alternative URL to download Terraform versions if they are missing. Useful in an airgapped environment where releases.hashicorp.com is not available. Directory structure of the custom endpoint should match that of releases.hashicorp.com. @@ -1005,106 +1257,128 @@ Setting this to `false` can be useful in an air-gapped environment where a downl This has no impact if `--tf-download` is set to `false`. ### `--tfe-hostname` + ```bash atlantis server --tfe-hostname="my-terraform-enterprise.company.com" # or ATLANTIS_TFE_HOSTNAME="my-terraform-enterprise.company.com" ``` + Hostname of your Terraform Enterprise installation to be used in conjunction with - `--tfe-token`. See [Terraform Cloud](terraform-cloud.html) for more details. + `--tfe-token`. See [Terraform Cloud](terraform-cloud.md) for more details. If using Terraform Cloud (i.e. you don't have your own Terraform Enterprise installation) no need to set since it defaults to `app.terraform.io`. ### `--tfe-local-execution-mode` + ```bash atlantis server --tfe-local-execution-mode # or ATLANTIS_TFE_LOCAL_EXECUTION_MODE=true ``` - Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.html) for more details. 
+
+ Enable if you're using local execution mode (instead of TFE/C's remote execution mode). See [Terraform Cloud](terraform-cloud.md) for more details.

### `--tfe-token`
+
```bash
atlantis server --tfe-token="xxx.atlasv1.yyy"
# or (recommended)
ATLANTIS_TFE_TOKEN='xxx.atlasv1.yyy'
```
- A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.html) for more details.
+
+ A token for Terraform Cloud/Terraform Enterprise integration. See [Terraform Cloud](terraform-cloud.md) for more details.

### `--use-tf-plugin-cache`
+
```bash
atlantis server --use-tf-plugin-cache=false
# or
ATLANTIS_USE_TF_PLUGIN_CACHE=false
```
+
Set to `false` if you want to disable the Terraform plugin cache.

This flag is useful when multiple projects need to run a plan and apply in the same PR, to avoid the race condition of using `plugin_cache_dir` concurrently. This is a known Terraform issue; more info:

-- [plugin_cache_dir concurrently discussion](https://github.com/hashicorp/terraform/issues/31964)
-- [PR to improve the situation](https://github.com/hashicorp/terraform/pull/33479)
+* [plugin_cache_dir concurrently discussion](https://github.com/hashicorp/terraform/issues/31964)
+* [PR to improve the situation](https://github.com/hashicorp/terraform/pull/33479)

The effect of the race condition is more evident when using a parallel configuration to run plan and apply. Disabling the plugin cache will impact performance when starting a new plan or apply, but in large Atlantis deployments with multiple projects and shared modules, using `--parallel_plan` and `--parallel_apply` is mandatory for efficient management of the PRs.

### `--var-file-allowlist`
+
```bash
atlantis server --var-file-allowlist='/path/to/tfvars/dir'
# or
ATLANTIS_VAR_FILE_ALLOWLIST='/path/to/tfvars/dir'
```
+
Comma-separated list of additional directory paths where [variable definition files](https://developer.hashicorp.com/terraform/language/values/variables#variable-definitions-tfvars-files) can be read from.
The paths in this argument should be absolute paths. Relative paths and globbing are currently not supported.
If this argument is not provided, it defaults to Atlantis' data directory, determined by the `--data-dir` argument.

### `--vcs-status-name`
+
```bash
atlantis server --vcs-status-name="atlantis-dev"
# or
ATLANTIS_VCS_STATUS_NAME="atlantis-dev"
```
+
Name used to identify Atlantis when updating a pull request status. Defaults to `atlantis`.

This is useful when running multiple Atlantis servers against a single repository so you
can give each Atlantis server its own unique name to prevent the statuses clashing.

### `--web-basic-auth`
+
```bash
atlantis server --web-basic-auth
# or
ATLANTIS_WEB_BASIC_AUTH=true
```
+
Enable Basic Authentication on the Atlantis web service.

### `--web-password`
+
```bash
atlantis server --web-password="atlantis"
# or
ATLANTIS_WEB_PASSWORD="atlantis"
```
+
Password used for Basic Authentication on the Atlantis web service. Defaults to `atlantis`.

### `--web-username`
+
```bash
atlantis server --web-username="atlantis"
# or
ATLANTIS_WEB_USERNAME="atlantis"
```
+
Username used for Basic Authentication on the Atlantis web service. Defaults to `atlantis`.
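A minimal sketch tying the web service flags above together, assuming you terminate TLS in Atlantis itself; paths and credentials are placeholders:

```bash
# Sketch: serve the Atlantis UI over HTTPS with basic auth.
# Certificate paths and credentials are placeholders; prefer env vars for secrets.
atlantis server \
  --ssl-cert-file="/etc/ssl/certs/atlantis.crt" \
  --ssl-key-file="/etc/ssl/private/atlantis.key" \
  --web-basic-auth \
  --web-username="atlantis" \
  --web-password="$WEB_PASSWORD"
```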
### `--websocket-check-origin`
+
```bash
atlantis server --websocket-check-origin
# or
ATLANTIS_WEBSOCKET_CHECK_ORIGIN=true
```
+
Only allow websocket connections when they originate from the running Atlantis web server.

### `--write-git-creds`
+
```bash
atlantis server --write-git-creds
# or
ATLANTIS_WRITE_GIT_CREDS=true
```
+
Write out a .git-credentials file with the provider user and token to allow cloning private modules over HTTPS or SSH.
See [here](https://git-scm.com/docs/git-credential-store) for more information.
@@ -1121,4 +1395,3 @@ The effect of the race condition is more evident when using parallel configurati
::: warning SECURITY WARNING
This does write secrets to disk and should only be enabled in a secure environment.
:::
-
diff --git a/runatlantis.io/docs/server-side-repo-config.md b/runatlantis.io/docs/server-side-repo-config.md
index 77b44be4fa..16899ac9a3 100644
--- a/runatlantis.io/docs/server-side-repo-config.md
+++ b/runatlantis.io/docs/server-side-repo-config.md
@@ -1,27 +1,29 @@
-# Server Side Config
+# Server Side Repo Config
+
A Server-Side Config file is used for groups of server config that can't reasonably be expressed through flags.
One such use case is to control per-repo behaviour and what users can do in repo-level `atlantis.yaml` files.

-[[toc]]
-
## Do I Need A Server-Side Config File?
+
You do not need a server-side repo config file unless you want to customize
some aspect of Atlantis on a per-repo basis. Read through the [use-cases](#use-cases) to determine if you need it.

## Enabling Server Side Config
+
To use server side repo config create a config file, ex. `repos.yaml`, and pass it to
the `atlantis server` command via the `--repo-config` flag, ex. `--repo-config=path/to/repos.yaml`.

If you don't wish to write a config file to disk, you can use the
`--repo-config-json` flag or `ATLANTIS_REPO_CONFIG_JSON` environment variable
-to specify your config as JSON. See [--repo-config-json](server-configuration.html#repo-config-json)
+to specify your config as JSON. See [--repo-config-json](server-configuration.md#repo-config-json)
for an example.
-
+
## Example Server Side Repo
+
```yaml
# repos lists the config for specific repos.
repos:
@@ -54,9 +56,9 @@ repos:
    # allowed_overrides specifies which keys can be overridden by this repo in
    # its atlantis.yaml file.
-   allowed_overrides: [apply_requirements, workflow, delete_source_branch_on_merge, repo_locking, custom_policy_check]
+   allowed_overrides: [apply_requirements, workflow, delete_source_branch_on_merge, repo_locking, repo_locks, custom_policy_check]

-   # allowed_workflows specifies which workflows the repos that match
+   # allowed_workflows specifies which workflows the repos that match
    # are allowed to select.
    allowed_workflows: [custom]

@@ -71,18 +73,24 @@ repos:
    # repo_locking defines whether to lock the repository when planning.
    # If true (default), atlantis tries to get a lock.
+   # deprecated: use repo_locks instead
    repo_locking: true

+   # repo_locks defines whether the repository would be locked on apply instead of plan, or disabled
+   # Valid values are on_plan (default), on_apply or disabled.
+   repo_locks:
+     mode: on_plan
+
    # custom_policy_check defines whether policy checking tools besides Conftest are enabled in checks
    # If false (default), only Conftest JSON output is allowed
    custom_policy_check: false

    # pre_workflow_hooks defines arbitrary list of scripts to execute before workflow execution.
- pre_workflow_hooks: + pre_workflow_hooks: - run: my-pre-workflow-hook-command arg1 - + # post_workflow_hooks defines arbitrary list of scripts to execute after workflow execution. - post_workflow_hooks: + post_workflow_hooks: - run: my-post-workflow-hook-command arg1 # policy_check defines if policy checking should be enable on this repository. @@ -112,13 +120,16 @@ workflows: ``` ## Use Cases + Here are some of the reasons you might want to use a repo config. ### Requiring PR Is Approved Before an applicable subcommand + If you want to require that all (or specific) repos must have pull requests approved before Atlantis will allow running `apply` or `import`, use the `plan_requirements`, `apply_requirements` or `import_requirements` keys. For all repos: + ```yaml # repos.yaml repos: @@ -129,6 +140,7 @@ repos: ``` For a specific repo: + ```yaml # repos.yaml repos: @@ -138,13 +150,15 @@ repos: import_requirements: [approved] ``` -See [Command Requirements](command-requirements.html) for more details. +See [Command Requirements](command-requirements.md) for more details. ### Requiring PR Is "Mergeable" Before Apply or Import + If you want to require that all (or specific) repos must have pull requests in a mergeable state before Atlantis will allow running `apply` or `import`, use the `plan_requirements`, `apply_requirements` or `import_requirements` keys. For all repos: + ```yaml # repos.yaml repos: @@ -155,6 +169,7 @@ repos: ``` For a specific repo: + ```yaml # repos.yaml repos: @@ -164,13 +179,15 @@ repos: import_requirements: [mergeable] ``` -See [Command Requirements](command-requirements.html) for more details. +See [Command Requirements](command-requirements.md) for more details. ### Repos Can Set Their Own Apply an applicable subcommand + If you want all (or specific) repos to be able to override the default apply requirements, use the `allowed_overrides` key. To allow all repos to override the default: + ```yaml # repos.yaml repos: @@ -183,7 +200,9 @@ repos: # But all repos can set their own using atlantis.yaml allowed_overrides: [plan_requirements, apply_requirements, import_requirements] ``` + To allow only a specific repo to override the default: + ```yaml # repos.yaml repos: @@ -200,6 +219,7 @@ repos: Then each allowed repo can have an `atlantis.yaml` file that sets `plan_requirements`, `apply_requirements` or `import_requirements` to an empty array (disabling the requirement). + ```yaml # atlantis.yaml in the repo root or set repo_config_file in repos.yaml version: 3 @@ -211,6 +231,7 @@ projects: ``` ### Running Scripts Before Atlantis Workflows + If you want to run scripts that would execute before Atlantis can run default or custom workflows, you can create a `pre-workflow-hooks`: @@ -222,10 +243,12 @@ repos: - run: | my bash script inline ``` -See [Pre Workflow Hooks](pre-workflow-hooks.html) for more details on writing + +See [Pre Workflow Hooks](pre-workflow-hooks.md) for more details on writing pre workflow hooks. ### Running Scripts After Atlantis Workflows + If you want to run scripts that would execute after Atlantis runs default or custom workflows, you can create a `post-workflow-hooks`: @@ -237,15 +260,18 @@ repos: - run: | my bash script inline ``` -See [Post Workflow Hooks](post-workflow-hooks.html) for more details on writing + +See [Post Workflow Hooks](post-workflow-hooks.md) for more details on writing post workflow hooks. 
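As a combined, hedged sketch of both hook types in one `repos.yaml`, the example below uses two hypothetical scripts as stand-ins for your own commands:

```yaml
# Sketch only: generate-atlantis-yaml.sh and notify.sh are hypothetical
# scripts; Atlantis simply runs whatever commands are listed here.
repos:
  - id: /.*/
    pre_workflow_hooks:
      # e.g. render an atlantis.yaml before Atlantis parses the repo
      - run: ./generate-atlantis-yaml.sh
    post_workflow_hooks:
      # e.g. report the outcome after the workflow finishes
      - run: ./notify.sh
```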
### Change The Default Atlantis Workflow + If you want to change the default commands that Atlantis runs during `plan` and `apply` phases, you can create a new `workflow`. If you want to use that workflow by default for all repos, use the workflow key `default`: + ```yaml # repos.yaml # NOTE: the repos key is not required. @@ -261,10 +287,11 @@ workflows: - run: my custom apply command ``` -See [Custom Workflows](custom-workflows.html) for more details on writing +See [Custom Workflows](custom-workflows.md) for more details on writing custom workflows. ### Allow Repos To Choose A Server-Side Workflow + If you want repos to be able to choose their own workflows that are defined in the server-side repo config, you need to create the workflows server-side and then allow each repo to override the `workflow` key: @@ -295,7 +322,8 @@ workflows: steps: - run: another custom command ``` -Or, if you want to restrict what workflows each repo has access to, use the `allowed_workflows` + +Or, if you want to restrict what workflows each repo has access to, use the `allowed_workflows` key: ```yaml @@ -345,13 +373,15 @@ There is always a workflow named `default` that corresponds to Atlantis' default unless you've created your own server-side workflow with that key (overriding it). ::: -See [Custom Workflows](custom-workflows.html) for more details on writing +See [Custom Workflows](custom-workflows.md) for more details on writing custom workflows. ### Allow Using Custom Policy Tools + Conftest is the standard policy check application integrated with Atlantis, but custom tools can still be run in custom workflows when the `custom_policy_check` option is set. See the [Custom Policy Checks page](custom-policy-checks.md) for detailed examples. ### Allow Repos To Define Their Own Workflows + If you want repos to be able to define their own workflows you need to allow them to override the `workflow` key and set `allow_custom_workflows` to `true`. @@ -375,6 +405,7 @@ repos: ``` Then each allowed repo can define and use a custom workflow in their `atlantis.yaml` files: + ```yaml # atlantis.yaml version: 3 @@ -392,12 +423,13 @@ workflows: - run: my custom apply command ``` -See [Custom Workflows](custom-workflows.html) for more details on writing +See [Custom Workflows](custom-workflows.md) for more details on writing custom workflows. ### Multiple Atlantis Servers Handle The Same Repository + Running multiple Atlantis servers to handle the same repository can be done to separate permissions for each Atlantis server. -In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.html) repository config file can be used by using different `repos.yaml` files. +In this case, a different [atlantis.yaml](repo-level-atlantis-yaml.md) repository config file can be used by using different `repos.yaml` files. For example, consider a situation where a separate `production-server` atlantis uses repo config `atlantis-production.yaml` and `staging-server` atlantis uses repo config `atlantis-staging.yaml`. @@ -416,7 +448,7 @@ repos: ``` Then, create `atlantis-production.yaml` and `atlantis-staging.yaml` files in the repository. -See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.html). +See the configuration examples in [atlantis.yaml](repo-level-atlantis-yaml.md). ```yaml # atlantis-production.yaml @@ -438,25 +470,29 @@ Now, 2 webhook URLs can be setup for the repository, which send events to `produ Each servers handle different repository config files. 
:::tip Notes -* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.html#silence-no-projects). -* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.html#executable-name). + +* If `no projects` comments are annoying, set [--silence-no-projects](server-configuration.md#silence-no-projects). +* The command trigger executable name can be reconfigured from `atlantis` to something else by setting [Executable Name](server-configuration.md#executable-name). * When using different atlantis server vcs users such as `@atlantis-staging`, the comment `@atlantis-staging plan` can be used instead `atlantis plan` to call `staging-server` only. ::: ## Reference ### Top-Level Keys -| Key | Type | Default | Required | Description | -|-----------|---------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------| -| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. | -| workflows | map[string: [Workflow](custom-workflows.html#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. | -| policies | Policies. | none | no | List of policy sets to run and associated metadata | -| metrics | Metrics. | none | no | Map of metric configuration | +| Key | Type | Default | Required | Description | +|-----------|-------------------------------------------------------|-----------|----------|---------------------------------------------------------------------------------------| +| repos | array[[Repo](#repo)] | see below | no | List of repos to apply settings to. | +| workflows | map[string: [Workflow](custom-workflows.md#workflow)] | see below | no | Map from workflow name to workflow. Workflows override the default Atlantis commands. | +| policies | Policies. | none | no | List of policy sets to run and associated metadata | +| metrics | Metrics. | none | no | Map of metric configuration | ::: tip A Note On Defaults + #### `repos` + `repos` always contains a first element with the Atlantis default config: + ```yaml repos: - id: /.*/ @@ -470,7 +506,9 @@ repos: ``` #### `workflows` + `workflows` always contains the Atlantis default workflow under the key `default`: + ```yaml workflows: default: @@ -485,29 +523,33 @@ If you set a workflow with the key `default`, it will override this. ::: ### Repo -| Key | Type | Default | Required | Description | -|-------------------------------|----------|---------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. | -| branch | string | none | no | An regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched | -| repo_config_file | string | none | no | Repo config file path in this repo. 
By default, use `atlantis.yaml` which is located on repository root. When multiple atlantis servers work with the same repo, please set different file names. | -| workflow | string | none | no | A custom workflow. -| plan_requirements | []string | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | | -| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.html) for more details. | -| allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge`,`repo_locking`, and `custom_policy_check` | -| allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. | -| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.html). | -| delete_source_branch_on_merge | bool | false | no | Whether or not to delete the source branch on merge. | -| repo_locking | bool | false | no | Whether or not to get a lock. | -| policy_check | bool | false | no | Whether or not to run policy checks on this repository. | -| custom_policy_check | bool | false | no | Whether or not to enable custom policy check tools outside of Conftest on this repository. | -| autodiscover | AutoDiscover | none | no | Auto discover settings for this repo +| Key | Type | Default | Required | Description | +|-------------------------------|-------------------------|-----------------|----------|-----------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| id | string | none | yes | Value can be a regular expression when specified as /<regex>/ or an exact string match. Repo IDs are of the form `{vcs hostname}/{org}/{name}`, ex. `github.com/owner/repo`. Hostname is specified without scheme or port. For Bitbucket Server, {org} is the **name** of the project, not the key. | +| branch | string | none | no | An regex matching pull requests by base branch (the branch the pull request is getting merged into). By default, all branches are matched | +| repo_config_file | string | none | no | Repo config file path in this repo. By default, use `atlantis.yaml` which is located on repository root. When multiple atlantis servers work with the same repo, please set different file names. | +| workflow | string | none | no | A custom workflow. | +| plan_requirements | []string | none | no | Requirements that must be satisfied before `atlantis plan` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. 
| +| apply_requirements | []string | none | no | Requirements that must be satisfied before `atlantis apply` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| import_requirements | []string | none | no | Requirements that must be satisfied before `atlantis import` can be run. Currently the only supported requirements are `approved`, `mergeable`, and `undiverged`. See [Command Requirements](command-requirements.md) for more details. | +| allowed_overrides | []string | none | no | A list of restricted keys that `atlantis.yaml` files can override. The only supported keys are `apply_requirements`, `workflow`, `delete_source_branch_on_merge`,`repo_locking`, `repo_locks`, and `custom_policy_check` | +| allowed_workflows | []string | none | no | A list of workflows that `atlantis.yaml` files can select from. | +| allow_custom_workflows | bool | false | no | Whether or not to allow [Custom Workflows](custom-workflows.md). | +| delete_source_branch_on_merge | bool | false | no | Whether or not to delete the source branch on merge. | +| repo_locking | bool | false | no | (deprecated) Whether or not to get a lock. | +| repo_locks | [RepoLocks](#repolocks) | `mode: on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. See [RepoLocks](#repolocks) for more details. | +| policy_check | bool | false | no | Whether or not to run policy checks on this repository. | +| custom_policy_check | bool | false | no | Whether or not to enable custom policy check tools outside of Conftest on this repository. | +| autodiscover | AutoDiscover | none | no | Auto discover settings for this repo | +| silence_pr_comments | []string | none | no | Silence PR comments from defined stages while preserving PR status checks. Useful in large environments with many Atlantis instances and/or projects, when the comments are too big and too many, therefore it is preferable to rely solely on PR status checks. Supported values are: `plan`, `apply`. | :::tip Notes + * If multiple repos match, the last match will apply. * If a key isn't defined, it won't override a key that matched from above. For example, given a repo ID `github.com/owner/repo` and a config: + ```yaml repos: - id: /.*/ @@ -518,12 +560,14 @@ If you set a workflow with the key `default`, it will override this. ``` The final config will look like: + ```yaml apply_requirements: [] workflow: default allowed_overrides: [] allow_custom_workflows: true ``` + Where * `apply_requirements` is set from the `id: github.com/owner/repo` config because it overrides the previous matching config from `id: /.*/`. @@ -535,6 +579,16 @@ If you set a workflow with the key `default`, it will override this. by the `id: github.com/owner/repo` config because it didn't define that key. ::: +### RepoLocks + +```yaml +mode: on_apply +``` + +| Key | Type | Default | Required | Description | +|------|--------|-----------|----------|---------------------------------------------------------------------------------------------------------------------------------------| +| mode | `Mode` | `on_plan` | no | Whether or not repository locks are enabled for this project on plan or apply. Valid values are `disabled`, `on_plan` and `on_apply`. | + ### Policies | Key | Type | Default | Required | Description | @@ -545,6 +599,7 @@ If you set a workflow with the key `default`, it will override this. 
| policy_sets | []PolicySet | none | yes | set of policies to run on a plan output | ### Owners + | Key | Type | Default | Required | Description | |-------------|-------------------|---------|------------|---------------------------------------------------------| | users | []string | none | no | list of github users that can approve failing policies | @@ -558,7 +613,6 @@ If you set a workflow with the key `default`, it will override this. | path | string | none | yes | path to the rego policies directory | | source | string | none | yes | only `local` is supported at this time | - ### Metrics | Key | Type | Default | Required | Description | diff --git a/runatlantis.io/docs/stats.md b/runatlantis.io/docs/stats.md index a2980c5634..9c6073ab64 100644 --- a/runatlantis.io/docs/stats.md +++ b/runatlantis.io/docs/stats.md @@ -8,12 +8,11 @@ Currently Statsd and Prometheus is supported. See configuration below for detail ## Configuration -Metrics are configured through the [Server Side Config](server-side-repo-config.html#metrics). +Metrics are configured through the [Server Side Config](server-side-repo-config.md#metrics). ## Available Metrics -Assuming metrics are exposed from the endpoint `/metrics` from the [metrics](server-side-repo-config.html#metrics) server side config e.g. - +Assuming metrics are exposed from the endpoint `/metrics` from the [metrics](server-side-repo-config.md#metrics) server side config e.g. ```yaml metrics: @@ -21,10 +20,8 @@ metrics: endpoint: "/metrics" ``` - To see all the metrics exposed from atlantis service, make a GET request to the `/metrics` endpoint. - ```bash curl localhost:4141/metrics # HELP atlantis_cmd_autoplan_builder_execution_error atlantis_cmd_autoplan_builder_execution_error counter @@ -47,20 +44,19 @@ atlantis_cmd_autoplan_builder_execution_time_count 10 ..... ``` - ::: tip NOTE The output shown above is trimmed, since with every new version release this metric set will need to be updated accordingly as there may be a case if some metrics are added/modified/deprecated, so the output shown above just gives a brief idea of how these metrics look like and rest can be explored. ::: Important metrics to monitor are -| Metric Name | Metric Type | Purpose | -|------------------------------------------------|----------------------------------------------------------------------|--------------------------------------------------------------------------------------------------------------------| -| `atlantis_cmd_autoplan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has thrown error. | -| `atlantis_cmd_comment_plan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis plan` has thrown error. | -| `atlantis_cmd_autoplan_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when [autoplan](autoplanning.html#autoplanning) has run successfully. | -| `atlantis_cmd_comment_apply_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has thrown error. | -| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times when on commenting `atlantis apply` has run successfully. 
| +| Metric Name | Metric Type | Purpose | +|------------------------------------------------|----------------------------------------------------------------------|-------------------------------------------------------------------------------------| +| `atlantis_cmd_autoplan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times [autoplan](autoplanning.md#autoplanning) has thrown an error. | +| `atlantis_cmd_comment_plan_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis plan` has thrown an error. | +| `atlantis_cmd_autoplan_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times [autoplan](autoplanning.md#autoplanning) has run successfully. | +| `atlantis_cmd_comment_apply_execution_error` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis apply` has thrown an error. | +| `atlantis_cmd_comment_apply_execution_success` | [counter](https://prometheus.io/docs/concepts/metric_types/#counter) | number of times commenting `atlantis apply` has run successfully. | ::: tip NOTE There are plenty of additional metrics exposed by atlantis that are not described above. diff --git a/runatlantis.io/docs/streaming-logs.md b/runatlantis.io/docs/streaming-logs.md index c066d47d47..df936c52f9 100644 --- a/runatlantis.io/docs/streaming-logs.md +++ b/runatlantis.io/docs/streaming-logs.md @@ -20,4 +20,3 @@ This will link to the atlantis UI which provides real-time logging in addition t ::: warning As of now the logs are stored in memory and cleared when a given pull request is closed, so this link shouldn't be persisted anywhere. ::: - diff --git a/runatlantis.io/docs/terraform-cloud.md b/runatlantis.io/docs/terraform-cloud.md index bab22a5db0..2e3393d7dd 100644 --- a/runatlantis.io/docs/terraform-cloud.md +++ b/runatlantis.io/docs/terraform-cloud.md @@ -6,15 +6,17 @@ and Private Terraform Enterprise was renamed Terraform Enterprise. ::: Atlantis integrates seamlessly with Terraform Cloud and Terraform Enterprise, whether you're using: + * [Free Remote State Management](https://app.terraform.io) * Terraform Cloud Paid Tiers * A Private Installation of Terraform Enterprise Read the docs below :point_down: depending on your use-case. -[[toc]] ## Using Atlantis With Free Remote State Storage + To use Atlantis with Free Remote State Storage, you need to: + 1. Migrate your state to Terraform Cloud. See [Migrating State from Local Terraform](https://developer.hashicorp.com/terraform/cloud-docs/migrate) 1. Update any projects that are referencing the state you migrated to use the new location 1. [Generate a Terraform Cloud/Enterprise Token](#generating-a-terraform-cloud-enterprise-token) @@ -24,6 +26,7 @@ That's it! Atlantis will run as normal and your state will be stored in Terrafor Cloud. ## Using Atlantis With Terraform Cloud Remote Operations or Terraform Enterprise + Atlantis integrates with the full version of Terraform Cloud and Terraform Enterprise via the [remote backend](https://developer.hashicorp.com/terraform/language/settings/backends/remote). Atlantis will run `terraform` commands as usual, however those commands will actually be executed *remotely* in Terraform Cloud or Terraform Enterprise. ### Why?
+ Using Atlantis with Terraform Cloud or Terraform Enterprise gives you access to features like: + * Real-time streaming output * Ability to cancel in-progress commands * Secret variables @@ -40,28 +45,34 @@ Using Atlantis with Terraform Cloud or Terraform Enterprise gives you access to **Without** having to change your pull request workflow. ### Getting Started + To use Atlantis with Terraform Cloud Remote Operations or Terraform Enterprise, you need to: + 1. Migrate your state to Terraform Cloud/Enterprise. See [Migrating State from Local Terraform](https://developer.hashicorp.com/terraform/cloud-docs/migrate) 1. Update any projects that are referencing the state you migrated to use the new location 1. [Generate a Terraform Cloud/Enterprise Token](#generating-a-terraform-cloud-enterprise-token) 1. [Pass the token to Atlantis](#passing-the-token-to-atlantis) ## Generating a Terraform Cloud/Enterprise Token + Atlantis needs a Terraform Cloud/Enterprise Token that it will use to access the API. Using a **Team Token is recommended**, however you can also use a User Token. ### Team Token + To generate a team token, click on **Settings** in the top bar, then **Teams** in the sidebar. Choose an existing team or create a new one. Enable the **Manage Workspaces** permission, then scroll down to **Team API Token**. ### User Token + To generate a user token, click on your avatar, then **User Settings**, then **Tokens** in the sidebar. Ensure the **Manage Workspaces** permission is enabled for this user's team. ## Passing The Token To Atlantis + The token can be passed to Atlantis via the `ATLANTIS_TFE_TOKEN` environment variable. You can also use the `--tfe-token` flag, however your token would then be easily @@ -88,12 +99,14 @@ Under the hood, Atlantis is generating a `~/.terraformrc` file. If you already had a `~/.terraformrc` file where Atlantis is running, then you'll need to manually add the credentials block to that file: -``` + +```hcl ... credentials "app.terraform.io" { token = "xxxx" } ``` + instead of using the `ATLANTIS_TFE_TOKEN` environment variable, since Atlantis won't overwrite your `.terraformrc` file. ::: diff --git a/runatlantis.io/docs/terraform-versions.md b/runatlantis.io/docs/terraform-versions.md index 79fdee0db3..321278d505 100644 --- a/runatlantis.io/docs/terraform-versions.md +++ b/runatlantis.io/docs/terraform-versions.md @@ -4,47 +4,60 @@ You can customize which version of Terraform Atlantis defaults to by setting the `--default-tf-version` flag (ex. `--default-tf-version=v1.3.7`). ## Via `atlantis.yaml` + If you wish to use a different version than the default for a specific repo or project, you need to create an `atlantis.yaml` file and set the `terraform_version` key: + ```yaml version: 3 projects: - dir: . terraform_version: v1.1.5 ``` -See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.html#terraform-versions) for more details. + +See [atlantis.yaml Use Cases](repo-level-atlantis-yaml.md#terraform-versions) for more details. 
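+If different projects in one repo need different Terraform versions, each project can pin its own. A minimal sketch, assuming a hypothetical repo with `legacy/` and `platform/` directories:
+
+```yaml
+version: 3
+projects:
+# each project pins the version used for its own plans and applies
+- dir: legacy
+  terraform_version: v0.12.31
+- dir: platform
+  terraform_version: v1.1.5
+```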
## Via terraform config + Alternatively, one can use the terraform configuration block's `required_version` key to specify an exact version (`x.y.z` or `= x.y.z`), or as of [atlantis v0.21.0](https://github.com/runatlantis/atlantis/releases/tag/v0.21.0), a comparison or pessimistic [version constraint](https://developer.hashicorp.com/terraform/language/expressions/version-constraints#version-constraint-syntax): -#### Exactly version 1.2.9 + +### Exactly version 1.2.9 + ```tf terraform { required_version = "= 1.2.9" } ``` -#### Any patch/tiny version of minor version 1.2 (1.2.z) + +### Any patch/tiny version of minor version 1.2 (1.2.z) + ```tf terraform { required_version = "~> 1.2.0" } ``` -#### Any minor version of major version 1 (1.y.z) + +### Any minor version of major version 1 (1.y.z) + ```tf terraform { required_version = "~> 1.2" } ``` -#### Any version that is at least 1.2.0 + +### Any version that is at least 1.2.0 + ```tf terraform { required_version = ">= 1.2.0" } ``` + See [Terraform `required_version`](https://developer.hashicorp.com/terraform/language/settings#specifying-a-required-terraform-version) for reference. ::: tip NOTE Atlantis will automatically download the latest version that fulfills the constraint specified. -A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.html#default-tf-version) flag and the `required_version` in the terraform hcl. +A `terraform_version` specified in the `atlantis.yaml` file takes precedence over both the [`--default-tf-version`](server-configuration.md#default-tf-version) flag and the `required_version` in the terraform hcl. ::: ::: tip NOTE diff --git a/runatlantis.io/docs/troubleshooting-https.md b/runatlantis.io/docs/troubleshooting-https.md index 191a4b1242..f59058da1c 100644 --- a/runatlantis.io/docs/troubleshooting-https.md +++ b/runatlantis.io/docs/troubleshooting-https.md @@ -3,25 +3,24 @@ When using a self-signed certificate for Atlantis (with flags `--ssl-cert-file` and `--ssl-key-file`), there are a few considerations. -Atlantis uses the web server from the standard Go library, +Atlantis uses the web server from the standard Go library, the method name is [ListenAndServeTLS](https://pkg.go.dev/net/http#ListenAndServeTLS). `ListenAndServeTLS` acts identically to [ListenAndServe](https://pkg.go.dev/net/http#ListenAndServe), -except that it expects HTTPS connections. -Additionally, files containing a certificate and matching private key for the server must be provided. -If the certificate is signed by a certificate authority, -the file passed to `--ssl-cert-file` should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. +except that it expects HTTPS connections. +Additionally, files containing a certificate and matching private key for the server must be provided. +If the certificate is signed by a certificate authority, +the file passed to `--ssl-cert-file` should be the concatenation of the server's certificate, any intermediates, and the CA's certificate. -If you have this error when specifying a TLS cert with a key: -``` +If you have this error when specifying a TLS cert with a key: + +```plain [ERROR] server.go:413 server: Tls: private key does not match public key ``` Check that the locally signed certificate authority is prepended to the self signed certificate. 
-A good example is shown at [Seth Vargo terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64) +A good example is shown in [Seth Vargo's Terraform implementation of atlantis-on-gke](https://github.com/sethvargo/atlantis-on-gke/blob/master/terraform/tls.tf#L64-L84). For Go-specific TLS resources, have a look at the repository by [denji called golang-tls](https://github.com/denji/golang-tls). For a complete explanation on PKI, read this [article](https://smallstep.com/blog/everything-pki.html). - - diff --git a/runatlantis.io/docs/upgrading-atlantis-yaml.md b/runatlantis.io/docs/upgrading-atlantis-yaml.md index 1b8fe7aaa0..37e20900e8 100644 --- a/runatlantis.io/docs/upgrading-atlantis-yaml.md +++ b/runatlantis.io/docs/upgrading-atlantis-yaml.md @@ -1,15 +1,17 @@ # Upgrading atlantis.yaml ## Upgrading From v2 To v3 + Atlantis version `v0.7.0` introduced a new version 3 of `atlantis.yaml`. -**If you're not using [custom `run` steps](custom-workflows.html#custom-run-command), +**If you're not using [custom `run` steps](custom-workflows.md#custom-run-command), then you can upgrade from `version: 2` to `version: 3` without any changes.** **NOTE:** Version 2 **is not being deprecated** and there is no need to upgrade your version if you don't wish to do so. The only change from v2 to v3 is that we're parsing custom `run` steps differently. + ```yaml # atlantis.yaml workflows: default: plan: steps: - run: my custom command ``` +
An example workflow using a custom run step
Previously, we used a library that would parse the custom step prior to running it. Now, we just run the step directly. This will only affect your steps if they were using shell escaping of some sort. For example, if your step was previously: + ```yaml # version: 2 - run: "printf \'print me\'" ``` You can now write this in version 3 as: + ```yaml # version: 3 - run: "printf 'print me'" ``` - ## Upgrading From V1 To V3 + If you are upgrading from an **old** Atlantis version `<=v0.3.10` (from before July 4, 2018) you'll need to follow these steps. ### Single atlantis.yaml + If you had multiple `atlantis.yaml` files per directory then you'll need to consolidate them into a single `atlantis.yaml` file at the root of the repo. For example, if you had a directory structure: -``` + +```plain . ├── project1 │ └── atlantis.yaml @@ -53,7 +60,8 @@ For example, if you had a directory structure: ``` Then your new structure would look like: -``` + +```plain . ├── atlantis.yaml ├── project1 @@ -61,6 +69,7 @@ Then your new structure would look like: ``` And your `atlantis.yaml` would look something like: + ```yaml version: 2 projects: @@ -80,13 +89,16 @@ workflows: We will talk more about `workflows` below. ### Terraform Version + The `terraform_version` key moved from being a top-level key to being per `project`, so if before your `atlantis.yaml` was in directory `mydir` and looked like: + ```yaml terraform_version: 0.11.0 ``` Then your new config would be: + ```yaml version: 2 projects: @@ -95,9 +107,11 @@ projects: ``` ### Workflows + Workflows are the new way to set all `pre_*`, `post_*` and `extra_arguments`. Each `project` can have a custom workflow via the `workflow` key. + ```yaml version: 2 projects: @@ -106,6 +120,7 @@ projects: ``` Workflows are defined as a top-level key: + ```yaml version: 2 projects: @@ -118,6 +133,7 @@ workflows: To start with, determine whether you're customizing commands that happen during `plan` or `apply`. You then set that key under the workflow's name: + ```yaml ... workflows: @@ -133,6 +149,7 @@ workflows: If you're not customizing a specific stage then you can omit that key. For example if you're only customizing the commands that happen during `plan` then your config will look like: + ```yaml ... workflows: @@ -143,7 +160,9 @@ workflows: ``` #### Extra Arguments + `extra_arguments` is now specified as follows. Given a previous config: + ```yaml extra_arguments: - command_name: init @@ -158,6 +177,7 @@ extra_arguments: ``` Your config would now look like: + ```yaml ... workflows: @@ -174,8 +194,8 @@ workflows: extra_args: ["-lock=false"] ``` - #### Pre/Post Commands + Instead of using `pre_*` or `post_*`, you can now insert your custom commands before/after the built-in commands. Given a previous config: @@ -202,6 +222,7 @@ post_apply: ``` Your config would now look like: + ```yaml ... workflows: diff --git a/runatlantis.io/docs/using-atlantis.md b/runatlantis.io/docs/using-atlantis.md index 15a0b5a681..61c06e1a21 100644 --- a/runatlantis.io/docs/using-atlantis.md +++ b/runatlantis.io/docs/using-atlantis.md @@ -5,8 +5,9 @@ Atlantis triggers commands via pull request comments. ::: tip You can use the following executable names. + * `atlantis help` - * `atlantis` is executable name. You can configure by [Executable Name](/docs/server-configuration.html#executable-name). + * `atlantis` is the executable name. You can configure it via [Executable Name](server-configuration.md#executable-name). * `run help` * `run` is a global executable name.
* `@GithubUser help` @@ -14,35 +15,46 @@ You can use following executable names. ::: Currently, Atlantis supports the following commands. -[[toc]] --- + ## atlantis help + ```bash atlantis help ``` + ### Explanation + View help --- + ## atlantis version + ```bash atlantis version ``` ### Explanation + Print the output of 'terraform version'. --- + ## atlantis plan + ```bash atlantis plan [options] -- [terraform plan flags] ``` + ### Explanation + Runs `terraform plan` on the pull request's branch. You may wish to re-run plan after Atlantis has already done so if you've changed some resources manually. ### Examples + ```bash # Runs plan for any projects that Atlantis thinks were modified. # If an `atlantis.yaml` file is specified, runs plan on the projects that @@ -60,9 +72,10 @@ atlantis plan -w staging ``` ### Options + * `-d directory` Which directory to run plan in relative to root of repo. Use `.` for root. - * Ex. `atlantis plan -d child/dir` -* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w` because the project defines this already. + * Ex. `atlantis plan -d child/dir` +* `-p project` Which project to run plan for. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w` because the project defines this already. * `-w workspace` Switch to this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces) before planning. Defaults to `default`. Ignore this if Terraform workspaces are unused. * `--verbose` Append Atlantis log to comment. @@ -74,30 +87,38 @@ A `atlantis plan` (without flags), like autoplans, discards all plans previously If `terraform plan` requires additional arguments, like `-target=resource` or `-var 'foo=bar'` or `-var-file myfile.tfvars`, you can append them to the end of the comment after `--`, ex. -``` + +```shell atlantis plan -d dir -- -var foo='bar' ``` - +If you always need to append a certain flag, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). ### Using the -destroy Flag #### Example + To perform a destructive plan that will destroy resources you can use the `-destroy` flag like this: ```bash atlantis plan -- -destroy atlantis plan -d dir -- -destroy ``` -::: warning NOTE + +::: warning NOTE The `-destroy` flag generates a destroy plan. If this plan is applied it can result in data loss or service disruptions. Ensure that you have thoroughly reviewed your Terraform configuration and intend to remove the specified resources before using this flag. ::: --- + ## atlantis apply + ```bash atlantis apply [options] -- [terraform apply flags] ``` + ### Explanation + Runs `terraform apply` for the plan that matches the directory/project/workspace. ::: tip @@ -106,8 +127,8 @@ This includes all projects that have been planned manually with `atlantis plan` For Atlantis commands to work, Atlantis needs to know the location where the plan file is. For that, you can use $PLANFILE which will contain the path of the plan file to be used in your custom steps. i.e. `terraform plan -out $PLANFILE` ::: - ### Examples + ```bash # Runs apply for all unapplied plans from this pull request.
atlantis apply @@ -123,15 +144,17 @@ atlantis apply -w staging ``` ### Options + * `-d directory` Apply the plan for this directory, relative to root of repo. Use `.` for root. -* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.html). Cannot be used at same time as `-d` or `-w`. +* `-p project` Apply the plan for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml` file](repo-level-atlantis-yaml.md). Cannot be used at same time as `-d` or `-w`. * `-w workspace` Apply the plan for this [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. -* `--auto-merge-disabled` Disable [automerge](automerging.html) for this apply command. +* `--auto-merge-disabled` Disable [automerge](automerging.md) for this apply command. * `--verbose` Append Atlantis log to comment. ### Additional Terraform flags Because Atlantis under the hood is running `terraform apply plan.tfplan`, any Terraform options that would change the `plan` are ignored, ex: + * `-target=resource` * `-var 'foo=bar'` * `-var-file=myfile.tfvars` @@ -140,17 +163,22 @@ They're ignored because they can't be specified for an already generated planfil If you would like to specify these flags, do it while running `atlantis plan`. --- + ## atlantis import + ```bash atlantis import [options] ADDRESS ID -- [terraform import flags] ``` + ### Explanation + Runs `terraform import` that matches the directory/project/workspace. This command discards the terraform plan result. After an import and before an apply, another `atlantis plan` must be run again. -To allow the `import` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration. +Allowing the `import` command requires the [--allow-commands](server-configuration.md#allow-commands) configuration. ### Examples + ```bash # Runs import atlantis import ADDRESS ID @@ -166,36 +194,45 @@ atlantis import -w staging ADDRESS ID ``` ::: tip + * If importing `for_each` resources, a single-quoted address is required. * ex. `atlantis import 'aws_instance.example["foo"]' i-1234567890abcdef0` ::: ### Options + * `-d directory` Import a resource for this directory, relative to root of repo. Use `.` for root. -* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`. +* `-p project` Import a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`. * `-w workspace` Import a resource for a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. ### Additional Terraform flags If `terraform import` requires additional arguments, like `-var 'foo=bar'` or `-var-file myfile.tfvars`, append them to the end of the comment after `--`, e.g. - + +```shell atlantis import -d dir 'aws_instance.example["foo"]' i-1234567890abcdef0 -- -var foo='bar' ``` -If a flag is needed to be always appended, see [Custom Workflow Use Cases](custom-workflows.html#adding-extra-arguments-to-terraform-commands).
+ +If a flag always needs to be appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). --- + ## atlantis state rm + ```bash atlantis state [options] rm ADDRESS... -- [terraform state rm flags] ``` + ### Explanation + Runs `terraform state rm` that matches the directory/project/workspace. This command discards the terraform plan result. After running `state rm` and before an apply, another `atlantis plan` must be run again. -To allow the `state` command requires [--allow-commands](/docs/server-configuration.html#allow-commands) configuration. +Allowing the `state` command requires the [--allow-commands](server-configuration.md#allow-commands) configuration. ### Examples + ```bash # Runs state rm atlantis state rm ADDRESS1 ADDRESS2 @@ -211,44 +248,55 @@ atlantis state -w staging rm ADDRESS ``` ::: tip + * If running `state rm` on `for_each` resources, a single-quoted address is required. * ex. `atlantis state rm 'aws_instance.example["foo"]'` ::: ### Options + * `-d directory` Run state rm a resource for this directory, relative to root of repo. Use `.` for root. -* `-p project` Run state rm a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.html) repo configuration file. This cannot be used at the same time as `-d` or `-w`. +* `-p project` Run state rm a resource for this project. Refers to the name of the project configured in the repo's [`atlantis.yaml`](repo-level-atlantis-yaml.md) repo configuration file. This cannot be used at the same time as `-d` or `-w`. * `-w workspace` Run state rm a resource for a specific [Terraform workspace](https://developer.hashicorp.com/terraform/language/state/workspaces). Ignore this if Terraform workspaces are unused. ### Additional Terraform flags If `terraform state rm` requires additional arguments, like `-lock=false`, append them to the end of the comment after `--`, e.g. - + +```shell atlantis state -d dir rm 'aws_instance.example["foo"]' -- -lock=false ``` - +If a flag always needs to be appended, see [Custom Workflow Use Cases](custom-workflows.md#adding-extra-arguments-to-terraform-commands). --- + ## atlantis unlock + ```bash atlantis unlock ``` ### Explanation + Removes all atlantis locks and discards all plans for this PR. To unlock a specific plan you can use the Atlantis UI. --- + ## atlantis approve_policies + ```bash atlantis approve_policies ``` ### Explanation + Approves all current policy checking failures for the PR. -See also [policy checking](/docs/policy-checking.html). +See also [policy checking](policy-checking.md). ### Options + * `--verbose` Append Atlantis log to comment. diff --git a/runatlantis.io/docs/using-slack-hooks.md b/runatlantis.io/docs/using-slack-hooks.md index c75c243fca..572b0857f8 100644 --- a/runatlantis.io/docs/using-slack-hooks.md +++ b/runatlantis.io/docs/using-slack-hooks.md @@ -13,7 +13,7 @@ For this you'll need to: ## Configuring Slack for Atlantis -* Go to [https://api.slack.com/apps](https://api.slack.com/apps) +* Go to [Slack: Apps](https://api.slack.com/apps) * Click the `Create New App` button * Select `From scratch` in the dialog that opens * Give it a name, e.g. `atlantis-bot`.
@@ -43,13 +43,12 @@ webhooks: workspace-regex: .* branch-regex: .* kind: slack - channel: my-channel + channel: my-channel-id ``` If you are deploying Atlantis as a Helm chart, this can be implemented via the `config` parameter available for [chart customizations](https://github.com/runatlantis/helm-charts#customization): -``` - +```yaml ## Use Server Side Config, ## ref: https://www.runatlantis.io/docs/server-configuration.html config: | @@ -59,9 +58,7 @@ config: | workspace-regex: .* branch-regex: .* kind: slack - channel: my-channel + channel: my-channel-id ``` - - -The `apply` event information will be sent to the `my-channel` Slack channel. +The `apply` event information will be sent to the `my-channel-id` Slack channel. diff --git a/runatlantis.io/docs/webhook-secrets.md b/runatlantis.io/docs/webhook-secrets.md index 8b66ee8276..4e2ab1a059 100644 --- a/runatlantis.io/docs/webhook-secrets.md +++ b/runatlantis.io/docs/webhook-secrets.md @@ -17,27 +17,30 @@ Azure DevOps uses Basic authentication for webhooks rather than webhook secrets. ::: ::: tip NOTE -An app-wide token is generated during [GitHub App setup](access-credentials.html#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. Token appears after clicking "Edit" under the Webhook header. +An app-wide token is generated during [GitHub App setup](access-credentials.md#github-app). You can recover it by navigating to the [GitHub app settings page](https://github.com/settings/apps) and selecting "Edit" next to your Atlantis app's name. The token appears after clicking "Edit", under the Webhook header. ::: ::: warning Bitbucket.org **does not** support webhook secrets. -To mitigate, use repo allowlists and IP allowlists. See [Security](security.html#bitbucket-cloud-bitbucket-org) for more information. +To mitigate, use repo allowlists and IP allowlists. See [Security](security.md#bitbucket-cloud-bitbucket-org) for more information. ::: ## Generating A Webhook Secret + You can use any random string generator to create your Webhook secret. It should be > 24 characters. For example: + * Generate via Ruby with `ruby -rsecurerandom -e 'puts SecureRandom.hex(32)'` -* Generate online with [https://www.browserling.com/tools/random-string](https://www.browserling.com/tools/random-string) +* Generate online with [browserling: Generate Random Strings and Numbers](https://www.browserling.com/tools/random-string) ::: tip NOTE You must use **the same** webhook secret for each repo.
::: ## Next Steps + * Record your secret -* You'll be using it later to [configure your webhooks](configuring-webhooks.html), however if you're -following the [Installation Guide](installation-guide.html) then your next step is to -[Deploy Atlantis](deployment.html) +* You'll be using it later to [configure your webhooks](configuring-webhooks.md), however if you're +following the [Installation Guide](installation-guide.md) then your next step is to +[Deploy Atlantis](deployment.md) diff --git a/runatlantis.io/e2e/site-check.spec.js b/runatlantis.io/e2e/site-check.spec.js new file mode 100644 index 0000000000..2fbf3b5a3a --- /dev/null +++ b/runatlantis.io/e2e/site-check.spec.js @@ -0,0 +1,12 @@ +import { test } from '@playwright/test'; + +test('page should load without errors', async ({ page }) => { + // Listen for any errors that occur within the page + page.on('pageerror', error => { + console.error('Page error:', error.message); + throw new Error(`Page error: ${error.message}`); + }); + + // Navigate to the URL + await page.goto('http://localhost:8080/'); +}); diff --git a/runatlantis.io/guide/README.md b/runatlantis.io/guide.md similarity index 80% rename from runatlantis.io/guide/README.md rename to runatlantis.io/guide.md index 15472518b8..9d71a3acf1 100644 --- a/runatlantis.io/guide/README.md +++ b/runatlantis.io/guide.md @@ -1,15 +1,17 @@ # Introduction ## Getting Started -* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html). -* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](./guide/test-drive.md). +* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](./guide/testing-locally.md). +* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](./docs/installation-guide.md). ::: tip Looking for the full docs? -Go here: [www.runatlantis.io/docs](/docs/) +Go here: [www.runatlantis.io/docs](./docs.md) ::: ## Overview – What Is Atlantis? + Atlantis is an application for automating Terraform via pull requests. It is deployed as a standalone application into your infrastructure. No third-party has access to your credentials. @@ -21,14 +23,18 @@ When you want to apply, comment `atlantis apply` on the pull request and Atlanti will run `terraform apply` and comment back with the output. ## Watch + Check out the video below to see it in action: -[![Atlantis Walkthrough](./images/atlantis-walkthrough-icon.png)](https://www.youtube.com/watch?v=TmIPWda0IKg) +[![Atlantis Walkthrough](./guide/images/atlantis-walkthrough-icon.png)](https://www.youtube.com/watch?v=TmIPWda0IKg) ## Why would you run Atlantis? + ### Increased visibility + When everyone is executing Terraform on their own computers, it's hard to know the current state of your infrastructure: + * Is what's in `main` branch deployed? * Did someone forget to create a pull request for that latest change? * What was the output from that last `terraform apply`? @@ -37,6 +43,7 @@ With Atlantis, everything is visible on the pull request. You can view the histo of everything that was done to your infrastructure. 
### Enable collaboration with everyone + You probably don't want to distribute Terraform credentials to everyone in your engineering organization, but now anyone can open up a Terraform pull request. @@ -44,10 +51,12 @@ You can require approval before the pull request is applied so nothing happens accidentally. ### Review Terraform pull requests better + You can't fully review a Terraform change without seeing the output of `terraform plan`. Now that output is added to the pull request automatically. ### Standardize your workflows + Atlantis locks a directory/workspace until the pull request is merged or the lock is manually deleted. This ensures that changes are applied in the order expected. @@ -55,6 +64,7 @@ The exact commands that Atlantis runs are configurable. You can run custom scrip to construct your ideal workflow. ## Next Steps -* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](test-drive.html). -* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +* If you'd like to just test out running Atlantis on an **example repo** check out the [Test Drive](./guide/test-drive.md). +* If you'd like to test out running Atlantis on **your repos** then read [Testing Locally](./guide/testing-locally.md). +* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](./docs/installation-guide.md). diff --git a/runatlantis.io/guide/test-drive.md b/runatlantis.io/guide/test-drive.md index 22e8c77f21..8510f0a0e2 100644 --- a/runatlantis.io/guide/test-drive.md +++ b/runatlantis.io/guide/test-drive.md @@ -1,18 +1,22 @@ # Test Drive -To test drive Atlantis on an example repo, download the latest release: -[https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) + +To test drive Atlantis on an example repo, download the latest release from +[GitHub](https://github.com/runatlantis/atlantis/releases) Once you've extracted the archive, run: + ```bash ./atlantis testdrive ``` This mode sets up Atlantis on a test repo so you can try it out. It will + - Fork an example Terraform project into your GitHub account - Install Terraform (if not already in your PATH) - Install [ngrok](https://ngrok.com/) so we can expose Atlantis to GitHub - Start Atlantis so you can execute commands on the pull request ## Next Steps -* If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.html). -* If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](/docs/installation-guide.html). + +- If you're ready to test out running Atlantis on **your repos** then read [Testing Locally](testing-locally.md). +- If you're ready to properly install Atlantis on real infrastructure then head over to the [Installation Guide](../docs/installation-guide.md). 
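+For reference, the whole test drive boils down to a few shell commands. A minimal sketch, assuming a Linux amd64 machine; the release version below is a placeholder, so pick the latest asset for your platform from the releases page:
+
+```bash
+# download and extract a release archive (version and platform are placeholders)
+curl -fsSLO https://github.com/runatlantis/atlantis/releases/download/v0.27.0/atlantis_linux_amd64.zip
+unzip atlantis_linux_amd64.zip
+# start the guided test drive against an example repo
+./atlantis testdrive
+```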
diff --git a/runatlantis.io/guide/testing-locally.md b/runatlantis.io/guide/testing-locally.md index 054b0d9c2a..4e00c923de 100644 --- a/runatlantis.io/guide/testing-locally.md +++ b/runatlantis.io/guide/testing-locally.md @@ -1,45 +1,51 @@ # Testing Locally + These instructions are for running Atlantis **locally on your own computer** so you can test it out against your own repositories before deciding whether to install it more permanently. ::: tip -If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.html). +If you want to set up a production-ready Atlantis installation, read [Deployment](../docs/deployment.md). ::: Steps: -[[toc]] - ## Install Terraform + `terraform` needs to be in the `$PATH` for Atlantis. -Download from [https://developer.hashicorp.com/terraform/downloads](https://developer.hashicorp.com/terraform/downloads) -``` +Download from [Terraform](https://developer.hashicorp.com/terraform/downloads) + +```shell unzip path/to/terraform_*.zip -d /usr/local/bin ``` ## Download Atlantis -Get the latest release from [https://github.com/runatlantis/atlantis/releases](https://github.com/runatlantis/atlantis/releases) + +Get the latest release from [GitHub](https://github.com/runatlantis/atlantis/releases) and unpackage it. ## Download Ngrok + Atlantis needs to be accessible somewhere that github.com/gitlab.com/bitbucket.org or your GitHub/GitLab Enterprise installation can reach. One way to accomplish this is with ngrok, a tool that forwards your local port to a random public hostname. -Go to [https://ngrok.com/download](https://ngrok.com/download), download ngrok and `unzip` it. +[Download](https://ngrok.com/download) ngrok and `unzip` it. Start `ngrok` on port `4141` and take note of the hostname it gives you: + ```bash ./ngrok http 4141 ``` In a new tab (where you'll soon start Atlantis) create an environment variable with ngrok's hostname: + ```bash URL="https://{YOUR_HOSTNAME}.ngrok.io" ``` ## Create a Webhook Secret + GitHub and GitLab use webhook secrets so clients can verify that the webhooks came from them. ::: warning @@ -47,16 +53,19 @@ Bitbucket Cloud (bitbucket.org) doesn't use webhook secrets so if you're using B When you're ready to do a production deploy of Atlantis you should allowlist [Bitbucket IPs](https://confluence.atlassian.com/bitbucket/what-are-the-bitbucket-cloud-ip-addresses-i-should-use-to-configure-my-corporate-firewall-343343385.html) to ensure the webhooks are coming from them. ::: -Create a random string of any length (you can use [https://www.random.org/strings/](https://www.random.org/strings/)) +Create a random string of any length (you can use [random.org](https://www.random.org/strings/)) and set an environment variable: -``` + +```shell SECRET="{YOUR_RANDOM_STRING}" ``` ## Add Webhook + Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bitbucket repo: ### GitHub or GitHub Enterprise Webhook +
 <details>
     <summary>Expand</summary>
     <ul>
@@ -82,6 +91,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
 ### GitLab or GitLab Enterprise Webhook
+
 <details>
     <summary>Expand</summary>
     <ul>
@@ -103,6 +113,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
 ### Bitbucket Cloud (bitbucket.org) Webhook
+
 <details>
     <summary>Expand</summary>
     <ul>
@@ -124,6 +135,7 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
 ### Bitbucket Server (aka Stash) Webhook
+
 <details>
     <summary>Expand</summary>
     <ul>
@@ -140,51 +152,99 @@ Take the URL that ngrok output and create a webhook in your GitHub, GitLab or Bi
+### Gitea Webhook
+
+<details>
+    <summary>Expand</summary>
+    <ul>
+        <li>Click Settings > Webhooks in the top- and then sidebar</li>
+        <li>Click Add webhook > Gitea (Gitea webhooks are service specific, but this works)</li>
+        <li>set Target URL to http://$URL/events (or https://$URL/events if you're using SSL) where $URL is where Atlantis is hosted. Be sure to add /events</li>
+        <li>double-check you added /events to the end of your URL.</li>
+        <li>set Secret to the Webhook Secret you generated previously
+            <ul>
+                <li>NOTE If you're adding a webhook to multiple repositories, each repository will need to use the same secret.</li>
+            </ul>
+        </li>
+        <li>Select Custom Events...</li>
+        <li>Check the boxes
+            <ul>
+                <li>Repository events > Push</li>
+                <li>Issue events > Issue Comment</li>
+                <li>Pull Request events > Pull Request</li>
+                <li>Pull Request events > Pull Request Comment</li>
+                <li>Pull Request events > Pull Request Reviewed</li>
+                <li>Pull Request events > Pull Request Synchronized</li>
+            </ul>
+        </li>
+        <li>Leave Active checked</li>
+        <li>Click Add Webhook</li>
+        <li>See Next Steps</li>
+    </ul>
+</details>
+
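+To double-check the `$URL` your webhooks point at, you can pull the public hostname straight from ngrok's local inspection API rather than retyping it. A minimal sketch, assuming ngrok is still running locally and `jq` is installed (this is the same query the project's own e2e script uses):
+
+```bash
+# ngrok serves a local inspection API on port 4040
+URL=$(curl -s http://localhost:4040/api/tunnels \
+  | jq -r '.tunnels[] | select(.proto=="https") | .public_url')
+echo "webhook target: $URL/events"
+```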
## Create an access token for Atlantis + We recommend using a dedicated CI user or creating a new user named **@atlantis** that performs all API actions, however for testing, you can use your own user. Here we'll create the access token that Atlantis uses to comment on the pull request and set commit statuses. ### GitHub or GitHub Enterprise Access Token + - Create a [Personal Access Token](https://docs.github.com/en/authentication/keeping-your-account-and-data-secure/creating-a-personal-access-token#creating-a-fine-grained-personal-access-token) - create a token with **repo** scope - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### GitLab or GitLab Enterprise Access Token -- follow [https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) + +- follow [GitLab: Create a personal access token](https://docs.gitlab.com/ce/user/profile/personal_access_tokens.html#create-a-personal-access-token) - create a token with **api** scope - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### Bitbucket Cloud (bitbucket.org) Access Token -- follow [https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) + +- follow [BitBucket Cloud: Create an app password](https://support.atlassian.com/bitbucket-cloud/docs/create-an-app-password/) - Label the password "atlantis" - Select **Pull requests**: **Read** and **Write** so that Atlantis can read your pull requests and write comments to them - set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` ### Bitbucket Server (aka Stash) Access Token + - Click on your avatar in the top right and select **Manage account** - Click **HTTP access tokens** in the sidebar - Click **Create token** - Name the token **atlantis** - Give the token **Read** Project permissions and **Write** Pull request permissions -- Choose an Expiry option **Do not expire** or **Expire automatically** +- Choose an Expiry option **Do not expire** or **Expire automatically** - Click **Create** and set the token as an environment variable -``` + +```shell TOKEN="{YOUR_TOKEN}" ``` +### Gitea Access Token + +- Go to "Profile and Settings" > "Settings" in Gitea (top-right) +- Go to "Applications" under "User Settings" in Gitea +- Create a token under the "Manage Access Tokens" with the following permissions: + - issue: Read and Write + - repository: Read and Write +- Record the access token ## Start Atlantis + You're almost ready to start Atlantis, just set two more variables: ```bash @@ -195,9 +255,11 @@ REPO_ALLOWLIST="$YOUR_GIT_HOST/$YOUR_USERNAME/$YOUR_REPO" # server without scheme or port and $YOUR_USERNAME will be the name of the **project** the repo # is under, **not the key** of the project. ``` + Now you can start Atlantis. The exact command differs depending on your Git host: ### GitHub Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -208,6 +270,7 @@ atlantis server \ ``` ### GitHub Enterprise Command + ```bash HOSTNAME=YOUR_GITHUB_ENTERPRISE_HOSTNAME # ex. github.runatlantis.io atlantis server \ @@ -220,6 +283,7 @@ atlantis server \ ``` ### GitLab Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -230,6 +294,7 @@ atlantis server \ ``` ### GitLab Enterprise Command + ```bash HOSTNAME=YOUR_GITLAB_ENTERPRISE_HOSTNAME # ex. 
gitlab.runatlantis.io atlantis server \ @@ -242,6 +307,7 @@ atlantis server \ ``` ### Bitbucket Cloud (bitbucket.org) Command + ```bash atlantis server \ --atlantis-url="$URL" \ @@ -251,6 +317,7 @@ atlantis server \ ``` ### Bitbucket Server (aka Stash) Command + ```bash BASE_URL=YOUR_BITBUCKET_SERVER_URL # ex. http://bitbucket.mycorp:7990 atlantis server \ @@ -278,46 +345,71 @@ atlantis server \ --ssl-key-file=file.key ``` +### Gitea Command + +```bash +atlantis server \ +--atlantis-url="$URL" \ +--gitea-user="$ATLANTIS_GITEA_USER" \ +--gitea-token="$ATLANTIS_GITEA_TOKEN" \ +--gitea-webhook-secret="$ATLANTIS_GITEA_WEBHOOK_SECRET" \ +--gitea-base-url="$ATLANTIS_GITEA_BASE_URL" \ +--gitea-page-size="$ATLANTIS_GITEA_PAGE_SIZE" \ +--repo-allowlist="$REPO_ALLOWLIST" \ +--ssl-cert-file=file.crt \ +--ssl-key-file=file.key +``` + ## Create a pull request + Create a pull request so you can test Atlantis. ::: tip You could add a null resource as a test: + ```hcl resource "null_resource" "example" {} ``` + Or just modify the whitespace in a file. ::: ### Autoplan + You should see Atlantis logging about receiving the webhook and you should see the output of `terraform plan` on your repo. Atlantis tries to figure out the directory to plan in based on the files modified. If you need to customize the directories that Atlantis runs in or the commands it runs if you're using workspaces -or `.tfvars` files, see [atlantis.yaml Reference](/docs/repo-level-atlantis-yaml.html#reference). +or `.tfvars` files, see [atlantis.yaml Reference](../docs/repo-level-atlantis-yaml.md#reference). ### Manual Plan + To manually `plan` in a specific directory or workspace, comment on the pull request using the `-d` or `-w` flags: - + +```shell atlantis plan -d mydir atlantis plan -w staging ``` To add additional arguments to the underlying `terraform plan` you can use: - + +```shell atlantis plan -- -target=resource -var 'foo=bar' ``` ### Apply + If you'd like to `apply`, type a comment: `atlantis apply`. You can use the `-d` or `-w` flags to point Atlantis at a specific plan. Otherwise it tries to apply the plan for the root directory. ## Real-time logs - The [real-time terraform output](/docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which + +The [real-time terraform output](../docs/streaming-logs.md) for your command can be found by clicking into the status check for a given project in a PR which links to the log-streaming UI. This is a terminal UI where you can view your commands executing in real-time. ## Next Steps -* If things are working as expected you can `Ctrl-C` the `atlantis server` command and the `ngrok` command. -* Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.html). -* If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file. See [atlantis.yaml use cases](/docs/repo-level-atlantis-yaml.html#use-cases). -* Check out our [full documentation](../docs/) for more details. + +- If things are working as expected you can `Ctrl-C` the `atlantis server` command and the `ngrok` command. - Hopefully Atlantis is working with your repo and you're ready to move on to a [production-ready deployment](../docs/deployment.md). - If it's not working as expected, you may need to customize how Atlantis runs with an `atlantis.yaml` file. See [atlantis.yaml use cases](../docs/repo-level-atlantis-yaml.md#use-cases).
+- Check out our [full documentation](../docs.md) for more details. diff --git a/runatlantis.io/index.md b/runatlantis.io/index.md new file mode 100644 index 0000000000..537c45de24 --- /dev/null +++ b/runatlantis.io/index.md @@ -0,0 +1,43 @@ +--- +# https://vitepress.dev/reference/default-theme-home-page +layout: home + +pageClass: home-custom + +hero: + name: Atlantis + text: Terraform Pull Request Automation + tagline: Running Terraform Workflows with Ease + image: /hero.png + actions: + - theme: brand + text: Get Started + link: /guide + - theme: alt + text: What is Atlantis? + link: /blog/2017/introducing-atlantis + - theme: alt + text: Join us on Slack + link: https://join.slack.com/t/atlantis-community/shared_invite/zt-9xlxtxtc-CUSKB1ATt_sQy6um~LDPNw + +features: + - title: Fewer Mistakes + details: "Catch errors in Terraform plan output before applying changes. Ensure changes are applied before merging." + icon: ✅ + - title: Empower Developers + details: "Developers can safely submit Terraform pull requests without credentials. Require approvals for applies." + icon: đŸ’ģ + - title: Instant Audit Logs + details: "Detailed logs for infrastructure changes, approvals, and user actions. Configure approvals for production changes." + icon: 📋 + - title: Proven at Scale + details: "Used by top companies to manage over 600 repos with 300 developers. In production since 2017." + icon: 🌍 + - title: Self-Hosted + details: "Your credentials remain secure. Deployable on VMs, Kubernetes, Fargate, etc. Supports GitHub, GitLab, Bitbucket, Azure DevOps." + icon: ⚙ī¸ + - title: Open Source + details: "Atlantis is an open source project with strong community support, powered by volunteer contributions." + icon: 🌐 + +--- diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png b/runatlantis.io/public/apple-touch-icon-114x114.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-114x114.png rename to runatlantis.io/public/apple-touch-icon-114x114.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png b/runatlantis.io/public/apple-touch-icon-120x120.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-120x120.png rename to runatlantis.io/public/apple-touch-icon-120x120.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png b/runatlantis.io/public/apple-touch-icon-144x144.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-144x144.png rename to runatlantis.io/public/apple-touch-icon-144x144.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png b/runatlantis.io/public/apple-touch-icon-152x152.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-152x152.png rename to runatlantis.io/public/apple-touch-icon-152x152.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png b/runatlantis.io/public/apple-touch-icon-57x57.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-57x57.png rename to runatlantis.io/public/apple-touch-icon-57x57.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png b/runatlantis.io/public/apple-touch-icon-60x60.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-60x60.png rename to runatlantis.io/public/apple-touch-icon-60x60.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png b/runatlantis.io/public/apple-touch-icon-72x72.png similarity 
index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-72x72.png rename to runatlantis.io/public/apple-touch-icon-72x72.png diff --git a/runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png b/runatlantis.io/public/apple-touch-icon-76x76.png similarity index 100% rename from runatlantis.io/.vuepress/public/apple-touch-icon-76x76.png rename to runatlantis.io/public/apple-touch-icon-76x76.png diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp new file mode 100644 index 0000000000..50d4156d52 Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic1.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp new file mode 100644 index 0000000000..fe15cbf47e Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic2.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp new file mode 100644 index 0000000000..b448df066d Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic3.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp new file mode 100644 index 0000000000..79418e0b3e Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic4.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp new file mode 100644 index 0000000000..3c0086d447 Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic5.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp new file mode 100644 index 0000000000..3be25b55b5 Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic6.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp new file mode 100644 index 0000000000..bf38895ebc Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic7.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp 
new file mode 100644 index 0000000000..9220492f87 Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic8.webp differ diff --git a/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp new file mode 100644 index 0000000000..1aaad9cc7b Binary files /dev/null and b/runatlantis.io/public/blog/4-reasons-to-try-hashicorps-new-free-terraform-remote-state-storage/pic9.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/deploy.webp b/runatlantis.io/public/blog/april-2024-survey-results/deploy.webp new file mode 100644 index 0000000000..38b721ccaa Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/deploy.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/features.webp b/runatlantis.io/public/blog/april-2024-survey-results/features.webp new file mode 100644 index 0000000000..d116d5ab5f Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/features.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/iac.webp b/runatlantis.io/public/blog/april-2024-survey-results/iac.webp new file mode 100644 index 0000000000..793dba5de6 Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/iac.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/interact.webp b/runatlantis.io/public/blog/april-2024-survey-results/interact.webp new file mode 100644 index 0000000000..0eca135b15 Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/interact.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/repos.webp b/runatlantis.io/public/blog/april-2024-survey-results/repos.webp new file mode 100644 index 0000000000..6e15d4e9f6 Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/repos.webp differ diff --git a/runatlantis.io/public/blog/april-2024-survey-results/vcs.webp b/runatlantis.io/public/blog/april-2024-survey-results/vcs.webp new file mode 100644 index 0000000000..628ab3869d Binary files /dev/null and b/runatlantis.io/public/blog/april-2024-survey-results/vcs.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp new file mode 100644 index 0000000000..72dbca2425 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic1.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif new file mode 100644 index 0000000000..5846753a4f Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic2.gif differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp new file mode 100644 index 0000000000..8119b862e5 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic3.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp new file mode 100644 index 
0000000000..96a6eb388d Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic4.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp new file mode 100644 index 0000000000..936b7a02ab Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic5.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp new file mode 100644 index 0000000000..aafbc40298 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic6.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp new file mode 100644 index 0000000000..5e55af24fe Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic7.webp differ diff --git a/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp new file mode 100644 index 0000000000..ced941c865 Binary files /dev/null and b/runatlantis.io/public/blog/atlantis-0-4-4-now-supports-bitbucket/pic8.webp differ diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/cloudfront.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/cloudfront.tf new file mode 100644 index 0000000000..3f0a3a4715 --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/cloudfront.tf @@ -0,0 +1,59 @@ +resource "aws_cloudfront_distribution" "www_distribution" { + // origin is where CloudFront gets its content from. + origin { + // We need to set up a "custom" origin because otherwise CloudFront won't + // redirect traffic from the root domain to the www domain, that is from + // runatlantis.io to www.runatlantis.io. + custom_origin_config { + // These are all the defaults. + http_port = "80" + https_port = "443" + origin_protocol_policy = "http-only" + origin_ssl_protocols = ["TLSv1", "TLSv1.1", "TLSv1.2"] + } + + // Here we're using our S3 bucket's URL! + domain_name = "${aws_s3_bucket.www.website_endpoint}" + // This can be any name to identify this origin. + origin_id = "${var.www_domain_name}" + } + + enabled = true + default_root_object = "index.html" + + // All values are defaults from the AWS console. + default_cache_behavior { + viewer_protocol_policy = "redirect-to-https" + compress = true + allowed_methods = ["GET", "HEAD"] + cached_methods = ["GET", "HEAD"] + // This needs to match the `origin_id` above. + target_origin_id = "${var.www_domain_name}" + min_ttl = 0 + default_ttl = 86400 + max_ttl = 31536000 + + forwarded_values { + query_string = false + cookies { + forward = "none" + } + } + } + + // Here we're ensuring we can hit this distribution using www.runatlantis.io + // rather than the domain name CloudFront gives us. + aliases = ["${var.www_domain_name}"] + + restrictions { + geo_restriction { + restriction_type = "none" + } + } + + // Here's where our certificate is loaded in! 
+ viewer_certificate { + acm_certificate_arn = "${aws_acm_certificate.certificate.arn}" + ssl_support_method = "sni-only" + } +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/dns.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/dns.tf new file mode 100644 index 0000000000..3d1c0a4a57 --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/dns.tf @@ -0,0 +1,18 @@ +// We want AWS to host our zone so its nameservers can point to our CloudFront +// distribution. +resource "aws_route53_zone" "zone" { + name = "${var.root_domain_name}" +} + +// This Route53 record will point at our CloudFront distribution. +resource "aws_route53_record" "www" { + zone_id = "${aws_route53_zone.zone.zone_id}" + name = "${var.www_domain_name}" + type = "A" + + alias = { + name = "${aws_cloudfront_distribution.www_distribution.domain_name}" + zone_id = "${aws_cloudfront_distribution.www_distribution.hosted_zone_id}" + evaluate_target_health = false + } +} diff --git a/runatlantis.io/public/blog/hosting-our-static-site/code/full.tf b/runatlantis.io/public/blog/hosting-our-static-site/code/full.tf new file mode 100644 index 0000000000..c35ac47529 --- /dev/null +++ b/runatlantis.io/public/blog/hosting-our-static-site/code/full.tf @@ -0,0 +1,84 @@ +resource "aws_s3_bucket" "root" { + bucket = "${var.root_domain_name}" + acl = "public-read" + policy = < /tmp/ngrok.log & +./ngrok config add-authtoken $NGROK_AUTH_TOKEN > /dev/null 2>&1 +./ngrok http 4141 > /tmp/ngrok.log 2>&1 & sleep 2 # find out what URL ngrok has given us export ATLANTIS_URL=$(curl -s 'http://localhost:4040/api/tunnels' | jq -r '.tunnels[] | select(.proto=="https") | .public_url') # Now we can start the e2e tests +cd "${GITHUB_WORKSPACE:-$(git rev-parse --show-toplevel)}/e2e" echo "Running 'make build'" make build diff --git a/scripts/fmt.sh b/scripts/fmt.sh new file mode 100755 index 0000000000..b8a5aef752 --- /dev/null +++ b/scripts/fmt.sh @@ -0,0 +1,23 @@ +#!/usr/bin/env bash + +set -euo pipefail + +go install golang.org/x/tools/cmd/goimports@latest + +gobin="$(go env GOPATH)/bin" +declare -r gobin + +declare -a files +readarray -d '' files < <(find . -type f -name '*.go' ! -name 'mock_*' ! -path './vendor/*' ! 
-path '**/mocks/*' -print0) +declare -r files + +output="$("${gobin}"/goimports -l "${files[@]}")" +declare -r output + +if [[ -n "$output" ]]; then + echo "These files had their 'import' changed - please fix them locally and push a fix" + + echo "$output" + + exit 1 +fi diff --git a/scripts/pin_ci_terraform_providers.sh b/scripts/pin_ci_terraform_providers.sh new file mode 100755 index 0000000000..8de2dfab6c --- /dev/null +++ b/scripts/pin_ci_terraform_providers.sh @@ -0,0 +1,74 @@ +#!/bin/bash + +# Script to pin terraform providers in e2e tests + +RANDOM_PROVIDER_VERSION="3.6.1" +NULL_PROVIDER_VERSION="3.2.2" + +TEST_REPOS_DIR="server/controllers/events/testdata/test-repos" + +for file in $(find $TEST_REPOS_DIR -name '*.tf') +do + basename=$(basename $file) + if [[ "$basename" == "versions.tf" ]] + then + continue + fi + if [[ "$basename" != "main.tf" ]] + then + echo "Found unexpected file: $file" + exit 1 + fi + has_null_provider=false + has_random_provider=false + + version_file="$(dirname $file)/versions.tf" + for resource in $(cat $file | grep '^resource' | awk '{print $2}' | tr -d '"') + do + if [[ "$resource" == "null_resource" ]] + then + has_null_provider=true + elif [[ "$resource" == "random_id" ]] + then + has_random_provider=true + else + echo "Unknown resource $resource in $file" + exit 1 + fi + done + if ! $has_null_provider && ! $has_random_provider + then + echo "No providers needed for $file" + continue + fi + echo "Adding $version_file for $file" + rm -f $version_file + if $has_null_provider + then + echo 'provider "null" {}' >> $version_file + fi + if $has_random_provider + then + echo 'provider "random" {}' >> $version_file + fi + echo "terraform {" >> $version_file + echo " required_providers {" >> $version_file + + if $has_random_provider + then + echo " random = {" >> $version_file + echo ' source = "hashicorp/random"' >> $version_file + echo " version = \"= $RANDOM_PROVIDER_VERSION\"" >> $version_file + echo " }" >> $version_file + fi + if $has_null_provider + then + echo " null = {" >> $version_file + echo ' source = "hashicorp/null"' >> $version_file + echo " version = \"= $NULL_PROVIDER_VERSION\"" >> $version_file + echo " }" >> $version_file + fi + echo " }" >> $version_file + echo "}" >> $version_file + +done diff --git a/server/controllers/api_controller.go b/server/controllers/api_controller.go index 43e316bbdf..c48c99b41d 100644 --- a/server/controllers/api_controller.go +++ b/server/controllers/api_controller.go @@ -20,16 +20,19 @@ import ( const atlantisTokenHeader = "X-Atlantis-Token" type APIController struct { - APISecret []byte - Locker locking.Locker - Logger logging.SimpleLogging - Parser events.EventParsing - ProjectCommandBuilder events.ProjectCommandBuilder - ProjectPlanCommandRunner events.ProjectPlanCommandRunner - ProjectApplyCommandRunner events.ProjectApplyCommandRunner - RepoAllowlistChecker *events.RepoAllowlistChecker - Scope tally.Scope - VCSClient vcs.Client + APISecret []byte + Locker locking.Locker + Logger logging.SimpleLogging + Parser events.EventParsing + ProjectCommandBuilder events.ProjectCommandBuilder + ProjectPlanCommandRunner events.ProjectPlanCommandRunner + ProjectApplyCommandRunner events.ProjectApplyCommandRunner + FailOnPreWorkflowHookError bool + PreWorkflowHooksCommandRunner events.PreWorkflowHooksCommandRunner + PostWorkflowHooksCommandRunner events.PostWorkflowHooksCommandRunner + RepoAllowlistChecker *events.RepoAllowlistChecker + Scope tally.Scope + VCSClient vcs.Client } type APIRequest struct { @@ -44,7 +47,7 
@@ type APIRequest struct { } } -func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command.Context, *events.CommentCommand) ([]command.ProjectContext, error)) ([]command.ProjectContext, error) { +func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command.Context, *events.CommentCommand) ([]command.ProjectContext, error)) ([]command.ProjectContext, []*events.CommentCommand, error) { cc := make([]*events.CommentCommand, 0) for _, project := range a.Projects { @@ -63,12 +66,12 @@ func (a *APIRequest) getCommands(ctx *command.Context, cmdBuilder func(*command. for _, commentCommand := range cc { projectCmds, err := cmdBuilder(ctx, commentCommand) if err != nil { - return nil, fmt.Errorf("failed to build command: %v", err) + return nil, nil, fmt.Errorf("failed to build command: %v", err) } cmds = append(cmds, projectCmds...) } - return cmds, nil + return cmds, cc, nil } func (a *APIController) apiReportError(w http.ResponseWriter, code int, err error) { @@ -142,29 +145,55 @@ func (a *APIController) Apply(w http.ResponseWriter, r *http.Request) { } func (a *APIController) apiPlan(request *APIRequest, ctx *command.Context) (*command.Result, error) { - cmds, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildPlanCommands) + cmds, cc, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildPlanCommands) if err != nil { return nil, err } var projectResults []command.ProjectResult - for _, cmd := range cmds { + for i, cmd := range cmds { + err = a.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running pre-workflow hooks %s.", err) + if a.FailOnPreWorkflowHookError { + return nil, err + } + } + res := a.ProjectPlanCommandRunner.Plan(cmd) projectResults = append(projectResults, res) + + err = a.PostWorkflowHooksCommandRunner.RunPostHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running post-workflow hooks %s.", err) + } } return &command.Result{ProjectResults: projectResults}, nil } func (a *APIController) apiApply(request *APIRequest, ctx *command.Context) (*command.Result, error) { - cmds, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildApplyCommands) + cmds, cc, err := request.getCommands(ctx, a.ProjectCommandBuilder.BuildApplyCommands) if err != nil { return nil, err } var projectResults []command.ProjectResult - for _, cmd := range cmds { + for i, cmd := range cmds { + err = a.PreWorkflowHooksCommandRunner.RunPreHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running pre-workflow hooks %s.", err) + if a.FailOnPreWorkflowHookError { + return nil, err + } + } + res := a.ProjectApplyCommandRunner.Apply(cmd) projectResults = append(projectResults, res) + + err = a.PostWorkflowHooksCommandRunner.RunPostHooks(ctx, cc[i]) + if err != nil { + ctx.Log.Err("Error running post-workflow hooks %s.", err) + } } return &command.Result{ProjectResults: projectResults}, nil } @@ -223,6 +252,7 @@ func (a *APIController) apiParseAndValidate(r *http.Request) (*APIRequest, *comm }, Scope: a.Scope, Log: a.Logger, + API: true, }, http.StatusOK, nil }
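The wrapping above pairs cmds[i] with cc[i] so that pre- and post-workflow hooks fire around every project command served over the API. For orientation, a minimal client sketch of calling this endpoint; the path /api/plan and the X-Atlantis-Token header follow Atlantis's documented API, while the port, repository values, and token below are placeholders for illustration:

package main

import (
	"bytes"
	"fmt"
	"net/http"
)

func main() {
	// Field names mirror the APIRequest struct above; the values are made up.
	payload := []byte(`{"Repository": "org/repo", "Ref": "main", "Type": "Github", "Projects": ["default"]}`)
	req, err := http.NewRequest(http.MethodPost, "http://localhost:4141/api/plan", bytes.NewReader(payload))
	if err != nil {
		panic(err)
	}
	// atlantisTokenHeader in this file; must match the server's --api-secret.
	req.Header.Set("X-Atlantis-Token", "atlantis-api-secret")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status) // the response body carries the plan results
}

diff --git a/server/controllers/api_controller_test.go b/server/controllers/api_controller_test.go index 1f2370ef08..3b3aa520aa 100644 --- a/server/controllers/api_controller_test.go +++ b/server/controllers/api_controller_test.go @@ -86,17 +86,27 @@ func setup(t *testing.T) (controllers.APIController, *MockProjectCommandBuilder, ApplySuccess: "success", }) + preWorkflowHooksCommandRunner := NewMockPreWorkflowHooksCommandRunner() + +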
When(preWorkflowHooksCommandRunner.RunPreHooks(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn(nil) + + postWorkflowHooksCommandRunner := NewMockPostWorkflowHooksCommandRunner() + + When(postWorkflowHooksCommandRunner.RunPostHooks(Any[*command.Context](), Any[*events.CommentCommand]())).ThenReturn(nil) + ac := controllers.APIController{ - APISecret: []byte(atlantisToken), - Locker: locker, - Logger: logger, - Scope: scope, - Parser: parser, - ProjectCommandBuilder: projectCommandBuilder, - ProjectPlanCommandRunner: projectCommandRunner, - ProjectApplyCommandRunner: projectCommandRunner, - VCSClient: vcsClient, - RepoAllowlistChecker: repoAllowlistChecker, + APISecret: []byte(atlantisToken), + Locker: locker, + Logger: logger, + Scope: scope, + Parser: parser, + ProjectCommandBuilder: projectCommandBuilder, + ProjectPlanCommandRunner: projectCommandRunner, + ProjectApplyCommandRunner: projectCommandRunner, + PreWorkflowHooksCommandRunner: preWorkflowHooksCommandRunner, + PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner, + VCSClient: vcsClient, + RepoAllowlistChecker: repoAllowlistChecker, } return ac, projectCommandBuilder, projectCommandRunner } diff --git a/server/controllers/events/events_controller.go b/server/controllers/events/events_controller.go index 2246a8f48b..a7ffa0c592 100644 --- a/server/controllers/events/events_controller.go +++ b/server/controllers/events/events_controller.go @@ -14,6 +14,7 @@ package events import ( + "encoding/json" "fmt" "io" "net/http" @@ -28,6 +29,7 @@ import ( "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver" + "github.com/runatlantis/atlantis/server/events/vcs/gitea" "github.com/runatlantis/atlantis/server/logging" tally "github.com/uber-go/tally/v4" gitlab "github.com/xanzy/go-gitlab" @@ -37,6 +39,11 @@ const githubHeader = "X-Github-Event" const gitlabHeader = "X-Gitlab-Event" const azuredevopsHeader = "Request-Id" +const giteaHeader = "X-Gitea-Event" +const giteaEventTypeHeader = "X-Gitea-Event-Type" +const giteaSignatureHeader = "X-Gitea-Signature" +const giteaRequestIDHeader = "X-Gitea-Delivery" + // bitbucketEventTypeHeader is the same in both cloud and server. const bitbucketEventTypeHeader = "X-Event-Key" const bitbucketCloudRequestIDHeader = "X-Request-UUID" @@ -91,11 +98,20 @@ type VCSEventsController struct { // Azure DevOps Team Project. If empty, no request validation is done. AzureDevopsWebhookBasicPassword []byte AzureDevopsRequestValidator AzureDevopsRequestValidator + GiteaWebhookSecret []byte }
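With the webhook secret and the Gitea headers in place, a signed request can be produced for manual testing. A minimal sender sketch, assuming Gitea's documented signing scheme of a hex-encoded HMAC-SHA256 over the raw body; the URL, payload, and delivery ID below are made up:

package main

import (
	"bytes"
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
	"net/http"
)

func main() {
	secret := []byte("webhook-secret") // must match GiteaWebhookSecret
	body := []byte(`{"action": "created"}`)

	// Sign the raw body exactly as Gitea does: HMAC-SHA256, hex-encoded.
	mac := hmac.New(sha256.New, secret)
	mac.Write(body)
	sig := hex.EncodeToString(mac.Sum(nil))

	req, err := http.NewRequest(http.MethodPost, "http://localhost:4141/events", bytes.NewReader(body))
	if err != nil {
		panic(err)
	}
	// Header names match the constants declared above.
	req.Header.Set("X-Gitea-Event", "pull_request")
	req.Header.Set("X-Gitea-Event-Type", "pull_request_comment")
	req.Header.Set("X-Gitea-Signature", sig)
	req.Header.Set("X-Gitea-Delivery", "test-delivery-id")
	req.Header.Set("Content-Type", "application/json")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()
	fmt.Println(resp.Status)
}

// Post handles POST webhook requests.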
func (e *VCSEventsController) Post(w http.ResponseWriter, r *http.Request) { - if r.Header.Get(githubHeader) != "" { + if r.Header.Get(giteaHeader) != "" { + if !e.supportsHost(models.Gitea) { + e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support Gitea") + return + } + e.Logger.Debug("handling Gitea post") + e.handleGiteaPost(w, r) + return + } else if r.Header.Get(githubHeader) != "" { if !e.supportsHost(models.Github) { e.respond(w, logging.Debug, http.StatusBadRequest, "Ignoring request since not configured to support GitHub") return @@ -288,6 +304,91 @@ func (e *VCSEventsController) handleAzureDevopsPost(w http.ResponseWriter, r *ht } } +func (e *VCSEventsController) handleGiteaPost(w http.ResponseWriter, r *http.Request) { + signature := r.Header.Get(giteaSignatureHeader) + eventType := r.Header.Get(giteaEventTypeHeader) + reqID := r.Header.Get(giteaRequestIDHeader) + + defer r.Body.Close() // Ensure the request body is closed + + body, err := io.ReadAll(r.Body) + if err != nil { + e.respond(w, logging.Error, http.StatusBadRequest, "Unable to read body: %s %s=%s", err, "X-Gitea-Delivery", reqID) + return + } + + if len(e.GiteaWebhookSecret) > 0 { + if err := gitea.ValidateSignature(body, signature, e.GiteaWebhookSecret); err != nil { + e.respond(w, logging.Warn, http.StatusBadRequest, errors.Wrap(err, "request did not pass validation").Error()) + return + } + } + + // Log the event type for debugging purposes + e.Logger.Debug("Received Gitea event %s with ID %s", eventType, reqID) + + // Depending on the event type, handle the event appropriately + switch eventType { + case "pull_request_comment": + e.HandleGiteaPullRequestCommentEvent(w, body, reqID) + case "pull_request": + e.Logger.Debug("Handling as pull_request") + e.handleGiteaPullRequestEvent(w, body, reqID) + // Add other case handlers as necessary + default: + e.respond(w, logging.Debug, http.StatusOK, "Ignoring unsupported Gitea event type: %s %s=%s", eventType, "X-Gitea-Delivery", reqID) + } +} + +func (e *VCSEventsController) handleGiteaPullRequestEvent(w http.ResponseWriter, body []byte, reqID string) { + e.Logger.Debug("Entering handleGiteaPullRequestEvent") + // Attempt to unmarshal the incoming body into the Gitea PullRequest struct + var payload gitea.GiteaWebhookPayload + if err := json.Unmarshal(body, &payload); err != nil { + e.Logger.Err("Failed to unmarshal Gitea webhook payload: %v", err) + e.respond(w, logging.Error, http.StatusBadRequest, "Failed to parse request body") + return + } + + e.Logger.Debug("Successfully unmarshaled Gitea event") + + // Use the parser function to convert into Atlantis models + pull, pullEventType, baseRepo, headRepo, user, err := e.Parser.ParseGiteaPullRequestEvent(payload.PullRequest) + if err != nil { + e.Logger.Err("Failed to parse Gitea pull request event: %v", err) + e.respond(w, logging.Error, http.StatusInternalServerError, "Failed to process event") + return + } + + e.Logger.Debug("Parsed Gitea event into Atlantis models successfully") + + logger := e.Logger.With("gitea-request-id", reqID) + logger.Debug("Identified Gitea event as type %v", pullEventType) + + // Call a generic handler for pull request events + response := e.handlePullRequestEvent(logger, baseRepo, headRepo, pull, user, pullEventType) + + e.respond(w, logging.Debug, http.StatusOK, response.body) +} +
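gitea.ValidateSignature itself is outside this diff; as a sketch of what such a check typically looks like, assuming the same hex-encoded HMAC-SHA256 scheme as above (an illustration, not the function from server/events/vcs/gitea):

package gitea

import (
	"crypto/hmac"
	"crypto/sha256"
	"encoding/hex"
	"fmt"
)

// ValidateSignature recomputes the HMAC of the raw body and compares it
// to the signature header. hmac.Equal is a constant-time comparison, so
// the check does not leak timing information about the secret.
func ValidateSignature(body []byte, signature string, secret []byte) error {
	mac := hmac.New(sha256.New, secret)
	mac.Write(body) // hash.Hash writes never return an error
	expected := hex.EncodeToString(mac.Sum(nil))
	if !hmac.Equal([]byte(expected), []byte(signature)) {
		return fmt.Errorf("signature does not match")
	}
	return nil
}

+// HandleGiteaPullRequestCommentEvent handles comment events from Gitea where Atlantis commands can come from.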
+func (e *VCSEventsController) HandleGiteaPullRequestCommentEvent(w http.ResponseWriter, body []byte, reqID string) { + var event gitea.GiteaIssueCommentPayload + if err := json.Unmarshal(body, &event); err != nil { + e.Logger.Err("Failed to unmarshal Gitea comment payload: %v", err) + e.respond(w, logging.Error, http.StatusBadRequest, "Failed to parse request body") + return + } + e.Logger.Debug("Successfully unmarshaled Gitea comment event") + + baseRepo, user, pullNum, _ := e.Parser.ParseGiteaIssueCommentEvent(event) + // Since we're lacking headRepo and maybePull details, we'll pass nil + // This follows the same approach as the GitHub client for handling comment events without full PR details + response := e.handleCommentEvent(e.Logger, baseRepo, nil, nil, user, pullNum, event.Comment.Body, event.Comment.ID, models.Gitea) + + e.respond(w, logging.Debug, http.StatusOK, response.body) +} + // HandleGithubCommentEvent handles comment events from GitHub where Atlantis // commands can come from. It's exported to make testing easier. func (e *VCSEventsController) HandleGithubCommentEvent(event *github.IssueCommentEvent, githubReqID string, logger logging.SimpleLogging) HTTPResponse { @@ -571,7 +672,7 @@ func (e *VCSEventsController) handleCommentEvent(logger logging.SimpleLogging, b } } - // It's a comment we're gonna react to, so add a reaction. + // It's a comment we're going to react to, so add a reaction. if e.EmojiReaction != "" { err := e.VCSClient.ReactToComment(logger, baseRepo, pullNum, commentID, e.EmojiReaction) if err != nil { diff --git a/server/controllers/events/events_controller_e2e_test.go b/server/controllers/events/events_controller_e2e_test.go index 781d483c34..7f63f191f5 100644 --- a/server/controllers/events/events_controller_e2e_test.go +++ b/server/controllers/events/events_controller_e2e_test.go @@ -53,12 +53,12 @@ var mockPreWorkflowHookRunner *runtimemocks.MockPreWorkflowHookRunner var mockPostWorkflowHookRunner *runtimemocks.MockPostWorkflowHookRunner -func (m *NoopTFDownloader) GetFile(_, _ string) error { +func (m *NoopTFDownloader) GetAny(_, _ string) error { return nil } -func (m *NoopTFDownloader) GetAny(_, _ string) error { - return nil +func (m *NoopTFDownloader) Install(_ string, _ string, _ *version.Version) (string, error) { + return "", nil } type LocalConftestCache struct { @@ -1333,7 +1333,6 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers workingDir := &events.FileWorkspace{ DataDir: dataDir, TestingOverrideHeadCloneURL: "override-me", - Logger: logger, } var preWorkflowHooks []*valid.WorkflowHook if !opt.disablePreWorkflowHooks { @@ -1425,7 +1424,6 @@ func setupE2E(t *testing.T, repoDir string, opt setupOption) (events_controllers false, "auto", statsScope, - logger, terraformClient, ) diff --git a/server/controllers/events/events_controller_test.go b/server/controllers/events/events_controller_test.go index 183772df8e..9594ae5284 100644 --- a/server/controllers/events/events_controller_test.go +++ b/server/controllers/events/events_controller_test.go @@ -42,6 +42,7 @@ import ( ) const githubHeader = "X-Github-Event" +const giteaHeader = "X-Gitea-Event" const gitlabHeader = "X-Gitlab-Event" const azuredevopsHeader = "Request-Id" @@ -68,6 +69,17 @@ func TestPost_UnsupportedVCSGithub(t *testing.T) { t.Log("when the request is for an unsupported vcs a 400 is
returned") + e, _, _, _, _, _, _, _, _ := setup(t) + e.SupportedVCSHosts = nil + req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil)) + req.Header.Set(giteaHeader, "value") + w := httptest.NewRecorder() + e.Post(w, req) + ResponseContains(t, w, http.StatusBadRequest, "Ignoring request since not configured to support Gitea") +} + func TestPost_UnsupportedVCSGitlab(t *testing.T) { t.Log("when the request is for an unsupported vcs a 400 is returned") e, _, _, _, _, _, _, _, _ := setup(t) @@ -90,6 +102,17 @@ func TestPost_InvalidGithubSecret(t *testing.T) { ResponseContains(t, w, http.StatusBadRequest, "err") } +func TestPost_InvalidGiteaSecret(t *testing.T) { + t.Log("when the gitea payload can't be validated a 400 is returned") + e, v, _, _, _, _, _, _, _ := setup(t) + w := httptest.NewRecorder() + req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil)) + req.Header.Set(giteaHeader, "value") + When(v.Validate(req, secret)).ThenReturn(nil, errors.New("err")) + e.Post(w, req) + ResponseContains(t, w, http.StatusBadRequest, "request did not pass validation") +} + func TestPost_InvalidGitlabSecret(t *testing.T) { t.Log("when the gitlab payload can't be validated a 400 is returned") e, _, gl, _, _, _, _, _, _ := setup(t) @@ -112,6 +135,18 @@ func TestPost_UnsupportedGithubEvent(t *testing.T) { ResponseContains(t, w, http.StatusOK, "Ignoring unsupported event") } +func TestPost_UnsupportedGiteaEvent(t *testing.T) { + t.Log("when the event type is an unsupported gitea event we ignore it") + e, v, _, _, _, _, _, _, _ := setup(t) + w := httptest.NewRecorder() + req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil)) + req.Header.Set(giteaHeader, "value") + e.GiteaWebhookSecret = nil + When(v.Validate(req, nil)).ThenReturn([]byte(`{"not an event": ""}`), nil) + e.Post(w, req) + ResponseContains(t, w, http.StatusOK, "Ignoring unsupported Gitea event") +} + func TestPost_UnsupportedGitlabEvent(t *testing.T) { t.Log("when the event type is an unsupported gitlab event we ignore it") e, _, gl, _, _, _, _, _, _ := setup(t) @@ -211,8 +246,10 @@ func TestPost_GitlabCommentNotAllowlisted(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) expRepo, _ := models.NewRepo(models.Gitlab, "gitlabhq/gitlab-test", "https://example.com/gitlabhq/gitlab-test.git", "", "") @@ -244,8 +281,10 @@ func TestPost_GitlabCommentNotAllowlistedWithSilenceErrors(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) vcsClient.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) @@ -276,8 +315,10 @@ func TestPost_GithubCommentNotAllowlisted(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer 
resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) expRepo, _ := models.NewRepo(models.Github, "baxterthehacker/public-repo", "https://github.com/baxterthehacker/public-repo.git", "", "") @@ -310,8 +351,10 @@ func TestPost_GithubCommentNotAllowlistedWithSilenceErrors(t *testing.T) { w := httptest.NewRecorder() e.Post(w, req) - Equals(t, http.StatusForbidden, w.Result().StatusCode) - body, _ := io.ReadAll(w.Result().Body) + resp := w.Result() + defer resp.Body.Close() + Equals(t, http.StatusForbidden, resp.StatusCode) + body, _ := io.ReadAll(resp.Body) exp := "Repo not allowlisted" Assert(t, strings.Contains(string(body), exp), "exp %q to be contained in %q", exp, string(body)) vcsClient.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) @@ -976,7 +1019,8 @@ func setup(t *testing.T) (events_controllers.VCSEventsController, *mocks.MockGit CommandRunner: cr, PullCleaner: c, GithubWebhookSecret: secret, - SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab, models.AzureDevops}, + SupportedVCSHosts: []models.VCSHostType{models.Github, models.Gitlab, models.AzureDevops, models.Gitea}, + GiteaWebhookSecret: secret, GitlabWebhookSecret: secret, GitlabRequestParserValidator: gl, RepoAllowlistChecker: repoAllowlistChecker, diff --git a/server/controllers/events/testdata/null_provider_lockfile_old_version b/server/controllers/events/testdata/null_provider_lockfile_old_version index 9af6a64b26..c83acaa26b 100644 --- a/server/controllers/events/testdata/null_provider_lockfile_old_version +++ b/server/controllers/events/testdata/null_provider_lockfile_old_version @@ -2,20 +2,24 @@ # Manual edits may be lost in future updates. 
provider "registry.terraform.io/hashicorp/null" { - version = "3.1.0" - constraints = "3.1.0" + version = "3.2.2" + constraints = "3.2.2" hashes = [ - "h1:grYDj8/Lvp1OwME+g1AsECPN1czO5ssSf+8fCluCHQY=", - "zh:02a1675fd8de126a00460942aaae242e65ca3380b5bb192e8773ef3da9073fd2", - "zh:53e30545ff8926a8e30ad30648991ca8b93b6fa496272cd23b26763c8ee84515", - "zh:5f9200bf708913621d0f6514179d89700e9aa3097c77dac730e8ba6e5901d521", - "zh:9ebf4d9704faba06b3ec7242c773c0fbfe12d62db7d00356d4f55385fc69bfb2", - "zh:a6576c81adc70326e4e1c999c04ad9ca37113a6e925aefab4765e5a5198efa7e", - "zh:a8a42d13346347aff6c63a37cda9b2c6aa5cc384a55b2fe6d6adfa390e609c53", - "zh:c797744d08a5307d50210e0454f91ca4d1c7621c68740441cf4579390452321d", - "zh:cecb6a304046df34c11229f20a80b24b1603960b794d68361a67c5efe58e62b8", - "zh:e1371aa1e502000d9974cfaff5be4cfa02f47b17400005a16f14d2ef30dc2a70", - "zh:fc39cc1fe71234a0b0369d5c5c7f876c71b956d23d7d6f518289737a001ba69b", - "zh:fea4227271ebf7d9e2b61b89ce2328c7262acd9fd190e1fd6d15a591abfa848e", + "h1:Gef5VGfobY5uokA5nV/zFvWeMNR2Pmq79DH94QnNZPM=", + "h1:IMVAUHKoydFrlPrl9OzasDnw/8ntZFerCC9iXw1rXQY=", + "h1:vWAsYRd7MjYr3adj8BVKRohVfHpWQdvkIwUQ2Jf5FVM=", + "h1:zT1ZbegaAYHwQa+QwIFugArWikRJI9dqohj8xb0GY88=", + "zh:3248aae6a2198f3ec8394218d05bd5e42be59f43a3a7c0b71c66ec0df08b69e7", + "zh:32b1aaa1c3013d33c245493f4a65465eab9436b454d250102729321a44c8ab9a", + "zh:38eff7e470acb48f66380a73a5c7cdd76cc9b9c9ba9a7249c7991488abe22fe3", + "zh:4c2f1faee67af104f5f9e711c4574ff4d298afaa8a420680b0cb55d7bbc65606", + "zh:544b33b757c0b954dbb87db83a5ad921edd61f02f1dc86c6186a5ea86465b546", + "zh:696cf785090e1e8cf1587499516b0494f47413b43cb99877ad97f5d0de3dc539", + "zh:6e301f34757b5d265ae44467d95306d61bef5e41930be1365f5a8dcf80f59452", + "zh:78d5eefdd9e494defcb3c68d282b8f96630502cac21d1ea161f53cfe9bb483b3", + "zh:913a929070c819e59e94bb37a2a253c228f83921136ff4a7aa1a178c7cce5422", + "zh:aa9015926cd152425dbf86d1abdbc74bfe0e1ba3d26b3db35051d7b9ca9f72ae", + "zh:bb04798b016e1e1d49bcc76d62c53b56c88c63d6f2dfe38821afef17c416a0e1", + "zh:c23084e1b23577de22603cff752e59128d83cfecc2e6819edadd8cf7a10af11e", ] } diff --git a/server/controllers/events/testdata/test-repos/automerge/dir1/versions.tf b/server/controllers/events/testdata/test-repos/automerge/dir1/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/automerge/dir1/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/automerge/dir2/versions.tf b/server/controllers/events/testdata/test-repos/automerge/dir2/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/automerge/dir2/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt index c32ed6dfdc..8f32ac5efc 100644 --- a/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/automerge/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. 
dir: `dir1` workspace: `default` ```diff @@ -20,10 +21,14 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -43,17 +48,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` + ```shell + atlantis plan -d dir2 + ``` --- ### Plan Summary 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf index 2aa6a6437d..231579dd90 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy1" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/versions.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/versions.tf new file mode 100644 index 0000000000..2c49bb6727 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir1/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf index 5292f29c85..97f93c35e1 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/main.tf @@ -1,4 +1,3 @@ resource "random_id" "dummy2" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/versions.tf b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/versions.tf new file mode 100644 index 0000000000..2c49bb6727 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/dir2/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.2" + } 
+ } +} diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt index 7f0f5f45a8..d49fde3e8f 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default`
Show Output @@ -21,18 +22,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` +
* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` -
+ ```shell + atlantis plan -d dir1 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -54,18 +58,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` - + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -73,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt index 04f87516ab..0e6c0c960c 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-import-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` \ No newline at end of file + ```shell + atlantis plan -d dir1 + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt index c9a7d87124..f94c8567ed 100644 --- a/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/import-multiple-project/exp-output-plan-again.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` ```diff @@ -14,10 +15,14 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -38,18 +43,21 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 1 to add, 0 to change, 0 to destroy. 
``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` - + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -57,7 +65,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 1 with changes, 1 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt index 8fcbeaa757..ddcccae10a 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.for_each["default"] will be created @@ -28,22 +27,29 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 2 to add, 0 to change, 0 to destroy. ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 2 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt index d7957913db..32680f595f 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-count.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. 
* :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt index 45b02dd35f..1e1caabfca 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-import-foreach.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt index 46a378158b..379d9e8ce7 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/exp-output-plan-again.txt @@ -8,13 +8,21 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d . -- -var var=overridden` + ```shell + atlantis plan -d . 
-- -var var=overridden + ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf b/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf index f7bf7839d0..082a9534c0 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/main.tf @@ -1,12 +1,10 @@ resource "random_id" "for_each" { for_each = toset([var.var]) - keepers = {} byte_length = 1 } resource "random_id" "count" { count = 1 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-single-project-var/versions.tf b/server/controllers/events/testdata/test-repos/import-single-project-var/versions.tf new file mode 100644 index 0000000000..2c49bb6727 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-single-project-var/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt index 5662b98336..9c9fa29474 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-autoplan.txt @@ -17,7 +17,6 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } # random_id.dummy2 will be created @@ -28,22 +27,29 @@ Terraform will perform the following actions: + dec = (known after apply) + hex = (known after apply) + id = (known after apply) - + keepers = {} } Plan: 2 to add, 0 to change, 0 to destroy. ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 2 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt index 1823a29537..f4a6cb37d9 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt index d515857ff1..9ab2dbb7e3 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-import-dummy2.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` \ No newline at end of file + ```shell + atlantis plan -d . + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt index adc09b4a37..a7268e38f8 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt +++ b/server/controllers/events/testdata/test-repos/import-single-project/exp-output-plan-again.txt @@ -8,13 +8,21 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` + ```shell + atlantis plan -d . 
+ ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/import-single-project/main.tf b/server/controllers/events/testdata/test-repos/import-single-project/main.tf index 2e60a118f5..0a1884fe5e 100644 --- a/server/controllers/events/testdata/test-repos/import-single-project/main.tf +++ b/server/controllers/events/testdata/test-repos/import-single-project/main.tf @@ -1,9 +1,7 @@ resource "random_id" "dummy1" { - keepers = {} byte_length = 1 } resource "random_id" "dummy2" { - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-single-project/versions.tf b/server/controllers/events/testdata/test-repos/import-single-project/versions.tf new file mode 100644 index 0000000000..2c49bb6727 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-single-project/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf b/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf index de0cb8d4a2..0bc18fe1e3 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf +++ b/server/controllers/events/testdata/test-repos/import-workspace/dir1/main.tf @@ -1,14 +1,12 @@ resource "random_id" "dummy1" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } resource "random_id" "dummy2" { count = terraform.workspace == "ops" ? 1 : 0 - keepers = {} byte_length = 1 } diff --git a/server/controllers/events/testdata/test-repos/import-workspace/dir1/versions.tf b/server/controllers/events/testdata/test-repos/import-workspace/dir1/versions.tf new file mode 100644 index 0000000000..2c49bb6727 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/import-workspace/dir1/versions.tf @@ -0,0 +1,9 @@ +provider "random" {} +terraform { + required_providers { + random = { + source = "hashicorp/random" + version = "3.6.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt index 99e0e3434f..38f283b20e 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt +++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy1.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. 
* :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` + ```shell + atlantis plan -p dir1-ops + ``` diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt index 3f168d91b3..cd4659c0b7 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt +++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-import-dir1-ops-dummy2.txt @@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform. :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` + ```shell + atlantis plan -p dir1-ops + ``` diff --git a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt index 9859fcbc1f..7edca86268 100644 --- a/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt +++ b/server/controllers/events/testdata/test-repos/import-workspace/exp-output-plan.txt @@ -8,13 +8,21 @@ and found no differences, so no changes are needed. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p dir1-ops` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p dir1-ops + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p dir1-ops` + ```shell + atlantis plan -p dir1-ops + ``` --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt index 1e55d623b5..43a1815d11 100644 --- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `staging` workspace: `default` 1. dir: `production` workspace: `default` +--- ### 1. dir: `staging` workspace: `default`
Show Output @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "staging" ``` +
* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` - + ```shell + atlantis plan -d staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "production" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` - + ```shell + atlantis plan -d production + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt index f08e2c50ae..298d515d93 100644 --- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt +++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-production.txt @@ -20,8 +20,12 @@ Plan: 1 to add, 0 to change, 0 to destroy. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` + ```shell + atlantis plan -d production + ``` diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt index de773736db..9f8399b7f1 100644 --- a/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt +++ b/server/controllers/events/testdata/test-repos/modules-yaml/exp-output-plan-staging.txt @@ -20,8 +20,12 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` + ```shell + atlantis plan -d staging + ``` diff --git a/server/controllers/events/testdata/test-repos/modules-yaml/modules/null/versions.tf b/server/controllers/events/testdata/test-repos/modules-yaml/modules/null/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/modules-yaml/modules/null/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt index d1faf53fc8..c3bdadc019 100644 --- a/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt +++ b/server/controllers/events/testdata/test-repos/modules/exp-output-autoplan-only-staging.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "staging" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` - + ```shell + atlantis plan -d staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt index 19246ade07..13d2414f3f 100644 --- a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt +++ b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-production.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "production" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production` - + ```shell + atlantis plan -d production + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt index d1faf53fc8..c3bdadc019 100644 --- a/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt +++ b/server/controllers/events/testdata/test-repos/modules/exp-output-plan-staging.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + var = "staging" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging` - + ```shell + atlantis plan -d staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/modules/modules/null/versions.tf b/server/controllers/events/testdata/test-repos/modules/modules/null/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/modules/modules/null/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-apply-reqs/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt index e6643f8ce3..107a689278 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-approve-policies-clear.txt @@ -1,6 +1,7 @@ Ran Approve Policies for 1 projects: 1. dir: `.` workspace: `default` +--- ### 1. dir: `.` workspace: `default` **Approve Policies Failed**: One or more policy sets require additional approval. @@ -9,15 +10,25 @@ Ran Approve Policies for 1 projects: policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-clear-approval/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt index f366769233..c8b5da50dd 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-auto-policy-check.txt @@ -25,15 +25,24 @@ post-conftest output policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-custom-run-steps/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt index d6e39f260d..b842f99682 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-approve-policies.txt @@ -1,6 +1,7 @@ Ran Approve Policies for 1 projects: 1. dir: `.` workspace: `default` +--- ### 1. dir: `.` workspace: `default` **Approve Policies Error** @@ -15,15 +16,25 @@ Ran Approve Policies for 1 projects: policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. 
Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-diff-owner/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-previous-match/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo-server-side/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . 
+ ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-disabled-repo/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . 
+ ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo-server-side/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt index 0ace841faf..3d94c6521c 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit policy set: test_policy: requires: 1 approval(s), have: 0. 
``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-enabled-repo/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt index c7f45c85f5..669b9cb064 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-auto-policy-check.txt @@ -15,15 +15,24 @@ FAIL - - null_resource_policy - WARNING: Null Resource crea policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt index 6f7ce87643..d3f41336a8 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/exp-output-autoplan.txt @@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . 
+ ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` - + ```shell + atlantis plan -d . + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-extra-args/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-extra-args/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir1/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir1/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir1/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir2/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir2/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/dir2/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt index eb6bda8987..7e0bd72a67 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-apply.txt @@ -2,6 +2,7 @@ Ran Apply for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default` ```diff diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt index c292c651f3..944cd1ba56 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-auto-policy-check.txt @@ -2,6 +2,7 @@ Ran Policy Check for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. 
dir: `dir1` workspace: `default` #### Policy Set: `test_policy` @@ -13,10 +14,14 @@ Ran Policy Check for 2 projects: * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d dir1` + ```shell + atlantis plan -d dir1 + ``` --- ### 2. dir: `dir2` workspace: `default` @@ -35,15 +40,25 @@ FAIL - - main - WARNING: Forbidden Resource creation is pro policy set: test_policy: requires: 1 approval(s), have: 0. ``` * :heavy_check_mark: To **approve** this project, comment: - * `atlantis approve_policies -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis approve_policies -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan -d dir2` + ```shell + atlantis plan -d dir2 + ``` --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `atlantis approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + ```shell + atlantis approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `atlantis plan` \ No newline at end of file + ```shell + atlantis plan + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt index 098c4eba93..e01442f671 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-multi-projects/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `dir1` workspace: `default` 1. dir: `dir2` workspace: `default` +--- ### 1. dir: `dir1` workspace: `default`
Show Output @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` +
* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir1` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir1 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir1` - + ```shell + atlantis plan -d dir1 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d dir2` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d dir2 + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d dir2` - + ```shell + atlantis plan -d dir2 + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt index ea7d4bf3ec..0fe7b1646b 100644 --- a/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/policy-checks-success-silent/exp-output-autoplan.txt @@ -9,13 +9,21 @@ state, without changing any real infrastructure. ``` * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d .` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d . + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d .` + ```shell + atlantis plan -d . 
+  ```
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt
index 0ace841faf..3d94c6521c 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-auto-policy-check.txt
@@ -15,15 +15,24 @@ FAIL - - main - WARNING: Null Resource creation is prohibit
 policy set: test_policy: requires: 1 approval(s), have: 0.
 ```
 * :heavy_check_mark: To **approve** this project, comment:
-    * `atlantis approve_policies -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis approve_policies -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To re-run policies **plan** this project again by commenting:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 ---
-* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment:
-    * `atlantis approve_policies`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
+* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis approve_policies
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
 * :repeat: To re-run policies **plan** this project again by commenting:
-    * `atlantis plan`
\ No newline at end of file
+  ```shell
+  atlantis plan
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt
index 6f7ce87643..d3f41336a8 100644
--- a/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/policy-checks/exp-output-autoplan.txt
@@ -19,17 +19,25 @@ Plan: 1 to add, 0 to change, 0 to destroy.
 Changes to Outputs:
   + workspace = "default"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/policy-checks/versions.tf b/server/controllers/events/testdata/test-repos/policy-checks/versions.tf
new file mode 100644
index 0000000000..96a68f8056
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/policy-checks/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    null = {
+      source  = "hashicorp/null"
+      version = "= 3.2.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt
index a136ff9691..4b78a636d5 100644
--- a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt
+++ b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-apply.txt
@@ -2,6 +2,7 @@ Ran Apply for 2 projects:
 1. dir: `infrastructure/production` workspace: `default`
 1. dir: `infrastructure/staging` workspace: `default`
+---
 ### 1. dir: `infrastructure/production` workspace: `default`
 ```diff
diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt
index 29f5f76dae..8bf40fc657 100644
--- a/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/repo-config-file/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
 1. dir: `infrastructure/staging` workspace: `default`
 1. dir: `infrastructure/production` workspace: `default`
+---
 ### 1. dir: `infrastructure/staging` workspace: `default`
 ```diff
@@ -20,10 +21,14 @@ Plan: 1 to add, 0 to change, 0 to destroy.
 ```
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d infrastructure/staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d infrastructure/staging
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d infrastructure/staging`
+  ```shell
+  atlantis plan -d infrastructure/staging
+  ```
 ---
 ### 2. dir: `infrastructure/production` workspace: `default`
@@ -43,17 +48,25 @@
 ```
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d infrastructure/production`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d infrastructure/production
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d infrastructure/production`
+  ```shell
+  atlantis plan -d infrastructure/production
+  ```
 ---
 ### Plan Summary
 2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/production/versions.tf b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/production/versions.tf
new file mode 100644
index 0000000000..96a68f8056
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/production/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    null = {
+      source  = "hashicorp/null"
+      version = "= 3.2.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/staging/versions.tf b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/staging/versions.tf
new file mode 100644
index 0000000000..96a68f8056
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/repo-config-file/infrastructure/staging/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    null = {
+      source  = "hashicorp/null"
+      version = "= 3.2.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt
index ad9591b8ae..37e78c18af 100644
--- a/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/server-side-cfg/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
 1. dir: `.` workspace: `default`
 1. dir: `.` workspace: `staging`
+---
 ### 1. dir: `.` workspace: `default`
 Show Output
@@ -28,13 +29,17 @@ Changes to Outputs:
 postplan custom
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -61,13 +66,17 @@ Plan: 1 to add, 0 to change, 0 to destroy.
 Changes to Outputs:
   + workspace = "staging"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -w staging
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -w staging`
-
+  ```shell
+  atlantis plan -w staging
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -75,7 +84,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
 2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/server-side-cfg/versions.tf b/server/controllers/events/testdata/test-repos/server-side-cfg/versions.tf
new file mode 100644
index 0000000000..96a68f8056
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/server-side-cfg/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    null = {
+      source  = "hashicorp/null"
+      version = "= 3.2.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt
index 6e70ac89fb..822531032c 100644
--- a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-autoplan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
   + var = "default"
   + workspace = "default"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt
index 6e70ac89fb..822531032c 100644
--- a/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/exp-output-plan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
   + var = "default"
   + workspace = "default"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-with-lockfile/versions.tf b/server/controllers/events/testdata/test-repos/simple-with-lockfile/versions.tf
new file mode 100644
index 0000000000..96a68f8056
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/simple-with-lockfile/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    null = {
+      source  = "hashicorp/null"
+      version = "= 3.2.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt
index 61eac2271a..4e757a396c 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-apply-all.txt
@@ -2,6 +2,7 @@ Ran Apply for 2 projects:
 1. dir: `.` workspace: `default`
 1. dir: `.` workspace: `staging`
+---
 ### 1. dir: `.` workspace: `default`
 ```diff
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt
index dcbb45bf78..c445925f6c 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
 1. dir: `.` workspace: `default`
 1. dir: `.` workspace: `staging`
+---
 ### 1. dir: `.` workspace: `default`
 Show Output
@@ -29,13 +30,17 @@ Changes to Outputs:
 postplan
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -60,13 +65,17 @@ Changes to Outputs:
   + var = "fromfile"
   + workspace = "staging"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -w staging
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -w staging`
-
+  ```shell
+  atlantis plan -w staging
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -74,7 +83,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
 2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt
index f0419c9189..b944f4deab 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-default.txt
@@ -25,17 +25,25 @@ Changes to Outputs:
 postplan
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt
index 7e34016bab..64880424f6 100644
--- a/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/exp-output-plan-staging.txt
@@ -20,17 +20,25 @@ Changes to Outputs:
   + var = "fromfile"
   + workspace = "staging"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -w staging`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -w staging
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -w staging`
-
+  ```shell
+  atlantis plan -w staging
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple-yaml/versions.tf b/server/controllers/events/testdata/test-repos/simple-yaml/versions.tf
new file mode 100644
index 0000000000..96a68f8056
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/simple-yaml/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    null = {
+      source  = "hashicorp/null"
+      version = "= 3.2.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt
index 2977099b55..cb7dd9a752 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-apply-var-all.txt
@@ -2,6 +2,7 @@ Ran Apply for 2 projects:
 1. dir: `.` workspace: `default`
 1. dir: `.` workspace: `new_workspace`
+---
 ### 1. dir: `.` workspace: `default`
 Show Output
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
index 242515e415..13bdae3fac 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-new-workspace.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
   + var = "new_workspace"
   + workspace = "new_workspace"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -w new_workspace`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -w new_workspace
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -w new_workspace -- -var var=new_workspace`
-
+  ```shell
+  atlantis plan -w new_workspace -- -var var=new_workspace
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt
index 5a86cff0c9..ab28d0ca84 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan-var-overridden.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
   + var = "overridden"
   + workspace = "default"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d . -- -var var=overridden`
-
+  ```shell
+  atlantis plan -d . -- -var var=overridden
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt
index 54e5505476..191b540b63 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-atlantis-plan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
   + var = "default_workspace"
   + workspace = "default"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d . -- -var var=default_workspace`
-
+  ```shell
+  atlantis plan -d . -- -var var=default_workspace
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt
index c6e0823922..fd03e48aed 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-auto-policy-check.txt
@@ -5,13 +5,21 @@ Ran Policy Check for dir: `.` workspace: `default`
 ```
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To re-run policies **plan** this project again by commenting:
-    * `atlantis plan -d .`
+  ```shell
+  atlantis plan -d .
+  ```
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
diff --git a/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt
index 6e70ac89fb..822531032c 100644
--- a/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/simple/exp-output-autoplan.txt
@@ -30,17 +30,25 @@ Changes to Outputs:
   + var = "default"
   + workspace = "default"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/simple/versions.tf b/server/controllers/events/testdata/test-repos/simple/versions.tf
new file mode 100644
index 0000000000..96a68f8056
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/simple/versions.tf
@@ -0,0 +1,8 @@
+terraform {
+  required_providers {
+    null = {
+      source  = "hashicorp/null"
+      version = "= 3.2.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf
index 1af2266d40..0c4b79e3f8 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/main.tf
@@ -1,4 +1,3 @@
 resource "random_id" "dummy" {
-  keepers = {}
   byte_length = 1
 }
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/versions.tf
new file mode 100644
index 0000000000..2c49bb6727
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir1/versions.tf
@@ -0,0 +1,9 @@
+provider "random" {}
+terraform {
+  required_providers {
+    random = {
+      source  = "hashicorp/random"
+      version = "3.6.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf
index 1af2266d40..0c4b79e3f8 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/main.tf
@@ -1,4 +1,3 @@
 resource "random_id" "dummy" {
-  keepers = {}
   byte_length = 1
 }
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/versions.tf
new file mode 100644
index 0000000000..2c49bb6727
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/dir2/versions.tf
@@ -0,0 +1,9 @@
+provider "random" {}
+terraform {
+  required_providers {
+    random = {
+      source  = "hashicorp/random"
+      version = "3.6.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt
index 49c4dc2673..1de0174378 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-autoplan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
 1. dir: `dir1` workspace: `default`
 1. dir: `dir2` workspace: `default`
+---
 ### 1. dir: `dir1` workspace: `default`
 Show Output
@@ -21,18 +22,21 @@ Terraform will perform the following actions:
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 Plan: 1 to add, 0 to change, 0 to destroy.
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d dir1
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir1`
-
+  ```shell
+  atlantis plan -d dir1
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -54,18 +58,21 @@ Terraform will perform the following actions:
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 Plan: 1 to add, 0 to change, 0 to destroy.
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d dir2
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir2`
-
+  ```shell
+  atlantis plan -d dir2
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -73,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
 2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt
index 45b6c1ed55..8d98fee1d7 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy1.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir1`
\ No newline at end of file
+  ```shell
+  atlantis plan -d dir1
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt
index 7a28ec5e85..e6bef5251a 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-import-dummy2.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir2`
\ No newline at end of file
+  ```shell
+  atlantis plan -d dir2
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt
index 49c4dc2673..1de0174378 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan-again.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
 1. dir: `dir1` workspace: `default`
 1. dir: `dir2` workspace: `default`
+---
 ### 1. dir: `dir1` workspace: `default`
 Show Output
@@ -21,18 +22,21 @@ Terraform will perform the following actions:
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 Plan: 1 to add, 0 to change, 0 to destroy.
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d dir1
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir1`
-
+  ```shell
+  atlantis plan -d dir1
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -54,18 +58,21 @@ Terraform will perform the following actions:
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 Plan: 1 to add, 0 to change, 0 to destroy.
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d dir2
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir2`
-
+  ```shell
+  atlantis plan -d dir2
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
@@ -73,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy.
 2 projects, 2 with changes, 0 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt
index fb3cfdbbd7..d74495004a 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-plan.txt
@@ -2,6 +2,7 @@ Ran Plan for 2 projects:
 1. dir: `dir1` workspace: `default`
 1. dir: `dir2` workspace: `default`
+---
 ### 1. dir: `dir1` workspace: `default`
 ```diff
@@ -14,10 +15,14 @@ and found no differences, so no changes are needed.
 ```
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d dir1`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d dir1
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir1`
+  ```shell
+  atlantis plan -d dir1
+  ```
 ---
 ### 2. dir: `dir2` workspace: `default`
@@ -31,17 +36,25 @@ and found no differences, so no changes are needed.
 ```
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d dir2`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d dir2
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir2`
+  ```shell
+  atlantis plan -d dir2
+  ```
 ---
 ### Plan Summary
 2 projects, 0 with changes, 2 with no changes, 0 failed
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt
index 3c8e0eb0bb..973455d73c 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-multiple-project/exp-output-state-rm-multiple-projects.txt
@@ -2,6 +2,7 @@ Ran State for 2 projects:
 1. dir: `dir1` workspace: `default`
 1. dir: `dir2` workspace: `default`
+---
 ### 1. dir: `dir1` workspace: `default`
 ```diff
@@ -12,7 +13,9 @@ Successfully removed 1 resource instance(s).
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir1`
+  ```shell
+  atlantis plan -d dir1
+  ```
 ---
 ### 2. dir: `dir2` workspace: `default`
@@ -24,6 +27,8 @@ Successfully removed 1 resource instance(s).
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d dir2`
+  ```shell
+  atlantis plan -d dir2
+  ```
 ---
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt
index 077f989d9a..3728b1b223 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-autoplan.txt
@@ -17,7 +17,6 @@ Terraform will perform the following actions:
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 # random_id.for_each["default"] will be created
@@ -28,7 +27,6 @@
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 # random_id.simple will be created
@@ -39,22 +37,29 @@
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 Plan: 3 to add, 0 to change, 0 to destroy.
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
-
+  ```shell
+  atlantis plan -d .
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt
index d7957913db..32680f595f 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-count.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
\ No newline at end of file
+  ```shell
+  atlantis plan -d .
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt
index 284c8e2457..982e937496 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-foreach.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
\ No newline at end of file
+  ```shell
+  atlantis plan -d .
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt
index 1f17baa2d7..be74444839 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-import-simple.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
\ No newline at end of file
+  ```shell
+  atlantis plan -d .
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt
index edb4c17579..288ee1df89 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan-again.txt
@@ -17,7 +17,6 @@ Terraform will perform the following actions:
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 # random_id.for_each["overridden"] will be created
@@ -28,7 +27,6 @@
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 # random_id.simple will be created
@@ -39,22 +37,29 @@
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 Plan: 3 to add, 0 to change, 0 to destroy.
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d . -- -var var=overridden`
-
+  ```shell
+  atlantis plan -d . -- -var var=overridden
+  ```
 Plan: 3 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt
index 46a378158b..379d9e8ce7 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-plan.txt
@@ -8,13 +8,21 @@ and found no differences, so no changes are needed.
 ```
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -d .`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -d .
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d . -- -var var=overridden`
+  ```shell
+  atlantis plan -d . -- -var var=overridden
+  ```
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt
index 264b5f2881..a6f0f97cce 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-foreach.txt
@@ -8,4 +8,6 @@ Successfully removed 1 resource instance(s).
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
\ No newline at end of file
+  ```shell
+  atlantis plan -d .
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt
index a0d1b54717..0848fc65e8 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/exp-output-state-rm-multiple.txt
@@ -9,4 +9,6 @@ Successfully removed 2 resource instance(s).
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -d .`
\ No newline at end of file
+  ```shell
+  atlantis plan -d .
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf b/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf
index d434ac8645..05e52a00b2 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/main.tf
@@ -1,17 +1,14 @@
 resource "random_id" "simple" {
-  keepers = {}
   byte_length = 1
 }

 resource "random_id" "for_each" {
   for_each = toset([var.var])
-  keepers = {}
   byte_length = 1
 }

 resource "random_id" "count" {
   count = 1
-  keepers = {}
   byte_length = 1
 }
diff --git a/server/controllers/events/testdata/test-repos/state-rm-single-project/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-single-project/versions.tf
new file mode 100644
index 0000000000..2c49bb6727
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/state-rm-single-project/versions.tf
@@ -0,0 +1,9 @@
+provider "random" {}
+terraform {
+  required_providers {
+    random = {
+      source  = "hashicorp/random"
+      version = "3.6.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf
index 353cb66e31..3056320d04 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/main.tf
@@ -1,7 +1,6 @@
 resource "random_id" "dummy1" {
   count = terraform.workspace == "ops" ? 1 : 0
-  keepers = {}
   byte_length = 1
 }
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/versions.tf b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/versions.tf
new file mode 100644
index 0000000000..2c49bb6727
--- /dev/null
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/dir1/versions.tf
@@ -0,0 +1,9 @@
+provider "random" {}
+terraform {
+  required_providers {
+    random = {
+      source  = "hashicorp/random"
+      version = "3.6.2"
+    }
+  }
+}
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt
index a6a1dbbfaa..b81ff32704 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-import-dummy1.txt
@@ -15,4 +15,6 @@ your Terraform state and will henceforth be managed by Terraform.
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -p dir1-ops`
\ No newline at end of file
+  ```shell
+  atlantis plan -p dir1-ops
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt
index b24ee90b20..632b3cf24c 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan-again.txt
@@ -17,22 +17,29 @@ Terraform will perform the following actions:
   + dec = (known after apply)
   + hex = (known after apply)
   + id = (known after apply)
-  + keepers = {}
 }
 Plan: 1 to add, 0 to change, 0 to destroy.
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -p dir1-ops`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -p dir1-ops
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -p dir1-ops`
-
+  ```shell
+  atlantis plan -p dir1-ops
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt
index 4c73caa512..3beeb14cab 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-plan.txt
@@ -10,13 +10,21 @@ and found no differences, so no changes are needed.
 ```
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -p dir1-ops`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -p dir1-ops
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -p dir1-ops`
+  ```shell
+  atlantis plan -p dir1-ops
+  ```
 ---
-* :fast_forward: To **apply** all unapplied plans from this pull request, comment:
-    * `atlantis apply`
-* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment:
-    * `atlantis unlock`
\ No newline at end of file
+* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment:
+  ```shell
+  atlantis apply
+  ```
+* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment:
+  ```shell
+  atlantis unlock
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt
index 5aa99db217..8c63577a49 100644
--- a/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt
+++ b/server/controllers/events/testdata/test-repos/state-rm-workspace/exp-output-state-rm-dummy1.txt
@@ -8,4 +8,6 @@ Successfully removed 1 resource instance(s).
 :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying.
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -p dir1-ops`
\ No newline at end of file
+  ```shell
+  atlantis plan -p dir1-ops
+  ```
\ No newline at end of file
diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
index 20be38a244..cf3378bc59 100644
--- a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
+++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-default.txt
@@ -20,17 +20,25 @@ Changes to Outputs:
   + var = "default"
   + workspace = "default"
 ```
+
 * :arrow_forward: To **apply** this plan, comment:
-    * `atlantis apply -p default`
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url)
+  ```shell
+  atlantis apply -p default
+  ```
+* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url)
 * :repeat: To **plan** this project again, comment:
-    * `atlantis plan -p default`
-
+  ```shell
+  atlantis plan -p default
+  ```
 Plan: 1 to add, 0 to change, 0 to destroy.
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt index e34c9bc2dd..efad85de0e 100644 --- a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/exp-output-plan-staging.txt @@ -20,17 +20,25 @@ Changes to Outputs: + var = "staging" + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p staging` - + ```shell + atlantis plan -p staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/versions.tf b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml-no-autoplan/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt b/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt index 82ce193d9f..75c4320f96 100644 --- a/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml/exp-output-autoplan.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. project: `default` dir: `.` workspace: `default` 1. project: `staging` dir: `.` workspace: `default` +--- ### 1. project: `default` dir: `.` workspace: `default`
Show Output @@ -26,13 +27,17 @@ Changes to Outputs: workspace=default ``` +
* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p default` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p default + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p default` - + ```shell + atlantis plan -p default + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -57,13 +62,17 @@ Changes to Outputs: + var = "staging" + workspace = "default" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -p staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -p staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -p staging` - + ```shell + atlantis plan -p staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -71,7 +80,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/tfvars-yaml/versions.tf b/server/controllers/events/testdata/test-repos/tfvars-yaml/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/tfvars-yaml/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt index cd4e8e0b95..986241f599 100644 --- a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-production.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `production` workspace: `production` 1. dir: `staging` workspace: `staging` +--- ### 1. dir: `production` workspace: `production`
Show Output @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "production" ``` +
* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production -w production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production -w production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production -w production` - + ```shell + atlantis plan -d production -w production + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "staging" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging -w staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging -w staging` - + ```shell + atlantis plan -d staging -w staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt index cd4e8e0b95..986241f599 100644 --- a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/exp-output-autoplan-staging.txt @@ -2,6 +2,7 @@ Ran Plan for 2 projects: 1. dir: `production` workspace: `production` 1. dir: `staging` workspace: `staging` +--- ### 1. dir: `production` workspace: `production`
Show Output @@ -23,13 +24,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "production" ``` +
* :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d production -w production` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d production -w production + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d production -w production` - + ```shell + atlantis plan -d production -w production + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -53,13 +58,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. Changes to Outputs: + workspace = "staging" ``` + * :arrow_forward: To **apply** this plan, comment: - * `atlantis apply -d staging -w staging` -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + ```shell + atlantis apply -d staging -w staging + ``` +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * `atlantis plan -d staging -w staging` - + ```shell + atlantis plan -d staging -w staging + ``` Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -67,7 +76,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `atlantis apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `atlantis unlock` \ No newline at end of file +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + ```shell + atlantis apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + ```shell + atlantis unlock + ``` \ No newline at end of file diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/production/versions.tf b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/production/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/production/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/staging/versions.tf b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/staging/versions.tf new file mode 100644 index 0000000000..96a68f8056 --- /dev/null +++ b/server/controllers/events/testdata/test-repos/workspace-parallel-yaml/staging/versions.tf @@ -0,0 +1,8 @@ +terraform { + required_providers { + null = { + source = "hashicorp/null" + version = "= 3.2.2" + } + } +} diff --git a/server/controllers/github_app_controller.go b/server/controllers/github_app_controller.go index 5ac08d00fa..823c82928e 100644 --- a/server/controllers/github_app_controller.go +++ b/server/controllers/github_app_controller.go @@ -6,7 +6,7 @@ import ( "net/http" "net/url" - "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/web_templates" "github.com/runatlantis/atlantis/server/events/vcs" "github.com/runatlantis/atlantis/server/logging" ) @@ -70,7 +70,7 @@ func (g *GithubAppController) ExchangeCode(w http.ResponseWriter, r *http.Reques g.Logger.Debug("Found credentials for GitHub app %q with id %d", app.Name, app.ID) - err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{ + err = 
web_templates.GithubAppSetupTemplate.Execute(w, web_templates.GithubSetupData{ Target: "", Manifest: "", ID: app.ID, @@ -142,7 +142,7 @@ func (g *GithubAppController) New(w http.ResponseWriter, _ *http.Request) { return } - err = templates.GithubAppSetupTemplate.Execute(w, templates.GithubSetupData{ + err = web_templates.GithubAppSetupTemplate.Execute(w, web_templates.GithubSetupData{ Target: url.String(), Manifest: string(jsonManifest), }) diff --git a/server/controllers/jobs_controller.go b/server/controllers/jobs_controller.go index 0363977944..bb38a05e44 100644 --- a/server/controllers/jobs_controller.go +++ b/server/controllers/jobs_controller.go @@ -6,7 +6,7 @@ import ( "net/url" "github.com/gorilla/mux" - "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/web_templates" "github.com/runatlantis/atlantis/server/controllers/websocket" "github.com/runatlantis/atlantis/server/core/locking" "github.com/runatlantis/atlantis/server/logging" @@ -29,8 +29,8 @@ type JobsController struct { AtlantisVersion string AtlantisURL *url.URL Logger logging.SimpleLogging - ProjectJobsTemplate templates.TemplateWriter - ProjectJobsErrorTemplate templates.TemplateWriter + ProjectJobsTemplate web_templates.TemplateWriter + ProjectJobsErrorTemplate web_templates.TemplateWriter Backend locking.Backend WsMux *websocket.Multiplexor KeyGenerator JobIDKeyGenerator @@ -45,7 +45,7 @@ func (j *JobsController) getProjectJobs(w http.ResponseWriter, r *http.Request) return err } - viewData := templates.ProjectJobData{ + viewData := web_templates.ProjectJobData{ AtlantisVersion: j.AtlantisVersion, ProjectPath: jobID, CleanedBasePath: j.AtlantisURL.Path, diff --git a/server/controllers/locks_controller.go b/server/controllers/locks_controller.go index bab7fad27a..27b330c8b3 100644 --- a/server/controllers/locks_controller.go +++ b/server/controllers/locks_controller.go @@ -5,7 +5,7 @@ import ( "net/http" "net/url" - "github.com/runatlantis/atlantis/server/controllers/templates" + "github.com/runatlantis/atlantis/server/controllers/web_templates" "github.com/gorilla/mux" "github.com/runatlantis/atlantis/server/core/locking" @@ -23,7 +23,7 @@ type LocksController struct { Logger logging.SimpleLogging ApplyLocker locking.ApplyLocker VCSClient vcs.Client - LockDetailTemplate templates.TemplateWriter + LockDetailTemplate web_templates.TemplateWriter WorkingDir events.WorkingDir WorkingDirLocker events.WorkingDirLocker Backend locking.Backend @@ -73,12 +73,12 @@ func (l *LocksController) GetLock(w http.ResponseWriter, r *http.Request) { return } if lock == nil { - l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded) + l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id '%s'", idUnencoded) return } owner, repo := models.SplitRepoFullName(lock.Project.RepoFullName) - viewData := templates.LockDetailData{ + viewData := web_templates.LockDetailData{ LockKeyEncoded: id, LockKey: idUnencoded, PullRequestLink: lock.Pull.URL, @@ -107,18 +107,18 @@ func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) { idUnencoded, err := url.PathUnescape(id) if err != nil { - l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id %q. Failed with error: %s", id, err) + l.respond(w, logging.Warn, http.StatusBadRequest, "Invalid lock id '%s'. 
Failed with error: '%s'", id, err) return } - lock, err := l.DeleteLockCommand.DeleteLock(idUnencoded) + lock, err := l.DeleteLockCommand.DeleteLock(l.Logger, idUnencoded) if err != nil { - l.respond(w, logging.Error, http.StatusInternalServerError, "deleting lock failed with: %s", err) + l.respond(w, logging.Error, http.StatusInternalServerError, "deleting lock failed with: '%s'", err) return } if lock == nil { - l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id %q", idUnencoded) + l.respond(w, logging.Info, http.StatusNotFound, "No lock found at id '%s'", idUnencoded) return } @@ -139,7 +139,7 @@ func (l *LocksController) DeleteLock(w http.ResponseWriter, r *http.Request) { } else { l.Logger.Debug("skipping commenting on pull request and deleting workspace because BaseRepo field is empty") } - l.respond(w, logging.Info, http.StatusOK, "Deleted lock id %q", id) + l.respond(w, logging.Info, http.StatusOK, "Deleted lock id '%s'", id) } // respond is a helper function to respond and log the response. lvl is the log diff --git a/server/controllers/locks_controller_test.go b/server/controllers/locks_controller_test.go index 0f80e7c1f7..d878b34e33 100644 --- a/server/controllers/locks_controller_test.go +++ b/server/controllers/locks_controller_test.go @@ -11,8 +11,8 @@ import ( "time" "github.com/runatlantis/atlantis/server/controllers" - "github.com/runatlantis/atlantis/server/controllers/templates" - tMocks "github.com/runatlantis/atlantis/server/controllers/templates/mocks" + "github.com/runatlantis/atlantis/server/controllers/web_templates" + tMocks "github.com/runatlantis/atlantis/server/controllers/web_templates/mocks" "github.com/runatlantis/atlantis/server/core/db" "github.com/runatlantis/atlantis/server/core/locking" @@ -159,7 +159,7 @@ func TestGetLock_None(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.GetLock(w, req) - ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"") + ResponseContains(t, w, http.StatusNotFound, "No lock found at id 'id'") } func TestGetLock_Success(t *testing.T) { @@ -185,7 +185,7 @@ func TestGetLock_Success(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.GetLock(w, req) - tmpl.VerifyWasCalledOnce().Execute(w, templates.LockDetailData{ + tmpl.VerifyWasCalledOnce().Execute(w, web_templates.LockDetailData{ LockKeyEncoded: "id", LockKey: "id", RepoOwner: "owner", @@ -215,14 +215,14 @@ func TestDeleteLock_InvalidLockID(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "%A@"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id \"%A@\"") + ResponseContains(t, w, http.StatusBadRequest, "Invalid lock id '%A@'") } func TestDeleteLock_LockerErr(t *testing.T) { t.Log("If there is an error retrieving the lock, a 500 is returned") RegisterMockTestingT(t) dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(nil, errors.New("err")) + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(nil, errors.New("err")) lc := controllers.LocksController{ DeleteLockCommand: dlc, Logger: logging.NewNoopLogger(t), @@ -238,7 +238,7 @@ func TestDeleteLock_None(t *testing.T) { t.Log("If there is no lock at that ID we get a 404") RegisterMockTestingT(t) dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(nil, nil) + When(dlc.DeleteLock(Any[logging.SimpleLogging](), 
Eq("id"))).ThenReturn(nil, nil) lc := controllers.LocksController{ DeleteLockCommand: dlc, Logger: logging.NewNoopLogger(t), @@ -247,7 +247,7 @@ func TestDeleteLock_None(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusNotFound, "No lock found at id \"id\"") + ResponseContains(t, w, http.StatusNotFound, "No lock found at id 'id'") } func TestDeleteLock_OldFormat(t *testing.T) { @@ -255,7 +255,7 @@ func TestDeleteLock_OldFormat(t *testing.T) { RegisterMockTestingT(t) cp := vcsmocks.NewMockClient() dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{}, nil) + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{}, nil) lc := controllers.LocksController{ DeleteLockCommand: dlc, Logger: logging.NewNoopLogger(t), @@ -265,7 +265,7 @@ func TestDeleteLock_OldFormat(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") + ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") cp.VerifyWasCalled(Never()).CreateComment(Any[logging.SimpleLogging](), Any[models.Repo](), Any[int](), Any[string](), Any[string]()) } @@ -284,7 +284,7 @@ func TestDeleteLock_UpdateProjectStatus(t *testing.T) { pull := models.PullRequest{ BaseRepo: models.Repo{FullName: repoName}, } - When(l.DeleteLock("id")).ThenReturn(&models.ProjectLock{ + When(l.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{ Pull: pull, Workspace: workspaceName, Project: models.Project{ @@ -321,7 +321,7 @@ func TestDeleteLock_UpdateProjectStatus(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") + ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") status, err := backend.GetPullStatus(pull) Ok(t, err) Assert(t, status.Projects != nil, "status projects was nil") @@ -338,7 +338,7 @@ func TestDeleteLock_CommentFailed(t *testing.T) { t.Log("If the commenting fails we still return success") RegisterMockTestingT(t) dlc := mocks2.NewMockDeleteLockCommand() - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{ + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{ Pull: models.PullRequest{ BaseRepo: models.Repo{FullName: "owner/repo"}, }, @@ -363,7 +363,7 @@ func TestDeleteLock_CommentFailed(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") + ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") } func TestDeleteLock_CommentSuccess(t *testing.T) { @@ -380,7 +380,7 @@ func TestDeleteLock_CommentSuccess(t *testing.T) { pull := models.PullRequest{ BaseRepo: models.Repo{FullName: "owner/repo"}, } - When(dlc.DeleteLock("id")).ThenReturn(&models.ProjectLock{ + When(dlc.DeleteLock(Any[logging.SimpleLogging](), Eq("id"))).ThenReturn(&models.ProjectLock{ Pull: pull, Workspace: "workspace", Project: models.Project{ @@ -400,7 +400,7 @@ func TestDeleteLock_CommentSuccess(t *testing.T) { req = mux.SetURLVars(req, map[string]string{"id": "id"}) w := httptest.NewRecorder() lc.DeleteLock(w, req) - ResponseContains(t, w, http.StatusOK, "Deleted lock id \"id\"") + 
ResponseContains(t, w, http.StatusOK, "Deleted lock id 'id'") cp.VerifyWasCalled(Once()).CreateComment(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull.Num), Eq("**Warning**: The plan for dir: `path` workspace: `workspace` was **discarded** via the Atlantis UI.\n\n"+ "To `apply` this plan you must run `plan` again."), Eq("")) diff --git a/server/controllers/templates/web_templates.go b/server/controllers/templates/web_templates.go deleted file mode 100644 index 01bbc2faac..0000000000 --- a/server/controllers/templates/web_templates.go +++ /dev/null @@ -1,695 +0,0 @@ -// Copyright 2017 HootSuite Media Inc. -// -// Licensed under the Apache License, Version 2.0 (the License); -// you may not use this file except in compliance with the License. -// You may obtain a copy of the License at -// http://www.apache.org/licenses/LICENSE-2.0 -// Unless required by applicable law or agreed to in writing, software -// distributed under the License is distributed on an AS IS BASIS, -// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. -// See the License for the specific language governing permissions and -// limitations under the License. -// Modified hereafter by contributors to runatlantis/atlantis. - -package templates - -import ( - "html/template" - "io" - "time" - - "github.com/runatlantis/atlantis/server/jobs" -) - -//go:generate pegomock generate --package mocks -o mocks/mock_template_writer.go TemplateWriter - -// TemplateWriter is an interface over html/template that's used to enable -// mocking. -type TemplateWriter interface { - // Execute applies a parsed template to the specified data object, - // writing the output to wr. - Execute(wr io.Writer, data interface{}) error -} - -// LockIndexData holds the fields needed to display the index view for locks. -type LockIndexData struct { - LockPath string - RepoFullName string - PullNum int - Path string - Workspace string - LockedBy string - Time time.Time - TimeFormatted string -} - -// ApplyLockData holds the fields to display in the index view -type ApplyLockData struct { - Locked bool - GlobalApplyLockEnabled bool - Time time.Time - TimeFormatted string -} - -// IndexData holds the data for rendering the index page -type IndexData struct { - Locks []LockIndexData - PullToJobMapping []jobs.PullInfoWithJobIDs - - ApplyLock ApplyLockData - AtlantisVersion string - // CleanedBasePath is the path Atlantis is accessible at externally. If - // not using a path-based proxy, this will be an empty string. Never ends - // in a '/' (hence "cleaned"). - CleanedBasePath string -} - -var IndexTemplate = template.Must(template.New("index.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-
- -

atlantis

-

Plan discarded and unlocked!

-
-
- {{ if .ApplyLock.GlobalApplyLockEnabled }} - {{ if .ApplyLock.Locked }} -
-
Apply commands are disabled globally
-
Lock Status: Active
-
Active Since: {{ .ApplyLock.TimeFormatted }}
- Enable Apply Commands -
- {{ else }} -
-
Apply commands are enabled
- Disable Apply Commands -
- {{ end }} - {{ end }} -
-
-
-
-
-

Locks

- {{ $basePath := .CleanedBasePath }} - {{ if .Locks }} -
-
- Repository - Project - Workspace - Locked By - Date/Time - Status -
- {{ range .Locks }} - - {{ end }} -
- {{ else }} -

No locks found.

- {{ end }} -
-
-
-
-
-

Jobs

- {{ if .PullToJobMapping }} -
-
- Repository - Project - Workspace - Date/Time - Step - Description -
- {{ range .PullToJobMapping }} -
- {{ .Pull.RepoFullName }} #{{ .Pull.PullNum }} - {{ if .Pull.Path }}{{ .Pull.Path }}{{ end }} - {{ if .Pull.Workspace }}{{ .Pull.Workspace }}{{ end }} - - {{ range .JobIDInfos }} -
{{ .TimeFormatted }}
- {{ end }} -
- - {{ range .JobIDInfos }} - - {{ end }} - - - {{ range .JobIDInfos }} -
{{ .JobDescription }}
- {{ end }} -
-
- {{ end }} -
- {{ else }} -

No jobs found.

- {{ end }} -
- - -
-
-{{ .AtlantisVersion }} -
- - - -`)) - -// LockDetailData holds the fields needed to display the lock detail view. -type LockDetailData struct { - LockKeyEncoded string - LockKey string - RepoOwner string - RepoName string - PullRequestLink string - LockedBy string - Workspace string - AtlantisVersion string - // CleanedBasePath is the path Atlantis is accessible at externally. If - // not using a path-based proxy, this will be an empty string. Never ends - // in a '/' (hence "cleaned"). - CleanedBasePath string -} - -var LockTemplate = template.Must(template.New("lock.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - -
-
- -

atlantis

-

{{.LockKey}} Locked

-
- -
-
-
-
Repo Owner:
{{.RepoOwner}}
-
Repo Name:
{{.RepoName}}
-
Pull Request Link:
-
Locked By:
{{.LockedBy}}
-
Workspace:
{{.Workspace}}
-
-
- Discard Plan & Unlock -
-
- -
-v{{ .AtlantisVersion }} -
- - - -`)) - -// ProjectJobData holds the data needed to stream the current PR information -type ProjectJobData struct { - AtlantisVersion string - ProjectPath string - CleanedBasePath string -} - -var ProjectJobsTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
- -

atlantis

-

-
-
-
-
- -
Initializing... -
- - - - - - - - - - - -`)) - -type ProjectJobsError struct { - AtlantisVersion string - ProjectPath string - CleanedBasePath string -} - -var ProjectJobsErrorTemplate = template.Must(template.New("blank.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-
- -

atlantis

-

-
-
-
-
-
-
-
-
-
- - - - - - - - - -`)) - -// GithubSetupData holds the data for rendering the github app setup page -type GithubSetupData struct { - Target string - Manifest string - ID int64 - Key string - WebhookSecret string - URL string - CleanedBasePath string -} - -var GithubAppSetupTemplate = template.Must(template.New("github-app.html.tmpl").Parse(` - - - - - atlantis - - - - - - - - - - - -
-
- -

atlantis

- -

- {{ if .Target }} - Create a github app - {{ else }} - Github app created successfully! - {{ end }} -

-
-
- {{ if .Target }} -
- - -
- {{ else }} -

Visit {{ .URL }}/installations/new to install the app for your user or organization, then update the following values in your config and restart Atlantis:

- -
    -
  • gh-app-id:
    {{ .ID }}
  • -
  • gh-app-key-file:
    {{ .Key }}
  • -
  • gh-webhook-secret:
    {{ .WebhookSecret }}
  • -
- {{ end }} -
-
- - -`)) diff --git a/server/controllers/templates/mocks/mock_template_writer.go b/server/controllers/web_templates/mocks/mock_template_writer.go similarity index 97% rename from server/controllers/templates/mocks/mock_template_writer.go rename to server/controllers/web_templates/mocks/mock_template_writer.go index e3fafa580c..5d3e33a2ef 100644 --- a/server/controllers/templates/mocks/mock_template_writer.go +++ b/server/controllers/web_templates/mocks/mock_template_writer.go @@ -1,5 +1,5 @@ // Code generated by pegomock. DO NOT EDIT. -// Source: github.com/runatlantis/atlantis/server/controllers/templates (interfaces: TemplateWriter) +// Source: github.com/runatlantis/atlantis/server/controllers/web_templates (interfaces: TemplateWriter) package mocks diff --git a/server/controllers/web_templates/templates/github-app.html.tmpl b/server/controllers/web_templates/templates/github-app.html.tmpl new file mode 100644 index 0000000000..34ce01550d --- /dev/null +++ b/server/controllers/web_templates/templates/github-app.html.tmpl @@ -0,0 +1,81 @@ + + + + + atlantis + + + + + + + + + + + +
+
+ +

atlantis

+ +

+ {{ if .Target }} + Create a github app + {{ else }} + Github app created successfully! + {{ end }} +

+
+
+ {{ if .Target }} +
+ + +
+ {{ else }} +

Visit {{ .URL }}/installations/new to install the app for your user or organization, then update the following values in your config and restart Atlantis:

+ +
    +
  • gh-app-id:
    {{ .ID }}
  • +
  • gh-app-key-file:
    {{ .Key }}
  • +
  • gh-webhook-secret:
    {{ .WebhookSecret }}
  • +
+ {{ end }} +
+
+ + diff --git a/server/controllers/web_templates/templates/index.html.tmpl b/server/controllers/web_templates/templates/index.html.tmpl new file mode 100644 index 0000000000..b9021f9b61 --- /dev/null +++ b/server/controllers/web_templates/templates/index.html.tmpl @@ -0,0 +1,243 @@ + + + + + atlantis + + + + + + + + + + + +
+
+ +

atlantis

+

Plan discarded and unlocked!

+
+
+ {{ if .ApplyLock.GlobalApplyLockEnabled }} + {{ if .ApplyLock.Locked }} +
+
Apply commands are disabled globally
+
Lock Status: Active
+
Active Since: {{ .ApplyLock.TimeFormatted }}
+ Enable Apply Commands +
+ {{ else }} +
+
Apply commands are enabled
+ Disable Apply Commands +
+ {{ end }} + {{ end }} +
+
+
+
+
+

Locks

+ {{ $basePath := .CleanedBasePath }} + {{ if .Locks }} +
+
+ Repository + Project + Workspace + Locked By + Date/Time + Status +
+ {{ range .Locks }} + + {{ end }} +
+ {{ else }} +

No locks found.

+ {{ end }} +
+
+
+
+
+

Jobs

+ {{ if .PullToJobMapping }} +
+
+ Repository + Project + Workspace + Date/Time + Step + Description +
+ {{ range .PullToJobMapping }} +
+ {{ .Pull.RepoFullName }} #{{ .Pull.PullNum }} + {{ if .Pull.Path }}{{ .Pull.Path }}{{ end }} + {{ if .Pull.Workspace }}{{ .Pull.Workspace }}{{ end }} + + {{ range .JobIDInfos }} +
{{ .TimeFormatted }}
+ {{ end }} +
+ + {{ range .JobIDInfos }} + + {{ end }} + + + {{ range .JobIDInfos }} +
{{ .JobDescription }}
+ {{ end }} +
+
+ {{ end }} +
+ {{ else }} +

No jobs found.

+ {{ end }} +
+ + +
+
+{{ .AtlantisVersion }} +
+ + + diff --git a/server/controllers/web_templates/templates/lock.html.tmpl b/server/controllers/web_templates/templates/lock.html.tmpl new file mode 100644 index 0000000000..56bf25a06b --- /dev/null +++ b/server/controllers/web_templates/templates/lock.html.tmpl @@ -0,0 +1,97 @@ + + + + + atlantis + + + + + + + + + + +
+
+ +

atlantis

+

{{.LockKey}} Locked

+
+ +
+
+
+
Repo Owner:
{{.RepoOwner}}
+
Repo Name:
{{.RepoName}}
+
Pull Request Link:
+
Locked By:
{{.LockedBy}}
+
Workspace:
{{.Workspace}}
+
+
+ Discard Plan & Unlock +
+
+ +
+v{{ .AtlantisVersion }} +
+ + + \ No newline at end of file diff --git a/server/controllers/web_templates/templates/project-jobs-error.html.tmpl b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl new file mode 100644 index 0000000000..8eead799b7 --- /dev/null +++ b/server/controllers/web_templates/templates/project-jobs-error.html.tmpl @@ -0,0 +1,59 @@ + + + + + atlantis + + + + + + + + + + + +
+
+ +

atlantis

+

+
+
+
+
+
+
+
+
+
+ + + + + + + + + diff --git a/server/controllers/web_templates/templates/project-jobs.html.tmpl b/server/controllers/web_templates/templates/project-jobs.html.tmpl new file mode 100644 index 0000000000..aaeb222568 --- /dev/null +++ b/server/controllers/web_templates/templates/project-jobs.html.tmpl @@ -0,0 +1,95 @@ + + + + + atlantis + + + + + + + + + + + +
+ +

atlantis

+

+
+
+
+
+ +
Initializing... +
+ + + + + + + + + + + diff --git a/server/controllers/web_templates/web_templates.go b/server/controllers/web_templates/web_templates.go new file mode 100644 index 0000000000..0794c80fba --- /dev/null +++ b/server/controllers/web_templates/web_templates.go @@ -0,0 +1,131 @@ +// Copyright 2017 HootSuite Media Inc. +// +// Licensed under the Apache License, Version 2.0 (the License); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// http://www.apache.org/licenses/LICENSE-2.0 +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an AS IS BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. +// Modified hereafter by contributors to runatlantis/atlantis. + +package web_templates + +import ( + "embed" + "html/template" + "io" + "time" + + "github.com/Masterminds/sprig/v3" + "github.com/runatlantis/atlantis/server/jobs" +) + +//go:generate pegomock generate --package mocks -o mocks/mock_template_writer.go TemplateWriter + +//go:embed templates/* +var templatesFS embed.FS + +// Read all the templates from the embedded filesystem +var templates, _ = template.New("").Funcs(sprig.TxtFuncMap()).ParseFS(templatesFS, "templates/*.tmpl") + +var templateFileNames = map[string]string{ + "index": "index.html.tmpl", + "lock": "lock.html.tmpl", + "project-jobs": "project-jobs.html.tmpl", + "project-jobs-error": "project-jobs-error.html.tmpl", + "github-app": "github-app.html.tmpl", +} + +// TemplateWriter is an interface over html/template that's used to enable +// mocking. +type TemplateWriter interface { + // Execute applies a parsed template to the specified data object, + // writing the output to wr. + Execute(wr io.Writer, data interface{}) error +} + +// LockIndexData holds the fields needed to display the index view for locks. +type LockIndexData struct { + LockPath string + RepoFullName string + PullNum int + Path string + Workspace string + LockedBy string + Time time.Time + TimeFormatted string +} + +// ApplyLockData holds the fields to display in the index view +type ApplyLockData struct { + Locked bool + GlobalApplyLockEnabled bool + Time time.Time + TimeFormatted string +} + +// IndexData holds the data for rendering the index page +type IndexData struct { + Locks []LockIndexData + PullToJobMapping []jobs.PullInfoWithJobIDs + + ApplyLock ApplyLockData + AtlantisVersion string + // CleanedBasePath is the path Atlantis is accessible at externally. If + // not using a path-based proxy, this will be an empty string. Never ends + // in a '/' (hence "cleaned"). + CleanedBasePath string +} + +var IndexTemplate = templates.Lookup(templateFileNames["index"]) + +// LockDetailData holds the fields needed to display the lock detail view. +type LockDetailData struct { + LockKeyEncoded string + LockKey string + RepoOwner string + RepoName string + PullRequestLink string + LockedBy string + Workspace string + AtlantisVersion string + // CleanedBasePath is the path Atlantis is accessible at externally. If + // not using a path-based proxy, this will be an empty string. Never ends + // in a '/' (hence "cleaned"). 
+ CleanedBasePath string +} + +var LockTemplate = templates.Lookup(templateFileNames["lock"]) + +// ProjectJobData holds the data needed to stream the current PR information +type ProjectJobData struct { + AtlantisVersion string + ProjectPath string + CleanedBasePath string +} + +var ProjectJobsTemplate = templates.Lookup(templateFileNames["project-jobs"]) + +type ProjectJobsError struct { + AtlantisVersion string + ProjectPath string + CleanedBasePath string +} + +var ProjectJobsErrorTemplate = templates.Lookup(templateFileNames["project-jobs-error"]) + +// GithubSetupData holds the data for rendering the github app setup page +type GithubSetupData struct { + Target string + Manifest string + ID int64 + Key string + WebhookSecret string + URL string + CleanedBasePath string +} + +var GithubAppSetupTemplate = templates.Lookup(templateFileNames["github-app"]) diff --git a/server/controllers/templates/web_templates_test.go b/server/controllers/web_templates/web_templates_test.go similarity index 95% rename from server/controllers/templates/web_templates_test.go rename to server/controllers/web_templates/web_templates_test.go index 5b88c3e1d9..0ce6f00a9a 100644 --- a/server/controllers/templates/web_templates_test.go +++ b/server/controllers/web_templates/web_templates_test.go @@ -1,4 +1,4 @@ -package templates +package web_templates import ( "io" @@ -19,13 +19,13 @@ func TestIndexTemplate(t *testing.T) { Path: "path", Workspace: "workspace", Time: time.Now(), - TimeFormatted: "02-01-2006 15:04:05", + TimeFormatted: "2006-01-02 15:04:05", }, }, ApplyLock: ApplyLockData{ Locked: true, Time: time.Now(), - TimeFormatted: "02-01-2006 15:04:05", + TimeFormatted: "2006-01-02 15:04:05", }, AtlantisVersion: "v0.0.0", CleanedBasePath: "/path", diff --git a/server/core/config/parser_validator_test.go b/server/core/config/parser_validator_test.go index 08be7173b8..815adbd338 100644 --- a/server/core/config/parser_validator_test.go +++ b/server/core/config/parser_validator_test.go @@ -1286,7 +1286,7 @@ func TestParseGlobalCfg(t *testing.T) { input: `repos: - id: /.*/ allowed_overrides: [invalid]`, - expErr: "repos: (0: (allowed_overrides: \"invalid\" is not a valid override, only \"plan_requirements\", \"apply_requirements\", \"import_requirements\", \"workflow\", \"delete_source_branch_on_merge\", \"repo_locking\", \"policy_check\", and \"custom_policy_check\" are supported.).).", + expErr: "repos: (0: (allowed_overrides: \"invalid\" is not a valid override, only \"plan_requirements\", \"apply_requirements\", \"import_requirements\", \"workflow\", \"delete_source_branch_on_merge\", \"repo_locking\", \"repo_locks\", \"policy_check\", \"custom_policy_check\", and \"silence_pr_comments\" are supported.).).", }, "invalid plan_requirement": { input: `repos: @@ -1306,8 +1306,14 @@ func TestParseGlobalCfg(t *testing.T) { import_requirements: [invalid]`, expErr: "repos: (0: (import_requirements: \"invalid\" is not a valid import_requirement, only \"approved\", \"mergeable\" and \"undiverged\" are supported.).).", }, + "invalid silence_pr_comments": { + input: `repos: +- id: /.*/ + silence_pr_comments: [invalid]`, + expErr: "server-side repo config 'silence_pr_comments' key value of 'invalid' is not supported, supported values are [plan, apply]", + }, "disable autodiscover": { - input: `repos: + input: `repos: - id: /.*/ autodiscover: mode: disabled`, @@ -1322,6 +1328,22 @@ func TestParseGlobalCfg(t *testing.T) { Workflows: defaultCfg.Workflows, }, }, + "disable repo locks": { + input: `repos: +- id: /.*/ + 
repo_locks: + mode: disabled`, + exp: valid.GlobalCfg{ + Repos: []valid.Repo{ + defaultCfg.Repos[0], + { + IDRegex: regexp.MustCompile(".*"), + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksDisabledMode}, + }, + }, + Workflows: defaultCfg.Workflows, + }, + }, "no workflows key": { input: `repos: []`, exp: defaultCfg, @@ -1399,6 +1421,8 @@ repos: policy_check: true autodiscover: mode: enabled + repo_locks: + mode: on_apply - id: /.*/ branch: /(master|main)/ pre_workflow_hooks: @@ -1408,6 +1432,8 @@ repos: policy_check: false autodiscover: mode: disabled + repo_locks: + mode: disabled workflows: custom1: plan: @@ -1455,6 +1481,7 @@ policies: AllowCustomWorkflows: Bool(true), PolicyCheck: Bool(true), AutoDiscover: &valid.AutoDiscover{Mode: valid.AutoDiscoverEnabledMode}, + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksOnApplyMode}, }, { IDRegex: regexp.MustCompile(".*"), @@ -1463,6 +1490,7 @@ policies: PostWorkflowHooks: postWorkflowHooks, PolicyCheck: Bool(false), AutoDiscover: &valid.AutoDiscover{Mode: valid.AutoDiscoverDisabledMode}, + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksDisabledMode}, }, }, Workflows: map[string]valid.Workflow{ @@ -1570,7 +1598,7 @@ workflows: AllowedOverrides: []string{}, AllowCustomWorkflows: Bool(false), DeleteSourceBranchOnMerge: Bool(false), - RepoLocking: Bool(true), + RepoLocks: &valid.DefaultRepoLocks, PolicyCheck: Bool(false), CustomPolicyCheck: Bool(false), AutoDiscover: raw.DefaultAutoDiscover(), @@ -1721,6 +1749,9 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { "allow_custom_workflows": true, "autodiscover": { "mode": "enabled" + }, + "repo_locks": { + "mode": "on_apply" } }, { @@ -1782,6 +1813,7 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { AllowedOverrides: []string{"workflow", "apply_requirements"}, AllowCustomWorkflows: Bool(true), AutoDiscover: &valid.AutoDiscover{Mode: valid.AutoDiscoverEnabledMode}, + RepoLocks: &valid.RepoLocks{Mode: valid.RepoLocksOnApplyMode}, }, { ID: "github.com/owner/repo", @@ -1790,6 +1822,7 @@ func TestParserValidator_ParseGlobalCfgJSON(t *testing.T) { AllowedOverrides: nil, AllowCustomWorkflows: nil, AutoDiscover: nil, + RepoLocks: nil, }, }, Workflows: map[string]valid.Workflow{ diff --git a/server/core/config/raw/global_cfg.go b/server/core/config/raw/global_cfg.go index b795294239..0c9b2c351d 100644 --- a/server/core/config/raw/global_cfg.go +++ b/server/core/config/raw/global_cfg.go @@ -8,6 +8,7 @@ import ( validation "github.com/go-ozzo/ozzo-validation" "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/config/valid" + "github.com/runatlantis/atlantis/server/utils" ) // GlobalCfg is the raw schema for server-side repo config. 
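The `repo_locks` test cases above parse a new block in the server-side repos config. For reference, a minimal sketch of that config (illustrative only; per the raw schema changes below, the accepted modes are `disabled`, `on_plan`, and `on_apply`):

```yaml
# Sketch only: disabling Atlantis repo locks for every repo, mirroring the
# "disable repo locks" test input above.
repos:
- id: /.*/
  repo_locks:
    mode: disabled   # accepted modes: disabled | on_plan | on_apply
```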
@@ -34,9 +35,11 @@ type Repo struct { AllowCustomWorkflows *bool `yaml:"allow_custom_workflows,omitempty" json:"allow_custom_workflows,omitempty"` DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty" json:"delete_source_branch_on_merge,omitempty"` RepoLocking *bool `yaml:"repo_locking,omitempty" json:"repo_locking,omitempty"` + RepoLocks *RepoLocks `yaml:"repo_locks,omitempty" json:"repo_locks,omitempty"` PolicyCheck *bool `yaml:"policy_check,omitempty" json:"policy_check,omitempty"` CustomPolicyCheck *bool `yaml:"custom_policy_check,omitempty" json:"custom_policy_check,omitempty"` AutoDiscover *AutoDiscover `yaml:"autodiscover,omitempty" json:"autodiscover,omitempty"` + SilencePRComments []string `yaml:"silence_pr_comments,omitempty" json:"silence_pr_comments,omitempty"` } func (g GlobalCfg) Validate() error { @@ -93,6 +96,24 @@ func (g GlobalCfg) Validate() error { } } } + + // Validate supported SilencePRComments values. + for _, repo := range g.Repos { + if repo.SilencePRComments == nil { + continue + } + for _, silenceStage := range repo.SilencePRComments { + if !utils.SlicesContains(valid.AllowedSilencePRComments, silenceStage) { + return fmt.Errorf( + "server-side repo config '%s' key value of '%s' is not supported, supported values are [%s]", + valid.SilencePRCommentsKey, + silenceStage, + strings.Join(valid.AllowedSilencePRComments, ", "), + ) + } + } + } + return nil } @@ -194,8 +215,8 @@ func (r Repo) Validate() error { overridesValid := func(value interface{}) error { overrides := value.([]string) for _, o := range overrides { - if o != valid.PlanRequirementsKey && o != valid.ApplyRequirementsKey && o != valid.ImportRequirementsKey && o != valid.WorkflowKey && o != valid.DeleteSourceBranchOnMergeKey && o != valid.RepoLockingKey && o != valid.PolicyCheckKey && o != valid.CustomPolicyCheckKey { - return fmt.Errorf("%q is not a valid override, only %q, %q, %q, %q, %q, %q, %q, and %q are supported", o, valid.PlanRequirementsKey, valid.ApplyRequirementsKey, valid.ImportRequirementsKey, valid.WorkflowKey, valid.DeleteSourceBranchOnMergeKey, valid.RepoLockingKey, valid.PolicyCheckKey, valid.CustomPolicyCheckKey) + if o != valid.PlanRequirementsKey && o != valid.ApplyRequirementsKey && o != valid.ImportRequirementsKey && o != valid.WorkflowKey && o != valid.DeleteSourceBranchOnMergeKey && o != valid.RepoLockingKey && o != valid.RepoLocksKey && o != valid.PolicyCheckKey && o != valid.CustomPolicyCheckKey && o != valid.SilencePRCommentsKey { + return fmt.Errorf("%q is not a valid override, only %q, %q, %q, %q, %q, %q, %q, %q, %q, and %q are supported", o, valid.PlanRequirementsKey, valid.ApplyRequirementsKey, valid.ImportRequirementsKey, valid.WorkflowKey, valid.DeleteSourceBranchOnMergeKey, valid.RepoLockingKey, valid.RepoLocksKey, valid.PolicyCheckKey, valid.CustomPolicyCheckKey, valid.SilencePRCommentsKey) } } return nil @@ -220,6 +241,14 @@ func (r Repo) Validate() error { return nil } + repoLocksValid := func(value interface{}) error { + repoLocks := value.(*RepoLocks) + if repoLocks != nil { + return repoLocks.Validate() + } + return nil + } + return validation.ValidateStruct(&r, validation.Field(&r.ID, validation.Required, validation.By(idValid)), validation.Field(&r.Branch, validation.By(branchValid)), @@ -231,6 +260,7 @@ func (r Repo) Validate() error { validation.Field(&r.Workflow, validation.By(workflowExists)), validation.Field(&r.DeleteSourceBranchOnMerge, validation.By(deleteSourceBranchOnMergeValid)), validation.Field(&r.AutoDiscover, 
validation.By(autoDiscoverValid)), + validation.Field(&r.RepoLocks, validation.By(repoLocksValid)), ) } @@ -330,6 +360,11 @@ OuterGlobalImportReqs: autoDiscover = r.AutoDiscover.ToValid() } + var repoLocks *valid.RepoLocks + if r.RepoLocks != nil { + repoLocks = r.RepoLocks.ToValid() + } + return valid.Repo{ ID: id, IDRegex: idRegex, @@ -346,8 +381,10 @@ OuterGlobalImportReqs: AllowCustomWorkflows: r.AllowCustomWorkflows, DeleteSourceBranchOnMerge: r.DeleteSourceBranchOnMerge, RepoLocking: r.RepoLocking, + RepoLocks: repoLocks, PolicyCheck: r.PolicyCheck, CustomPolicyCheck: r.CustomPolicyCheck, AutoDiscover: autoDiscover, + SilencePRComments: r.SilencePRComments, } } diff --git a/server/core/config/raw/project.go b/server/core/config/raw/project.go index d73062cef3..fe0e656a8c 100644 --- a/server/core/config/raw/project.go +++ b/server/core/config/raw/project.go @@ -21,22 +21,24 @@ const ( ) type Project struct { - Name *string `yaml:"name,omitempty"` - Branch *string `yaml:"branch,omitempty"` - Dir *string `yaml:"dir,omitempty"` - Workspace *string `yaml:"workspace,omitempty"` - Workflow *string `yaml:"workflow,omitempty"` - TerraformVersion *string `yaml:"terraform_version,omitempty"` - Autoplan *Autoplan `yaml:"autoplan,omitempty"` - PlanRequirements []string `yaml:"plan_requirements,omitempty"` - ApplyRequirements []string `yaml:"apply_requirements,omitempty"` - ImportRequirements []string `yaml:"import_requirements,omitempty"` - DependsOn []string `yaml:"depends_on,omitempty"` - DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty"` - RepoLocking *bool `yaml:"repo_locking,omitempty"` - ExecutionOrderGroup *int `yaml:"execution_order_group,omitempty"` - PolicyCheck *bool `yaml:"policy_check,omitempty"` - CustomPolicyCheck *bool `yaml:"custom_policy_check,omitempty"` + Name *string `yaml:"name,omitempty"` + Branch *string `yaml:"branch,omitempty"` + Dir *string `yaml:"dir,omitempty"` + Workspace *string `yaml:"workspace,omitempty"` + Workflow *string `yaml:"workflow,omitempty"` + TerraformVersion *string `yaml:"terraform_version,omitempty"` + Autoplan *Autoplan `yaml:"autoplan,omitempty"` + PlanRequirements []string `yaml:"plan_requirements,omitempty"` + ApplyRequirements []string `yaml:"apply_requirements,omitempty"` + ImportRequirements []string `yaml:"import_requirements,omitempty"` + DependsOn []string `yaml:"depends_on,omitempty"` + DeleteSourceBranchOnMerge *bool `yaml:"delete_source_branch_on_merge,omitempty"` + RepoLocking *bool `yaml:"repo_locking,omitempty"` + RepoLocks *RepoLocks `yaml:"repo_locks,omitempty"` + ExecutionOrderGroup *int `yaml:"execution_order_group,omitempty"` + PolicyCheck *bool `yaml:"policy_check,omitempty"` + CustomPolicyCheck *bool `yaml:"custom_policy_check,omitempty"` + SilencePRComments []string `yaml:"silence_pr_comments,omitempty"` } func (p Project) Validate() error { @@ -139,6 +141,10 @@ func (p Project) ToValid() valid.Project { v.RepoLocking = p.RepoLocking } + if p.RepoLocks != nil { + v.RepoLocks = p.RepoLocks.ToValid() + } + if p.ExecutionOrderGroup != nil { v.ExecutionOrderGroup = *p.ExecutionOrderGroup } @@ -151,6 +157,10 @@ func (p Project) ToValid() valid.Project { v.CustomPolicyCheck = p.CustomPolicyCheck } + if p.SilencePRComments != nil { + v.SilencePRComments = p.SilencePRComments + } + return v } diff --git a/server/core/config/raw/project_test.go b/server/core/config/raw/project_test.go index 72a8dd78d0..3c69177f96 100644 --- a/server/core/config/raw/project_test.go +++ 
b/server/core/config/raw/project_test.go @@ -331,6 +331,7 @@ func TestProject_Validate(t *testing.T) { func TestProject_ToValid(t *testing.T) { tfVersionPointEleven, _ := version.NewVersion("v0.11.0") + repoLocksOnApply := valid.RepoLocksOnApplyMode cases := []struct { description string input raw.Project @@ -366,6 +367,9 @@ func TestProject_ToValid(t *testing.T) { WhenModified: []string{"hi"}, Enabled: Bool(false), }, + RepoLocks: &raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, ApplyRequirements: []string{"approved"}, Name: String("myname"), ExecutionOrderGroup: Int(10), @@ -379,6 +383,9 @@ func TestProject_ToValid(t *testing.T) { WhenModified: []string{"hi"}, Enabled: false, }, + RepoLocks: &valid.RepoLocks{ + Mode: repoLocksOnApply, + }, ApplyRequirements: []string{"approved"}, Name: String("myname"), ExecutionOrderGroup: 10, diff --git a/server/core/config/raw/repo_cfg.go b/server/core/config/raw/repo_cfg.go index f3a688725d..9aa18c7733 100644 --- a/server/core/config/raw/repo_cfg.go +++ b/server/core/config/raw/repo_cfg.go @@ -27,6 +27,8 @@ type RepoCfg struct { EmojiReaction *string `yaml:"emoji_reaction,omitempty"` AllowedRegexpPrefixes []string `yaml:"allowed_regexp_prefixes,omitempty"` AbortOnExcecutionOrderFail *bool `yaml:"abort_on_execution_order_fail,omitempty"` + RepoLocks *RepoLocks `yaml:"repo_locks,omitempty"` + SilencePRComments []string `yaml:"silence_pr_comments,omitempty"` } func (r RepoCfg) Validate() error { @@ -77,6 +79,10 @@ func (r RepoCfg) ToValid() valid.RepoCfg { autoDiscover = r.AutoDiscover.ToValid() } + var repoLocks *valid.RepoLocks + if r.RepoLocks != nil { + repoLocks = r.RepoLocks.ToValid() + } return valid.RepoCfg{ Version: *r.Version, Projects: validProjects, @@ -90,5 +96,7 @@ func (r RepoCfg) ToValid() valid.RepoCfg { AllowedRegexpPrefixes: r.AllowedRegexpPrefixes, EmojiReaction: emojiReaction, AbortOnExcecutionOrderFail: abortOnExcecutionOrderFail, + RepoLocks: repoLocks, + SilencePRComments: r.SilencePRComments, } } diff --git a/server/core/config/raw/repo_cfg_test.go b/server/core/config/raw/repo_cfg_test.go index 31d01101dd..b3844ee68c 100644 --- a/server/core/config/raw/repo_cfg_test.go +++ b/server/core/config/raw/repo_cfg_test.go @@ -11,6 +11,8 @@ import ( func TestConfig_UnmarshalYAML(t *testing.T) { autoDiscoverEnabled := valid.AutoDiscoverEnabledMode + repoLocksDisabled := valid.RepoLocksDisabledMode + repoLocksOnApply := valid.RepoLocksOnApplyMode cases := []struct { description string input string @@ -130,6 +132,8 @@ autodiscover: mode: enabled parallel_apply: true parallel_plan: false +repo_locks: + mode: on_apply projects: - dir: mydir workspace: myworkspace @@ -139,6 +143,8 @@ projects: enabled: false when_modified: [] apply_requirements: [mergeable] + repo_locks: + mode: disabled workflows: default: plan: @@ -156,6 +162,7 @@ allowed_regexp_prefixes: Automerge: Bool(true), ParallelApply: Bool(true), ParallelPlan: Bool(false), + RepoLocks: &raw.RepoLocks{Mode: &repoLocksOnApply}, Projects: []raw.Project{ { Dir: String("mydir"), @@ -167,6 +174,7 @@ allowed_regexp_prefixes: Enabled: Bool(false), }, ApplyRequirements: []string{"mergeable"}, + RepoLocks: &raw.RepoLocks{Mode: &repoLocksDisabled}, }, }, Workflows: map[string]raw.Workflow{ @@ -236,6 +244,7 @@ func TestConfig_Validate(t *testing.T) { func TestConfig_ToValid(t *testing.T) { autoDiscoverEnabled := valid.AutoDiscoverEnabledMode + repoLocksOnApply := valid.RepoLocksOnApplyMode cases := []struct { description string input raw.RepoCfg @@ -256,12 +265,14 @@ func TestConfig_ToValid(t 
*testing.T) { AutoDiscover: &raw.AutoDiscover{}, Workflows: map[string]raw.Workflow{}, Projects: []raw.Project{}, + RepoLocks: &raw.RepoLocks{}, }, exp: valid.RepoCfg{ Version: 2, AutoDiscover: raw.DefaultAutoDiscover(), Workflows: map[string]valid.Workflow{}, Projects: nil, + RepoLocks: &valid.DefaultRepoLocks, }, }, { @@ -333,6 +344,30 @@ func TestConfig_ToValid(t *testing.T) { Workflows: map[string]valid.Workflow{}, }, }, + { + description: "repo_locks omitted", + input: raw.RepoCfg{ + Version: Int(2), + }, + exp: valid.RepoCfg{ + Version: 2, + Workflows: map[string]valid.Workflow{}, + }, + }, + { + description: "repo_locks included", + input: raw.RepoCfg{ + Version: Int(2), + RepoLocks: &raw.RepoLocks{Mode: &repoLocksOnApply}, + }, + exp: valid.RepoCfg{ + Version: 2, + RepoLocks: &valid.RepoLocks{ + Mode: valid.RepoLocksOnApplyMode, + }, + Workflows: map[string]valid.Workflow{}, + }, + }, { description: "only plan stage set", input: raw.RepoCfg{ @@ -372,6 +407,9 @@ func TestConfig_ToValid(t *testing.T) { AutoDiscover: &raw.AutoDiscover{ Mode: &autoDiscoverEnabled, }, + RepoLocks: &raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, Workflows: map[string]raw.Workflow{ "myworkflow": { Apply: &raw.Stage{ @@ -424,6 +462,9 @@ func TestConfig_ToValid(t *testing.T) { AutoDiscover: &valid.AutoDiscover{ Mode: valid.AutoDiscoverEnabledMode, }, + RepoLocks: &valid.RepoLocks{ + Mode: valid.RepoLocksOnApplyMode, + }, Workflows: map[string]valid.Workflow{ "myworkflow": { Name: "myworkflow", diff --git a/server/core/config/raw/repo_locks.go b/server/core/config/raw/repo_locks.go new file mode 100644 index 0000000000..60ab8461fa --- /dev/null +++ b/server/core/config/raw/repo_locks.go @@ -0,0 +1,30 @@ +package raw + +import ( + validation "github.com/go-ozzo/ozzo-validation" + "github.com/runatlantis/atlantis/server/core/config/valid" +) + +type RepoLocks struct { + Mode *valid.RepoLocksMode `yaml:"mode,omitempty"` +} + +func (a RepoLocks) ToValid() *valid.RepoLocks { + var v valid.RepoLocks + + if a.Mode != nil { + v.Mode = *a.Mode + } else { + v.Mode = valid.DefaultRepoLocksMode + } + + return &v +} + +func (a RepoLocks) Validate() error { + res := validation.ValidateStruct(&a, + // If a.Mode is nil, this should still pass validation. + validation.Field(&a.Mode, validation.In(valid.RepoLocksDisabledMode, valid.RepoLocksOnPlanMode, valid.RepoLocksOnApplyMode)), + ) + return res +} diff --git a/server/core/config/raw/repo_locks_test.go b/server/core/config/raw/repo_locks_test.go new file mode 100644 index 0000000000..8a8d45a0fe --- /dev/null +++ b/server/core/config/raw/repo_locks_test.go @@ -0,0 +1,128 @@ +package raw_test + +import ( + "testing" + + "github.com/runatlantis/atlantis/server/core/config/raw" + "github.com/runatlantis/atlantis/server/core/config/valid" + . 
"github.com/runatlantis/atlantis/testing" +) + +func TestRepoLocks_UnmarshalYAML(t *testing.T) { + repoLocksOnPlan := valid.RepoLocksOnPlanMode + cases := []struct { + description string + input string + exp raw.RepoLocks + }{ + { + description: "omit unset fields", + input: "", + exp: raw.RepoLocks{ + Mode: nil, + }, + }, + { + description: "all fields set", + input: ` +mode: on_plan +`, + exp: raw.RepoLocks{ + Mode: &repoLocksOnPlan, + }, + }, + } + + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + var a raw.RepoLocks + err := unmarshalString(c.input, &a) + Ok(t, err) + Equals(t, c.exp, a) + }) + } +} + +func TestRepoLocks_Validate(t *testing.T) { + repoLocksDisabled := valid.RepoLocksDisabledMode + repoLocksOnPlan := valid.RepoLocksOnPlanMode + repoLocksOnApply := valid.RepoLocksOnApplyMode + randomString := valid.RepoLocksMode("random_string") + cases := []struct { + description string + input raw.RepoLocks + errContains *string + }{ + { + description: "nothing set", + input: raw.RepoLocks{}, + errContains: nil, + }, + { + description: "mode set to disabled", + input: raw.RepoLocks{ + Mode: &repoLocksDisabled, + }, + errContains: nil, + }, + { + description: "mode set to on_plan", + input: raw.RepoLocks{ + Mode: &repoLocksOnPlan, + }, + errContains: nil, + }, + { + description: "mode set to on_apply", + input: raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, + errContains: nil, + }, + { + description: "mode set to random string", + input: raw.RepoLocks{ + Mode: &randomString, + }, + errContains: String("valid value"), + }, + } + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + if c.errContains == nil { + Ok(t, c.input.Validate()) + } else { + ErrContains(t, *c.errContains, c.input.Validate()) + } + }) + } +} + +func TestRepoLocks_ToValid(t *testing.T) { + repoLocksOnApply := valid.RepoLocksOnApplyMode + cases := []struct { + description string + input raw.RepoLocks + exp *valid.RepoLocks + }{ + { + description: "nothing set", + input: raw.RepoLocks{}, + exp: &valid.DefaultRepoLocks, + }, + { + description: "value set", + input: raw.RepoLocks{ + Mode: &repoLocksOnApply, + }, + exp: &valid.RepoLocks{ + Mode: valid.RepoLocksOnApplyMode, + }, + }, + } + for _, c := range cases { + t.Run(c.description, func(t *testing.T) { + Equals(t, c.exp, c.input.ToValid()) + }) + } +} diff --git a/server/core/config/raw/step.go b/server/core/config/raw/step.go index 8b0e642566..35d9243b02 100644 --- a/server/core/config/raw/step.go +++ b/server/core/config/raw/step.go @@ -42,6 +42,9 @@ const ( // name: test // command: echo 312 // value: value +// - multienv: +// command: envs.sh +// outpiut: hide // - run: // command: my custom command // output: hide @@ -63,8 +66,8 @@ type Step struct { // Key will be set in case #1 and #3 above to the key. In case #2, there // could be multiple keys (since the element is a map) so we don't set Key. Key *string - // EnvOrRun will be set in case #2 above. - EnvOrRun map[string]map[string]string + // CommandMap will be set in case #2 above. + CommandMap map[string]map[string]string // Map will be set in case #3 above. Map map[string]map[string][]string // StringVal will be set in case #4 above. 
@@ -152,7 +155,7 @@ func (s Step) Validate() error { return nil } - envOrRunStep := func(value interface{}) error { + envOrRunOrMultiEnvStep := func(value interface{}) error { elem := value.(map[string]map[string]string) var keys []string for k := range elem { @@ -198,20 +201,25 @@ func (s Step) Validate() error { return fmt.Errorf("env steps only support one of the %q or %q keys, found both", ValueArgKey, CommandArgKey) } - case RunStepName: + case RunStepName, MultiEnvStepName: argsCopy := make(map[string]string) for k, v := range args { argsCopy[k] = v } args = argsCopy if _, ok := args[CommandArgKey]; !ok { - return fmt.Errorf("run step must have a %q key set", CommandArgKey) + return fmt.Errorf("%q step must have a %q key set", stepName, CommandArgKey) } delete(args, CommandArgKey) if v, ok := args[OutputArgKey]; ok { - if !valid.MatchesAnyPostProcessRunOutputOptions(v) { + if stepName == RunStepName && !valid.MatchesAnyPostProcessRunOutputOptions(v) { return fmt.Errorf("run step %q option must be one of %q", OutputArgKey, strings.Join(valid.PostProcessRunOutputOptions(), ",")) } + + if stepName == MultiEnvStepName && !(v == valid.PostProcessRunOutputShow || v == valid.PostProcessRunOutputHide) { + return fmt.Errorf("multienv step %q option must be %q or %q", OutputArgKey, valid.PostProcessRunOutputShow, valid.PostProcessRunOutputHide) + } + // When output requires regex option if v == valid.PostProcessRunOutputCustomRegex || v == valid.PostProcessRunOutputStripRefreshingWithCustomRegex { if regex, ok := args[OutputRegexFilterKey]; ok { @@ -232,7 +240,7 @@ func (s Step) Validate() error { } // Sort so tests can be deterministic. sort.Strings(argKeys) - return fmt.Errorf("run steps only support keys %q, %q and %q, found extra keys %q", RunStepName, CommandArgKey, OutputArgKey, strings.Join(argKeys, ",")) + return fmt.Errorf("%q steps only support keys %q and %q, found extra keys %q", stepName, CommandArgKey, OutputArgKey, strings.Join(argKeys, ",")) } default: return fmt.Errorf("%q is not a valid step type", stepName) @@ -241,7 +249,7 @@ func (s Step) Validate() error { return nil } - runStep := func(value interface{}) error { + runOrMultiEnvStep := func(value interface{}) error { elem := value.(map[string]string) var keys []string for k := range elem { @@ -255,7 +263,7 @@ func (s Step) Validate() error { len(keys), strings.Join(keys, ",")) } for stepName := range elem { - if stepName != RunStepName && stepName != MultiEnvStepName { + if !(stepName == RunStepName || stepName == MultiEnvStepName) { return fmt.Errorf("%q is not a valid step type", stepName) } } @@ -268,11 +276,11 @@ func (s Step) Validate() error { if len(s.Map) > 0 { return validation.Validate(s.Map, validation.By(extraArgs)) } - if len(s.EnvOrRun) > 0 { - return validation.Validate(s.EnvOrRun, validation.By(envOrRunStep)) + if len(s.CommandMap) > 0 { + return validation.Validate(s.CommandMap, validation.By(envOrRunOrMultiEnvStep)) } if len(s.StringVal) > 0 { - return validation.Validate(s.StringVal, validation.By(runStep)) + return validation.Validate(s.StringVal, validation.By(runOrMultiEnvStep)) } return errors.New("step element is empty") } @@ -286,10 +294,10 @@ func (s Step) ToValid() valid.Step { } // This will trigger in case #2 (see Step docs). - if len(s.EnvOrRun) > 0 { + if len(s.CommandMap) > 0 { // After validation we assume there's only one key and it's a valid // step name so we just use the first one. 
- for stepName, stepArgs := range s.EnvOrRun { + for stepName, stepArgs := range s.CommandMap { step := valid.Step{ StepName: stepName, EnvVarName: stepArgs[NameArgKey], @@ -374,7 +382,7 @@ func (s *Step) unmarshalGeneric(unmarshal func(interface{}) error) error { var envStep map[string]map[string]string err = unmarshal(&envStep) if err == nil { - s.EnvOrRun = envStep + s.CommandMap = envStep return nil } @@ -397,8 +405,8 @@ func (s Step) marshalGeneric() (interface{}, error) { return s.StringVal, nil } else if len(s.Map) != 0 { return s.Map, nil - } else if len(s.EnvOrRun) != 0 { - return s.EnvOrRun, nil + } else if len(s.CommandMap) != 0 { + return s.CommandMap, nil } else if s.Key != nil { return s.Key, nil } diff --git a/server/core/config/raw/step_test.go b/server/core/config/raw/step_test.go index c0fdff66ff..85434ded04 100644 --- a/server/core/config/raw/step_test.go +++ b/server/core/config/raw/step_test.go @@ -81,7 +81,7 @@ env: value: direct_value name: test`, exp: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "value": "direct_value", "name": "test", @@ -96,7 +96,7 @@ env: command: echo 123 name: test`, exp: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "command": "echo 123", "name": "test", @@ -134,10 +134,10 @@ key: value`, description: "empty", input: "", exp: raw.Step{ - Key: nil, - Map: nil, - StringVal: nil, - EnvOrRun: nil, + Key: nil, + Map: nil, + StringVal: nil, + CommandMap: nil, }, }, @@ -227,7 +227,7 @@ func TestStep_Validate(t *testing.T) { { description: "env", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "name": "test", "command": "echo 123", @@ -283,7 +283,7 @@ func TestStep_Validate(t *testing.T) { { description: "multiple keys in env", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "key1": nil, "key2": nil, }, @@ -312,7 +312,7 @@ func TestStep_Validate(t *testing.T) { { description: "invalid key in env", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "invalid": nil, }, }, @@ -353,7 +353,7 @@ func TestStep_Validate(t *testing.T) { { description: "env step with no name key set", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "value": "value", }, @@ -364,7 +364,7 @@ func TestStep_Validate(t *testing.T) { { description: "env step with invalid key", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "abc": "", "invalid2": "", @@ -376,7 +376,7 @@ func TestStep_Validate(t *testing.T) { { description: "env step with both command and value set", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "name": "name", "command": "command", @@ -454,7 +454,7 @@ func TestStep_ToValid(t *testing.T) { { description: "env step", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: EnvType{ "env": { "name": "test", "command": "echo 123", @@ -561,7 +561,7 @@ func TestStep_ToValid(t *testing.T) { { description: "run step with output", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: RunType{ "run": { "command": "my 'run command'", "output": "hide", @@ -574,10 +574,38 @@ func TestStep_ToValid(t *testing.T) { Output: "hide", }, }, + { + description: "multienv step", + input: raw.Step{ + StringVal: map[string]string{ + "multienv": "envs.sh", + }, + }, + exp: valid.Step{ + StepName: "multienv", + RunCommand: "envs.sh", + }, + }, + { + description: "multienv step with output", + input: raw.Step{ + CommandMap: MultiEnvType{ + "multienv": { + "command": "envs.sh", + "output": "hide", + }, + }, + 
}, + exp: valid.Step{ + StepName: "multienv", + RunCommand: "envs.sh", + Output: "hide", + }, + }, { description: "run step with regex", input: raw.Step{ - EnvOrRun: EnvOrRunType{ + CommandMap: RunType{ "run": { "command": "my 'run command'", "output": "regex_filter", @@ -601,4 +629,6 @@ func TestStep_ToValid(t *testing.T) { } type MapType map[string]map[string][]string -type EnvOrRunType map[string]map[string]string +type EnvType map[string]map[string]string +type RunType map[string]map[string]string +type MultiEnvType map[string]map[string]string diff --git a/server/core/config/valid/global_cfg.go b/server/core/config/valid/global_cfg.go index a2c84669e4..ac41b3865e 100644 --- a/server/core/config/valid/global_cfg.go +++ b/server/core/config/valid/global_cfg.go @@ -23,9 +23,13 @@ const AllowCustomWorkflowsKey = "allow_custom_workflows" const DefaultWorkflowName = "default" const DeleteSourceBranchOnMergeKey = "delete_source_branch_on_merge" const RepoLockingKey = "repo_locking" +const RepoLocksKey = "repo_locks" const PolicyCheckKey = "policy_check" const CustomPolicyCheckKey = "custom_policy_check" const AutoDiscoverKey = "autodiscover" +const SilencePRCommentsKey = "silence_pr_comments" + +var AllowedSilencePRComments = []string{"plan", "apply"} // DefaultAtlantisFile is the default name of the config file for each repo. const DefaultAtlantisFile = "atlantis.yaml" @@ -80,9 +84,11 @@ type Repo struct { AllowCustomWorkflows *bool DeleteSourceBranchOnMerge *bool RepoLocking *bool + RepoLocks *RepoLocks PolicyCheck *bool CustomPolicyCheck *bool AutoDiscover *AutoDiscover + SilencePRComments []string } type MergedProjectCfg struct { @@ -102,9 +108,10 @@ type MergedProjectCfg struct { PolicySets PolicySets DeleteSourceBranchOnMerge bool ExecutionOrderGroup int - RepoLocking bool + RepoLocks RepoLocks PolicyCheck bool CustomPolicyCheck bool + SilencePRComments []string } // WorkflowHook is a map of custom run commands to run before or after workflows. @@ -207,11 +214,12 @@ func NewGlobalCfgFromArgs(args GlobalCfgArgs) GlobalCfg { allowCustomWorkflows := false deleteSourceBranchOnMerge := false - repoLockingKey := true + repoLocks := DefaultRepoLocks customPolicyCheck := false autoDiscover := AutoDiscover{Mode: AutoDiscoverAutoMode} + var silencePRComments []string if args.AllowAllRepoSettings { - allowedOverrides = []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, PolicyCheckKey} + allowedOverrides = []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, RepoLocksKey, PolicyCheckKey, SilencePRCommentsKey} allowCustomWorkflows = true } @@ -231,10 +239,11 @@ func NewGlobalCfgFromArgs(args GlobalCfgArgs) GlobalCfg { AllowedOverrides: allowedOverrides, AllowCustomWorkflows: &allowCustomWorkflows, DeleteSourceBranchOnMerge: &deleteSourceBranchOnMerge, - RepoLocking: &repoLockingKey, + RepoLocks: &repoLocks, PolicyCheck: &policyCheck, CustomPolicyCheck: &customPolicyCheck, AutoDiscover: &autoDiscover, + SilencePRComments: silencePRComments, }, }, Workflows: map[string]Workflow{ @@ -271,8 +280,7 @@ func (r Repo) IDString() string { // final config. It assumes that all configs have been validated. 
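The merge function that follows resolves `repo_locks` across three levels. As a reading aid, this hypothetical helper condenses that precedence; it assumes only the `valid.RepoLocks` types added in this patch and ignores the `allowed_overrides` gate and the logging:

```go
package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/core/config/valid"
)

// effectiveRepoLocks is a hypothetical stand-in for the RepoLocksKey
// branch below: server-side config seeds the mode, a repo-root
// repo_locks block overrides it, and a project-level block wins over both.
func effectiveRepoLocks(server valid.RepoLocks, repoRoot, project *valid.RepoLocks) valid.RepoLocks {
	locks := server
	if repoRoot != nil && locks.Mode != repoRoot.Mode {
		locks = *repoRoot
	}
	if project != nil && locks.Mode != project.Mode {
		locks = *project
	}
	return locks
}

func main() {
	project := &valid.RepoLocks{Mode: valid.RepoLocksOnApplyMode}
	merged := effectiveRepoLocks(valid.DefaultRepoLocks, nil, project)
	fmt.Println(merged.Mode) // on_apply
}
```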
func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, proj Project, rCfg RepoCfg) MergedProjectCfg { log.Debug("MergeProjectCfg started") - planReqs, applyReqs, importReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocking, policyCheck, customPolicyCheck, _ := g.getMatchingCfg(log, repoID) - + planReqs, applyReqs, importReqs, workflow, allowedOverrides, allowCustomWorkflows, deleteSourceBranchOnMerge, repoLocks, policyCheck, customPolicyCheck, _, silencePRComments := g.getMatchingCfg(log, repoID) // If repos are allowed to override certain keys then override them. for _, key := range allowedOverrides { switch key { @@ -335,8 +343,26 @@ case RepoLockingKey: if proj.RepoLocking != nil { log.Debug("overriding server-defined %s with repo settings: [%t]", RepoLockingKey, *proj.RepoLocking) - repoLocking = *proj.RepoLocking + if *proj.RepoLocking && repoLocks.Mode == RepoLocksDisabledMode { + repoLocks.Mode = DefaultRepoLocksMode + } else if !*proj.RepoLocking { + repoLocks.Mode = RepoLocksDisabledMode + } + } + case RepoLocksKey: + // We check whether the server-configured value and the repo-root + // level config differ. If they do, we take the more granular setting. + if rCfg.RepoLocks != nil && repoLocks.Mode != rCfg.RepoLocks.Mode { + log.Debug("overriding server-defined %s with repo-root settings: [%#v]", RepoLocksKey, rCfg.RepoLocks) + repoLocks = *rCfg.RepoLocks } + // Then we check whether the more granular project-based config + // differs. If it does, we take that instead. + if proj.RepoLocks != nil && repoLocks.Mode != proj.RepoLocks.Mode { + log.Debug("overriding repo-root-defined %s with project settings: [%#v]", RepoLocksKey, *proj.RepoLocks) + repoLocks = *proj.RepoLocks + } + log.Debug("merged repoLocks: [%#v]", repoLocks) case PolicyCheckKey: if proj.PolicyCheck != nil { log.Debug("overriding server-defined %s with repo settings: [%t]", PolicyCheckKey, *proj.PolicyCheck) @@ -347,12 +373,29 @@ log.Debug("overriding server-defined %s with repo settings: [%t]", CustomPolicyCheckKey, *proj.CustomPolicyCheck) customPolicyCheck = *proj.CustomPolicyCheck } + case SilencePRCommentsKey: + if proj.SilencePRComments != nil { + log.Debug("overriding repo-root-defined %s with project settings: [%s]", SilencePRCommentsKey, strings.Join(proj.SilencePRComments, ",")) + silencePRComments = proj.SilencePRComments + } else if rCfg.SilencePRComments != nil { + log.Debug("overriding server-defined %s with repo-root settings: [%s]", SilencePRCommentsKey, strings.Join(rCfg.SilencePRComments, ",")) + silencePRComments = rCfg.SilencePRComments + } } log.Debug("MergeProjectCfg completed") } - log.Debug("final settings: %s: [%s], %s: [%s], %s: [%s], %s: %s", - PlanRequirementsKey, strings.Join(planReqs, ","), ApplyRequirementsKey, strings.Join(applyReqs, ","), ImportRequirementsKey, strings.Join(importReqs, ","), WorkflowKey, workflow.Name) + log.Debug("final settings: %s: [%s], %s: [%s], %s: [%s], %s: %s, %s: %t, %s: %s, %s: %t, %s: %t, %s: [%s]", + PlanRequirementsKey, strings.Join(planReqs, ","), + ApplyRequirementsKey, strings.Join(applyReqs, ","), + ImportRequirementsKey, strings.Join(importReqs, ","), + WorkflowKey, workflow.Name, + DeleteSourceBranchOnMergeKey, deleteSourceBranchOnMerge, + RepoLocksKey, repoLocks.Mode, + PolicyCheckKey, policyCheck, + CustomPolicyCheckKey, customPolicyCheck, + 
SilencePRCommentsKey, strings.Join(silencePRComments, ","), + ) return MergedProjectCfg{ PlanRequirements: planReqs, @@ -369,9 +412,10 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro PolicySets: g.PolicySets, DeleteSourceBranchOnMerge: deleteSourceBranchOnMerge, ExecutionOrderGroup: proj.ExecutionOrderGroup, - RepoLocking: repoLocking, + RepoLocks: repoLocks, PolicyCheck: policyCheck, CustomPolicyCheck: customPolicyCheck, + SilencePRComments: silencePRComments, } } @@ -379,7 +423,7 @@ func (g GlobalCfg) MergeProjectCfg(log logging.SimpleLogging, repoID string, pro // repo with id repoID. It is used when there is no repo config. func (g GlobalCfg) DefaultProjCfg(log logging.SimpleLogging, repoID string, repoRelDir string, workspace string) MergedProjectCfg { log.Debug("building config based on server-side config") - planReqs, applyReqs, importReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocking, policyCheck, customPolicyCheck, _ := g.getMatchingCfg(log, repoID) + planReqs, applyReqs, importReqs, workflow, _, _, deleteSourceBranchOnMerge, repoLocks, policyCheck, customPolicyCheck, _, silencePRComments := g.getMatchingCfg(log, repoID) return MergedProjectCfg{ PlanRequirements: planReqs, ApplyRequirements: applyReqs, @@ -392,9 +436,10 @@ func (g GlobalCfg) DefaultProjCfg(log logging.SimpleLogging, repoID string, repo TerraformVersion: nil, PolicySets: g.PolicySets, DeleteSourceBranchOnMerge: deleteSourceBranchOnMerge, - RepoLocking: repoLocking, + RepoLocks: repoLocks, PolicyCheck: policyCheck, CustomPolicyCheck: customPolicyCheck, + SilencePRComments: silencePRComments, } } @@ -412,7 +457,6 @@ func (g GlobalCfg) RepoAutoDiscoverCfg(repoID string) *AutoDiscover { // ValidateRepoCfg validates that rCfg for repo with id repoID is valid based // on our global config. func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { - mapContainsF := func(m map[string]Workflow, key string) bool { for k := range m { if k == key { @@ -450,9 +494,32 @@ func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { if p.RepoLocking != nil && !utils.SlicesContains(allowedOverrides, RepoLockingKey) { return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", RepoLockingKey, AllowedOverridesKey, RepoLockingKey) } + if p.RepoLocks != nil && !utils.SlicesContains(allowedOverrides, RepoLocksKey) { + return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", RepoLocksKey, AllowedOverridesKey, RepoLocksKey) + } if p.CustomPolicyCheck != nil && !utils.SlicesContains(allowedOverrides, CustomPolicyCheckKey) { return fmt.Errorf("repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", CustomPolicyCheckKey, AllowedOverridesKey, CustomPolicyCheckKey) } + if p.SilencePRComments != nil { + if !utils.SlicesContains(allowedOverrides, SilencePRCommentsKey) { + return fmt.Errorf( + "repo config not allowed to set '%s' key: server-side config needs '%s: [%s]'", + SilencePRCommentsKey, + AllowedOverridesKey, + SilencePRCommentsKey, + ) + } + for _, silenceStage := range p.SilencePRComments { + if !utils.SlicesContains(AllowedSilencePRComments, silenceStage) { + return fmt.Errorf( + "repo config '%s' key value of '%s' is not supported, supported values are [%s]", + SilencePRCommentsKey, + silenceStage, + strings.Join(AllowedSilencePRComments, ", "), + ) + } + } + } } // Check custom workflows. 
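The `silence_pr_comments` allow-list check above is easy to exercise in isolation. This standalone sketch reimplements just that loop with the standard library's `slices.Contains` (the real code uses `utils.SlicesContains`); `validateSilencePRComments` is a hypothetical name:

```go
package main

import (
	"fmt"
	"slices"
	"strings"
)

// Mirrors valid.AllowedSilencePRComments from this patch.
var allowedSilencePRComments = []string{"plan", "apply"}

func validateSilencePRComments(stages []string) error {
	for _, stage := range stages {
		if !slices.Contains(allowedSilencePRComments, stage) {
			return fmt.Errorf(
				"silence_pr_comments value %q is not supported, supported values are [%s]",
				stage, strings.Join(allowedSilencePRComments, ", "))
		}
	}
	return nil
}

func main() {
	fmt.Println(validateSilencePRComments([]string{"plan"}))    // <nil>
	fmt.Println(validateSilencePRComments([]string{"destroy"})) // error
}
```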
@@ -511,7 +578,7 @@ func (g GlobalCfg) ValidateRepoCfg(rCfg RepoCfg, repoID string) error { } // getMatchingCfg returns the key settings for repoID. -func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (planReqs []string, applyReqs []string, importReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocking bool, policyCheck bool, customPolicyCheck bool, autoDiscover AutoDiscover) { +func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (planReqs []string, applyReqs []string, importReqs []string, workflow Workflow, allowedOverrides []string, allowCustomWorkflows bool, deleteSourceBranchOnMerge bool, repoLocks RepoLocks, policyCheck bool, customPolicyCheck bool, autoDiscover AutoDiscover, silencePRComments []string) { toLog := make(map[string]string) traceF := func(repoIdx int, repoID string, key string, val interface{}) string { from := "default server config" @@ -535,8 +602,10 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla // Can't use raw.DefaultAutoDiscoverMode() because of an import cycle. Should refactor to avoid that. autoDiscover = AutoDiscover{Mode: AutoDiscoverAutoMode} + repoLocking := true + repoLocks = DefaultRepoLocks - for _, key := range []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, PolicyCheckKey, CustomPolicyCheckKey} { + for _, key := range []string{PlanRequirementsKey, ApplyRequirementsKey, ImportRequirementsKey, WorkflowKey, AllowedOverridesKey, AllowCustomWorkflowsKey, DeleteSourceBranchOnMergeKey, RepoLockingKey, RepoLocksKey, PolicyCheckKey, CustomPolicyCheckKey, SilencePRCommentsKey} { for i, repo := range g.Repos { if repo.IDMatches(repoID) { switch key { @@ -580,6 +649,11 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla toLog[RepoLockingKey] = traceF(i, repo.IDString(), RepoLockingKey, *repo.RepoLocking) repoLocking = *repo.RepoLocking } + case RepoLocksKey: + if repo.RepoLocks != nil { + toLog[RepoLocksKey] = traceF(i, repo.IDString(), RepoLocksKey, repo.RepoLocks.Mode) + repoLocks = *repo.RepoLocks + } case PolicyCheckKey: if repo.PolicyCheck != nil { toLog[PolicyCheckKey] = traceF(i, repo.IDString(), PolicyCheckKey, *repo.PolicyCheck) @@ -595,6 +669,11 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla toLog[AutoDiscoverKey] = traceF(i, repo.IDString(), AutoDiscoverKey, repo.AutoDiscover.Mode) autoDiscover = *repo.AutoDiscover } + case SilencePRCommentsKey: + if repo.SilencePRComments != nil { + toLog[SilencePRCommentsKey] = traceF(i, repo.IDString(), SilencePRCommentsKey, repo.SilencePRComments) + silencePRComments = repo.SilencePRComments + } } } } @@ -602,6 +681,10 @@ func (g GlobalCfg) getMatchingCfg(log logging.SimpleLogging, repoID string) (pla for _, l := range toLog { log.Debug(l) } + // repoLocking is deprecated and enabled by default, disable repo locks if it is explicitly disabled + if !repoLocking { + repoLocks.Mode = RepoLocksDisabledMode + } return } diff --git a/server/core/config/valid/global_cfg_test.go b/server/core/config/valid/global_cfg_test.go index 5c9cfc919a..129b41d462 100644 --- a/server/core/config/valid/global_cfg_test.go +++ b/server/core/config/valid/global_cfg_test.go @@ -80,7 +80,7 @@ func TestNewGlobalCfg(t *testing.T) { AllowedOverrides: []string{}, AllowCustomWorkflows: 
Bool(false), DeleteSourceBranchOnMerge: Bool(false), - RepoLocking: Bool(true), + RepoLocks: &valid.DefaultRepoLocks, PolicyCheck: Bool(false), CustomPolicyCheck: Bool(false), AutoDiscover: raw.DefaultAutoDiscover(), @@ -129,7 +129,7 @@ func TestNewGlobalCfg(t *testing.T) { if c.allowAllRepoSettings { exp.Repos[0].AllowCustomWorkflows = Bool(true) - exp.Repos[0].AllowedOverrides = []string{"plan_requirements", "apply_requirements", "import_requirements", "workflow", "delete_source_branch_on_merge", "repo_locking", "policy_check"} + exp.Repos[0].AllowedOverrides = []string{"plan_requirements", "apply_requirements", "import_requirements", "workflow", "delete_source_branch_on_merge", "repo_locking", "repo_locks", "policy_check", "silence_pr_comments"} } if c.policyCheckEnabled { exp.Repos[0].PlanRequirements = append(exp.Repos[0].PlanRequirements, "policies_passed") @@ -569,7 +569,7 @@ policies: Workspace: "default", Name: "", AutoplanEnabled: false, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -618,7 +618,7 @@ policies: Workspace: "default", Name: "", AutoplanEnabled: false, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -707,7 +707,7 @@ workflows: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -737,7 +737,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -767,7 +767,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -798,7 +798,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, PolicyCheck: true, }, @@ -829,7 +829,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, PolicyCheck: false, }, @@ -860,7 +860,7 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -877,7 +877,6 @@ repos: PlanRequirements: []string{}, ApplyRequirements: []string{}, ImportRequirements: []string{}, - RepoLocking: Bool(true), CustomPolicyCheck: Bool(false), }, repoWorkflows: nil, @@ -891,7 +890,39 @@ repos: Name: "", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: false, + RepoLocks: valid.RepoLocks{Mode: valid.RepoLocksDisabledMode}, + CustomPolicyCheck: false, + }, + }, + "repo-side repo_locks win out if allowed": { + gCfg: ` +repos: +- id: /.*/ + repo_locks: + mode: on_apply +`, + repoID: "github.com/owner/repo", + proj: valid.Project{ + Dir: ".", + Workspace: "default", + PlanRequirements: []string{}, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + RepoLocks: &valid.DefaultRepoLocks, + CustomPolicyCheck: Bool(false), + }, + repoWorkflows: nil, + exp: valid.MergedProjectCfg{ + PlanRequirements: []string{}, + ApplyRequirements: []string{}, + ImportRequirements: []string{}, + Workflow: defaultWorkflow, + RepoRelDir: ".", + Workspace: "default", + Name: "", + AutoplanEnabled: false, + PolicySets: emptyPolicySets, + RepoLocks: valid.RepoLocks{Mode: valid.RepoLocksOnApplyMode}, CustomPolicyCheck: false, }, }, @@ -928,7 +959,7 @@ 
repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -955,7 +986,7 @@ repos: Name: "myname", AutoplanEnabled: true, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -984,7 +1015,7 @@ repos: AutoplanEnabled: true, PolicySets: emptyPolicySets, ExecutionOrderGroup: 10, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, CustomPolicyCheck: false, }, }, @@ -1172,7 +1203,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: false, CustomPolicyCheck: false, }, @@ -1212,7 +1243,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: true, CustomPolicyCheck: false, }, @@ -1253,7 +1284,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: false, CustomPolicyCheck: false, }, @@ -1294,7 +1325,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: false, CustomPolicyCheck: false, }, @@ -1335,7 +1366,7 @@ repos: Name: "myname", AutoplanEnabled: false, PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocks: valid.DefaultRepoLocks, PolicyCheck: true, // Project will have policy check as true but since it is globally disable it wont actually run CustomPolicyCheck: false, }, diff --git a/server/core/config/valid/repo_cfg.go b/server/core/config/valid/repo_cfg.go index 893c91d3fd..3194fe7101 100644 --- a/server/core/config/valid/repo_cfg.go +++ b/server/core/config/valid/repo_cfg.go @@ -24,11 +24,12 @@ type RepoCfg struct { ParallelPlan *bool ParallelPolicyCheck *bool DeleteSourceBranchOnMerge *bool - RepoLocking *bool + RepoLocks *RepoLocks CustomPolicyCheck *bool EmojiReaction string AllowedRegexpPrefixes []string AbortOnExcecutionOrderFail bool + SilencePRComments []string } func (r RepoCfg) FindProjectsByDirWorkspace(repoRelDir string, workspace string) []Project { @@ -154,9 +155,11 @@ type Project struct { DependsOn []string DeleteSourceBranchOnMerge *bool RepoLocking *bool + RepoLocks *RepoLocks ExecutionOrderGroup int PolicyCheck *bool CustomPolicyCheck *bool + SilencePRComments []string } // GetName returns the name of the project or an empty string if there is no diff --git a/server/core/config/valid/repo_locks.go b/server/core/config/valid/repo_locks.go new file mode 100644 index 0000000000..7a4a77a873 --- /dev/null +++ b/server/core/config/valid/repo_locks.go @@ -0,0 +1,19 @@ +package valid + +// RepoLocksMode enum +type RepoLocksMode string + +var DefaultRepoLocksMode = RepoLocksOnPlanMode +var DefaultRepoLocks = RepoLocks{ + Mode: DefaultRepoLocksMode, +} + +const ( + RepoLocksDisabledMode RepoLocksMode = "disabled" + RepoLocksOnPlanMode RepoLocksMode = "on_plan" + RepoLocksOnApplyMode RepoLocksMode = "on_apply" +) + +type RepoLocks struct { + Mode RepoLocksMode +} diff --git a/server/core/redis/redis.go b/server/core/redis/redis.go index 030cd15b82..2afe336ce4 100644 --- a/server/core/redis/redis.go +++ b/server/core/redis/redis.go @@ -238,7 +238,7 @@ func (r *RedisDB) CheckCommandLock(cmdName command.Name) (*command.Lock, error) return &cmdLock, err } -// UpdatePullWithResults updates pull's status with the latest 
project results. +// UpdateProjectStatus updates pull's status with the latest project results. // It returns the new PullStatus object. func (r *RedisDB) UpdateProjectStatus(pull models.PullRequest, workspace string, repoRelDir string, newStatus models.ProjectPlanStatus) error { key, err := r.pullKey(pull) diff --git a/server/core/runtime/apply_step_runner.go b/server/core/runtime/apply_step_runner.go index eb1633eea0..2e223f2996 100644 --- a/server/core/runtime/apply_step_runner.go +++ b/server/core/runtime/apply_step_runner.go @@ -12,6 +12,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/utils" ) // ApplyStepRunner runs `terraform apply`. @@ -56,7 +57,7 @@ func (a *ApplyStepRunner) Run(ctx command.ProjectContext, extraArgs []string, pa // If the apply was successful, delete the plan. if err == nil { ctx.Log.Info("apply successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful apply: %s", removeErr) } } @@ -116,7 +117,6 @@ func (a *ApplyStepRunner) runRemoteApply( absPlanPath string, tfVersion *version.Version, envs map[string]string) (string, error) { - // The planfile contents are needed to ensure that the plan didn't change // between plan and apply phases. planfileBytes, err := os.ReadFile(absPlanPath) diff --git a/server/core/runtime/import_step_runner.go b/server/core/runtime/import_step_runner.go index 2f4cb8c51c..0d5787a8ad 100644 --- a/server/core/runtime/import_step_runner.go +++ b/server/core/runtime/import_step_runner.go @@ -6,6 +6,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) type importStepRunner struct { @@ -37,7 +38,7 @@ func (p *importStepRunner) Run(ctx command.ProjectContext, extraArgs []string, p if err == nil { if _, planPathErr := os.Stat(planPath); !os.IsNotExist(planPathErr) { ctx.Log.Info("import successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful import: %s", removeErr) } } diff --git a/server/core/runtime/init_step_runner.go b/server/core/runtime/init_step_runner.go index cd3ab32810..0c6de1b013 100644 --- a/server/core/runtime/init_step_runner.go +++ b/server/core/runtime/init_step_runner.go @@ -1,12 +1,12 @@ package runtime import ( - "os" "path/filepath" version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/core/runtime/common" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) // InitStep runs `terraform init`. 
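The runners above (apply and import, plus state rm and init below) now call `utils.RemoveIgnoreNonExistent` in place of `os.Remove`. The helper's body is not part of this diff; a minimal implementation consistent with its call sites might look like:

```go
package utils

import (
	"errors"
	"io/fs"
	"os"
)

// RemoveIgnoreNonExistent removes the named file but treats an
// already-missing file as success, so callers only warn on real
// failures. (Sketch only; the actual helper in server/utils may differ.)
func RemoveIgnoreNonExistent(file string) error {
	err := os.Remove(file)
	if err == nil || errors.Is(err, fs.ErrNotExist) {
		return nil
	}
	return err
}
```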
@@ -21,14 +21,13 @@ func (i *InitStepRunner) Run(ctx command.ProjectContext, extraArgs []string, pat terraformLockFileTracked, err := common.IsFileTracked(path, lockFileName) if err != nil { ctx.Log.Warn("Error checking if %s is tracked in %s", lockFileName, path) - } // If .terraform.lock.hcl is not tracked in git and it exists prior to init // delete it as it probably has been created by a previous run of // terraform init if common.FileExists(terraformLockfilePath) && !terraformLockFileTracked { ctx.Log.Debug("Deleting `%s` that was generated by previous terraform init", terraformLockfilePath) - delErr := os.Remove(terraformLockfilePath) + delErr := utils.RemoveIgnoreNonExistent(terraformLockfilePath) if delErr != nil { ctx.Log.Info("Error Deleting `%s`", lockFileName) } diff --git a/server/core/runtime/multienv_step_runner.go b/server/core/runtime/multienv_step_runner.go index cc4ea42df6..22f183ecdd 100644 --- a/server/core/runtime/multienv_step_runner.go +++ b/server/core/runtime/multienv_step_runner.go @@ -16,32 +16,39 @@ type MultiEnvStepRunner struct { // Run runs the multienv step command. // The command must return a json string containing the array of name-value pairs that are being added as extra environment variables -func (r *MultiEnvStepRunner) Run(ctx command.ProjectContext, command string, path string, envs map[string]string) (string, error) { - res, err := r.RunStepRunner.Run(ctx, command, path, envs, false, valid.PostProcessRunOutputShow, "") +func (r *MultiEnvStepRunner) Run(ctx command.ProjectContext, command string, path string, envs map[string]string, postProcessOutput valid.PostProcessRunOutputOption) (string, error) { + res, err := r.RunStepRunner.Run(ctx, command, path, envs, false, postProcessOutput, "") if err != nil { return "", err } + var sb strings.Builder if len(res) == 0 { - return "No dynamic environment variable added", nil - } + sb.WriteString("No dynamic environment variable added") + } else { + sb.WriteString("Dynamic environment variables added:\n") - var sb strings.Builder - sb.WriteString("Dynamic environment variables added:\n") + vars, err := parseMultienvLine(res) + if err != nil { + return "", fmt.Errorf("Invalid environment variable definition: %s (%w)", res, err) + } - vars, err := parseMultienvLine(res) - if err != nil { - return "", fmt.Errorf("Invalid environment variable definition: %s (%w)", res, err) + for i := 0; i < len(vars); i += 2 { + key := vars[i] + envs[key] = vars[i+1] + sb.WriteString(key) + sb.WriteRune('\n') + } } - for i := 0; i < len(vars); i += 2 { - key := vars[i] - envs[key] = vars[i+1] - sb.WriteString(key) - sb.WriteRune('\n') + switch postProcessOutput { + case valid.PostProcessRunOutputHide: + return "", nil + case valid.PostProcessRunOutputShow: + return sb.String(), nil + default: + return sb.String(), nil } - - return sb.String(), nil } func parseMultienvLine(in string) ([]string, error) { diff --git a/server/core/runtime/multienv_step_runner_test.go b/server/core/runtime/multienv_step_runner_test.go index f7d6b1132f..adf51a8b60 100644 --- a/server/core/runtime/multienv_step_runner_test.go +++ b/server/core/runtime/multienv_step_runner_test.go @@ -5,6 +5,7 @@ import ( version "github.com/hashicorp/go-version" . 
"github.com/petergtz/pegomock/v4" + "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/core/terraform/mocks" "github.com/runatlantis/atlantis/server/events/command" @@ -84,7 +85,7 @@ func TestMultiEnvStepRunner_Run(t *testing.T) { ProjectName: c.ProjectName, } envMap := make(map[string]string) - value, err := multiEnvStepRunner.Run(ctx, c.Command, tmpDir, envMap) + value, err := multiEnvStepRunner.Run(ctx, c.Command, tmpDir, envMap, valid.PostProcessRunOutputShow) if c.ExpErr != "" { ErrContains(t, c.ExpErr, err) return diff --git a/server/core/runtime/policy/conftest_client_test.go b/server/core/runtime/policy/conftest_client_test.go index d8c5f5b9dc..c50875e996 100644 --- a/server/core/runtime/policy/conftest_client_test.go +++ b/server/core/runtime/policy/conftest_client_test.go @@ -33,7 +33,6 @@ func TestConfTestVersionDownloader(t *testing.T) { t.Run("success", func(t *testing.T) { - When(mockDownloader.GetFile(Eq(destPath), Eq(fullURL))).ThenReturn(nil) binPath, err := subject.downloadConfTestVersion(version, destPath) mockDownloader.VerifyWasCalledOnce().GetAny(Eq(destPath), Eq(fullURL)) diff --git a/server/core/runtime/run_step_runner.go b/server/core/runtime/run_step_runner.go index a58341455f..42edaa8cbd 100644 --- a/server/core/runtime/run_step_runner.go +++ b/server/core/runtime/run_step_runner.go @@ -71,6 +71,11 @@ func (r *RunStepRunner) Run(ctx command.ProjectContext, command string, path str runner := models.NewShellCommandRunner(command, finalEnvVars, path, streamOutput, r.ProjectCmdOutputHandler) output, err := runner.Run(ctx) + if postProcessOutput == valid.PostProcessRunOutputStripRefreshing { + output = StripRefreshingFromPlanOutput(output, tfVersion) + + } + if err != nil { err = fmt.Errorf("%s: running %q in %q: \n%s", err, command, path, output) if !ctx.CustomPolicyCheck { diff --git a/server/core/runtime/show_step_runner.go b/server/core/runtime/show_step_runner.go index e7c0f7ff14..ba89479b56 100644 --- a/server/core/runtime/show_step_runner.go +++ b/server/core/runtime/show_step_runner.go @@ -49,7 +49,7 @@ func (p *showStepRunner) Run(ctx command.ProjectContext, _ []string, path string return "", errors.Wrap(err, "running terraform show") } - if err := os.WriteFile(showResultFile, []byte(output), os.ModePerm); err != nil { + if err := os.WriteFile(showResultFile, []byte(output), 0600); err != nil { return "", errors.Wrap(err, "writing terraform show result") } diff --git a/server/core/runtime/state_rm_step_runner.go b/server/core/runtime/state_rm_step_runner.go index 74a0d18875..3b4a08f102 100644 --- a/server/core/runtime/state_rm_step_runner.go +++ b/server/core/runtime/state_rm_step_runner.go @@ -6,6 +6,7 @@ import ( version "github.com/hashicorp/go-version" "github.com/runatlantis/atlantis/server/events/command" + "github.com/runatlantis/atlantis/server/utils" ) type stateRmStepRunner struct { @@ -37,7 +38,7 @@ func (p *stateRmStepRunner) Run(ctx command.ProjectContext, extraArgs []string, if err == nil { if _, planPathErr := os.Stat(planPath); !os.IsNotExist(planPathErr) { ctx.Log.Info("state rm successful, deleting planfile") - if removeErr := os.Remove(planPath); removeErr != nil { + if removeErr := utils.RemoveIgnoreNonExistent(planPath); removeErr != nil { ctx.Log.Warn("failed to delete planfile after successful state rm: %s", removeErr) } } diff --git a/server/core/terraform/mocks/mock_downloader.go b/server/core/terraform/mocks/mock_downloader.go index 
ed16db0eff..06b82f6706 100644 --- a/server/core/terraform/mocks/mock_downloader.go +++ b/server/core/terraform/mocks/mock_downloader.go @@ -4,6 +4,7 @@ package mocks import ( + go_version "github.com/hashicorp/go-version" pegomock "github.com/petergtz/pegomock/v4" "reflect" "time" @@ -39,19 +40,23 @@ func (mock *MockDownloader) GetAny(dst string, src string) error { return ret0 } -func (mock *MockDownloader) GetFile(dst string, src string) error { +func (mock *MockDownloader) Install(dir string, downloadURL string, v *go_version.Version) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockDownloader().") } - params := []pegomock.Param{dst, src} - result := pegomock.GetGenericMockFrom(mock).Invoke("GetFile", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 error + params := []pegomock.Param{dir, downloadURL, v} + result := pegomock.GetGenericMockFrom(mock).Invoke("Install", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 string + var ret1 error if len(result) != 0 { if result[0] != nil { - ret0 = result[0].(error) + ret0 = result[0].(string) + } + if result[1] != nil { + ret1 = result[1].(error) } } - return ret0 + return ret0, ret1 } func (mock *MockDownloader) VerifyWasCalledOnce() *VerifierMockDownloader { @@ -122,23 +127,23 @@ func (c *MockDownloader_GetAny_OngoingVerification) GetAllCapturedArguments() (_ return } -func (verifier *VerifierMockDownloader) GetFile(dst string, src string) *MockDownloader_GetFile_OngoingVerification { - params := []pegomock.Param{dst, src} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetFile", params, verifier.timeout) - return &MockDownloader_GetFile_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +func (verifier *VerifierMockDownloader) Install(dir string, downloadURL string, v *go_version.Version) *MockDownloader_Install_OngoingVerification { + params := []pegomock.Param{dir, downloadURL, v} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Install", params, verifier.timeout) + return &MockDownloader_Install_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } -type MockDownloader_GetFile_OngoingVerification struct { +type MockDownloader_Install_OngoingVerification struct { mock *MockDownloader methodInvocations []pegomock.MethodInvocation } -func (c *MockDownloader_GetFile_OngoingVerification) GetCapturedArguments() (string, string) { - dst, src := c.GetAllCapturedArguments() - return dst[len(dst)-1], src[len(src)-1] +func (c *MockDownloader_Install_OngoingVerification) GetCapturedArguments() (string, string, *go_version.Version) { + dir, downloadURL, v := c.GetAllCapturedArguments() + return dir[len(dir)-1], downloadURL[len(downloadURL)-1], v[len(v)-1] } -func (c *MockDownloader_GetFile_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string) { +func (c *MockDownloader_Install_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []string, _param2 []*go_version.Version) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { _param0 = make([]string, len(c.methodInvocations)) @@ -149,6 +154,10 @@ func (c *MockDownloader_GetFile_OngoingVerification) GetAllCapturedArguments() ( for u, param := range params[1] { 
_param1[u] = param.(string) } + _param2 = make([]*go_version.Version, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(*go_version.Version) + } } return } diff --git a/server/core/terraform/mocks/mock_terraform_client.go b/server/core/terraform/mocks/mock_terraform_client.go index f5fe812a36..dae620f2e1 100644 --- a/server/core/terraform/mocks/mock_terraform_client.go +++ b/server/core/terraform/mocks/mock_terraform_client.go @@ -57,25 +57,6 @@ func (mock *MockClient) EnsureVersion(log logging.SimpleLogging, v *go_version.V return ret0 } -func (mock *MockClient) ListAvailableVersions(log logging.SimpleLogging) ([]string, error) { - if mock == nil { - panic("mock must not be nil. Use myMock := NewMockClient().") - } - params := []pegomock.Param{log} - result := pegomock.GetGenericMockFrom(mock).Invoke("ListAvailableVersions", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) - var ret0 []string - var ret1 error - if len(result) != 0 { - if result[0] != nil { - ret0 = result[0].([]string) - } - if result[1] != nil { - ret1 = result[1].(error) - } - } - return ret0, ret1 -} - func (mock *MockClient) RunCommandWithVersion(ctx command.ProjectContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) (string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockClient().") @@ -194,33 +175,6 @@ func (c *MockClient_EnsureVersion_OngoingVerification) GetAllCapturedArguments() return } -func (verifier *VerifierMockClient) ListAvailableVersions(log logging.SimpleLogging) *MockClient_ListAvailableVersions_OngoingVerification { - params := []pegomock.Param{log} - methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ListAvailableVersions", params, verifier.timeout) - return &MockClient_ListAvailableVersions_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} -} - -type MockClient_ListAvailableVersions_OngoingVerification struct { - mock *MockClient - methodInvocations []pegomock.MethodInvocation -} - -func (c *MockClient_ListAvailableVersions_OngoingVerification) GetCapturedArguments() logging.SimpleLogging { - log := c.GetAllCapturedArguments() - return log[len(log)-1] -} - -func (c *MockClient_ListAvailableVersions_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging) { - params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) - if len(params) > 0 { - _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) - for u, param := range params[0] { - _param0[u] = param.(logging.SimpleLogging) - } - } - return -} - func (verifier *VerifierMockClient) RunCommandWithVersion(ctx command.ProjectContext, path string, args []string, envs map[string]string, v *go_version.Version, workspace string) *MockClient_RunCommandWithVersion_OngoingVerification { params := []pegomock.Param{ctx, path, args, envs, v, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "RunCommandWithVersion", params, verifier.timeout) diff --git a/server/core/terraform/terraform_client.go b/server/core/terraform/terraform_client.go index 7ca6fc7277..792b028612 100644 --- a/server/core/terraform/terraform_client.go +++ b/server/core/terraform/terraform_client.go @@ -19,23 +19,23 @@ package terraform import ( "context" "fmt" - "net/http" 
"os" "os/exec" "path/filepath" "regexp" - "runtime" - "sort" "strings" "sync" "time" "github.com/hashicorp/go-getter/v2" "github.com/hashicorp/go-version" + install "github.com/hashicorp/hc-install" + "github.com/hashicorp/hc-install/product" + "github.com/hashicorp/hc-install/releases" + "github.com/hashicorp/hc-install/src" "github.com/hashicorp/terraform-config-inspect/tfconfig" "github.com/mitchellh/go-homedir" "github.com/pkg/errors" - "github.com/warrensbox/terraform-switcher/lib" "github.com/runatlantis/atlantis/server/core/runtime/models" "github.com/runatlantis/atlantis/server/events/command" @@ -57,9 +57,6 @@ type Client interface { // EnsureVersion makes sure that terraform version `v` is available to use EnsureVersion(log logging.SimpleLogging, v *version.Version) error - // ListAvailableVersions returns all available version of Terraform, if available; otherwise this will return an empty list. - ListAvailableVersions(log logging.SimpleLogging) ([]string, error) - // DetectVersion Extracts required_version from Terraform configuration in the specified project directory. Returns nil if unable to determine the version. DetectVersion(log logging.SimpleLogging, projectDirectory string) *version.Version } @@ -97,7 +94,7 @@ type DefaultClient struct { // Downloader is for downloading terraform versions. type Downloader interface { - GetFile(dst, src string) error + Install(dir string, downloadURL string, v *version.Version) (string, error) GetAny(dst, src string) error } @@ -278,94 +275,83 @@ func (c *DefaultClient) TerraformBinDir() string { return c.binDir } -// ListAvailableVersions returns all available version of Terraform. If downloads are not allowed, this will return an empty list. -func (c *DefaultClient) ListAvailableVersions(log logging.SimpleLogging) ([]string, error) { - url := fmt.Sprintf("%s/terraform", c.downloadBaseURL) - - if !c.downloadAllowed { - log.Debug("Terraform downloads disabled. Won't list Terraform versions available at %s", url) - return []string{}, nil - } - - log.Debug("Listing Terraform versions available at: %s", url) - - // terraform-switcher calls os.Exit(1) if it fails to successfully GET the configured URL. - // So, before calling it, test if we can connect. Then we can return an error instead if the request fails. - resp, err := http.Get(url) // #nosec G107 -- terraform-switch makes this same call below. Also, we don't process the response payload. - if err != nil { - return nil, fmt.Errorf("Unable to list Terraform versions: %s", err) - } - defer resp.Body.Close() // nolint: errcheck - - if resp.StatusCode != http.StatusOK { - return nil, fmt.Errorf("Unable to list Terraform versions: response code %d from %s", resp.StatusCode, url) +// ExtractExactRegex attempts to extract an exact version number from the provided string as a fallback. +// The function expects the version string to be in one of the following formats: "= x.y.z", "=x.y.z", or "x.y.z" where x, y, and z are integers. +// If the version string matches one of these formats, the function returns a slice containing the exact version number. +// If the version string does not match any of these formats, the function logs a debug message and returns nil. 
+func (c *DefaultClient) ExtractExactRegex(log logging.SimpleLogging, version string) []string { + re := regexp.MustCompile(`^=?\s*([0-9.]+)\s*$`) + matched := re.FindStringSubmatch(version) + if len(matched) == 0 { + log.Debug("exact version regex not found in the version %q", version) + return nil } - - versions, err := lib.GetTFList(url, true) - return versions, err + // The first element of the slice is the entire string, so we want the second element (the first capture group) + tfVersions := []string{matched[1]} + log.Debug("extracted exact version %q from version %q", tfVersions[0], version) + return tfVersions } -// DetectVersion Extracts required_version from Terraform configuration in the specified project directory. Returns nil if unable to determine the version. -// This will also try to intelligently evaluate non-exact matches by listing the available versions of Terraform and picking the best match. +// DetectVersion extracts required_version from Terraform configuration in the specified project directory. Returns nil if unable to determine the version. +// It will also try to evaluate non-exact matches by passing the Constraints to the hc-install Releases API, which will return a list of available versions. +// It will then select the highest version that satisfies the constraint. func (c *DefaultClient) DetectVersion(log logging.SimpleLogging, projectDirectory string) *version.Version { module, diags := tfconfig.LoadModule(projectDirectory) if diags.HasErrors() { - log.Err("Trying to detect required version: %s", diags.Error()) + log.Err("trying to detect required version: %s", diags.Error()) } if len(module.RequiredCore) != 1 { - log.Info("Cannot determine which version to use from terraform configuration, detected %d possibilities.", len(module.RequiredCore)) + log.Info("cannot determine which version to use from terraform configuration, detected %d possibilities.", len(module.RequiredCore)) return nil } requiredVersionSetting := module.RequiredCore[0] log.Debug("Found required_version setting of %q", requiredVersionSetting) - tfVersions, err := c.ListAvailableVersions(log) - if err != nil { - log.Err("Unable to list Terraform versions, may fall back to default: %s", err) - } - - if len(tfVersions) == 0 { - // Fall back to an exact required version string - // We allow `= x.y.z`, `=x.y.z` or `x.y.z` where `x`, `y` and `z` are integers. 
- re := regexp.MustCompile(`^=?\s*([0-9.]+)\s*$`) - matched := re.FindStringSubmatch(requiredVersionSetting) + if !c.downloadAllowed { + log.Debug("terraform downloads disabled.") + matched := c.ExtractExactRegex(log, requiredVersionSetting) if len(matched) == 0 { - log.Debug("Did not specify exact version in terraform configuration, found %q", requiredVersionSetting) + log.Debug("did not specify exact version in terraform configuration, found %q", requiredVersionSetting) return nil } - tfVersions = []string{matched[1]} - } - - constraint, _ := version.NewConstraint(requiredVersionSetting) - versions := make([]*version.Version, len(tfVersions)) - for i, tfvals := range tfVersions { - newVersion, err := version.NewVersion(tfvals) - if err == nil { - versions[i] = newVersion + version, err := version.NewVersion(matched[0]) + if err != nil { + log.Err("error parsing version string: %s", err) + return nil } + return version } - if len(versions) == 0 { - log.Debug("Did not specify exact valid version in terraform configuration, found %q", requiredVersionSetting) + constraintStr := requiredVersionSetting + vc, err := version.NewConstraint(constraintStr) + if err != nil { + log.Err("Error parsing constraint string: %s", err) return nil } - sort.Sort(sort.Reverse(version.Collection(versions))) - - for _, element := range versions { - if constraint.Check(element) { // Validate a version against a constraint - tfversionStr := element.String() - if lib.ValidVersionFormat(tfversionStr) { //check if version format is correct - tfversion, _ := version.NewVersion(tfversionStr) - log.Info("Detected module requires version: %s", tfversionStr) - return tfversion - } - } + constrainedVersions := &releases.Versions{ + Product: product.Terraform, + Constraints: vc, } - log.Debug("Could not match any valid terraform version with %q", requiredVersionSetting) - return nil + installCandidates, err := constrainedVersions.List(context.Background()) + if err != nil { + log.Err("error listing available versions: %s", err) + return nil + } + if len(installCandidates) == 0 { + log.Err("no Terraform versions found for constraints %s", constraintStr) + return nil + } + + // We want to select the highest version that satisfies the constraint. + versionDownloader := installCandidates[len(installCandidates)-1] + + // Get the Version object from the versionDownloader. + downloadVersion := versionDownloader.(*releases.ExactVersion).Version + + return downloadVersion } // See Client.EnsureVersion. @@ -527,7 +513,15 @@ func MustConstraint(v string) version.Constraints { // ensureVersion returns the path to a terraform binary of version v. // It will download this version if we don't have it. 
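DetectVersion above resolves range constraints through the hc-install releases API, where `List` returns matching versions in ascending order, so the last element is the newest satisfying release. A standalone sketch of that lookup (the constraint string is illustrative, and the call assumes network access to the releases endpoint):

```go
package main

import (
	"context"
	"fmt"

	version "github.com/hashicorp/go-version"
	"github.com/hashicorp/hc-install/product"
	"github.com/hashicorp/hc-install/releases"
)

func main() {
	vc, err := version.NewConstraint(">= 1.7.0, < 1.8.0")
	if err != nil {
		panic(err)
	}
	candidates, err := (&releases.Versions{
		Product:     product.Terraform,
		Constraints: vc,
	}).List(context.Background())
	if err != nil {
		panic(err)
	}
	if len(candidates) == 0 {
		panic("no matching Terraform versions")
	}
	// The last candidate is the highest version satisfying the constraint.
	newest := candidates[len(candidates)-1].(*releases.ExactVersion).Version
	fmt.Println("selected:", newest)
}
```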
-func ensureVersion(log logging.SimpleLogging, dl Downloader, versions map[string]string, v *version.Version, binDir string, downloadURL string, downloadsAllowed bool) (string, error) { +func ensureVersion( + log logging.SimpleLogging, + dl Downloader, + versions map[string]string, + v *version.Version, + binDir string, + downloadURL string, + downloadsAllowed bool, +) (string, error) { if binPath, ok := versions[v.String()]; ok { return binPath, nil } @@ -549,21 +543,25 @@ func ensureVersion(log logging.SimpleLogging, dl Downloader, versions map[string return dest, nil } if !downloadsAllowed { - return "", fmt.Errorf("Could not find terraform version %s in PATH or %s, and downloads are disabled", v.String(), binDir) + return "", fmt.Errorf( + "could not find terraform version %s in PATH or %s, and downloads are disabled", + v.String(), + binDir, + ) } - log.Info("Could not find terraform version %s in PATH or %s, downloading from %s", v.String(), binDir, downloadURL) - urlPrefix := fmt.Sprintf("%s/terraform/%s/terraform_%s", downloadURL, v.String(), v.String()) - binURL := fmt.Sprintf("%s_%s_%s.zip", urlPrefix, runtime.GOOS, runtime.GOARCH) - checksumURL := fmt.Sprintf("%s_SHA256SUMS", urlPrefix) - fullSrcURL := fmt.Sprintf("%s?checksum=file:%s", binURL, checksumURL) - if err := dl.GetFile(dest, fullSrcURL); err != nil { - return "", errors.Wrapf(err, "downloading terraform version %s at %q", v.String(), fullSrcURL) + log.Info("could not find terraform version %s in PATH or %s", v.String(), binDir) + + log.Info("using Hashicorp's 'hc-install' to download Terraform version %s from download URL %s", v.String(), downloadURL) + execPath, err := dl.Install(binDir, downloadURL, v) + + if err != nil { + return "", errors.Wrapf(err, "error downloading terraform version %s", v.String()) } - log.Info("Downloaded terraform %s to %s", v.String(), dest) - versions[v.String()] = dest - return dest, nil + log.Info("Downloaded terraform %s to %s", v.String(), execPath) + versions[v.String()] = execPath + return execPath, nil } // generateRCFile generates a .terraformrc file containing config for tfeToken @@ -627,13 +625,31 @@ var rcFileContents = `credentials "%s" { type DefaultDownloader struct{} -// See go-getter.GetFile. -func (d *DefaultDownloader) GetFile(dst, src string) error { - _, err := getter.GetFile(context.Background(), dst, src) - return err +func (d *DefaultDownloader) Install(dir string, downloadURL string, v *version.Version) (string, error) { + installer := install.NewInstaller() + execPath, err := installer.Install(context.Background(), []src.Installable{ + &releases.ExactVersion{ + Product: product.Terraform, + Version: v, + InstallDir: dir, + ApiBaseURL: downloadURL, + }, + }) + if err != nil { + return "", err + } + + // hc-install installs terraform binary as just "terraform". + // We need to rename it to terraform{version} to be consistent with current naming convention. + newPath := filepath.Join(dir, "terraform"+v.String()) + if err := os.Rename(execPath, newPath); err != nil { + return "", err + } + + return newPath, nil } -// See go-getter.GetFile. +// See go-getter.GetAny. 
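With the rename-to-`terraform{version}` convention above, calling the new downloader directly looks roughly like this. The target directory and URL are illustrative; the URL mirrors what the tests pass as `cmd.DefaultTFDownloadURL`, and the call needs network access:

```go
package main

import (
	"fmt"

	version "github.com/hashicorp/go-version"
	"github.com/runatlantis/atlantis/server/core/terraform"
)

func main() {
	d := &terraform.DefaultDownloader{}
	v := version.Must(version.NewVersion("1.7.4"))
	// Installs into ./bin (which must already exist) and returns the
	// renamed binary path, e.g. "bin/terraform1.7.4".
	path, err := d.Install("bin", "https://releases.hashicorp.com", v)
	if err != nil {
		panic(err)
	}
	fmt.Println("installed:", path)
}
```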
func (d *DefaultDownloader) GetAny(dst, src string) error { _, err := getter.GetAny(context.Background(), dst, src) return err diff --git a/server/core/terraform/terraform_client_test.go b/server/core/terraform/terraform_client_test.go index 29fccb4579..6903b75791 100644 --- a/server/core/terraform/terraform_client_test.go +++ b/server/core/terraform/terraform_client_test.go @@ -17,7 +17,7 @@ import ( "fmt" "os" "path/filepath" - "runtime" + "reflect" "strings" "testing" "time" @@ -211,22 +211,18 @@ func TestNewClient_DefaultTFFlagDownload(t *testing.T) { defer tempSetEnv(t, "PATH", "")() mockDownloader := mocks.NewMockDownloader() - When(mockDownloader.GetFile(Any[string](), Any[string]())).Then(func(params []pegomock.Param) pegomock.ReturnValues { - err := os.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v0.11.10\n'"), 0700) // #nosec G306 - return []pegomock.ReturnValue{err} + When(mockDownloader.Install(Any[string](), Any[string](), Any[*version.Version]())).Then(func(params []pegomock.Param) pegomock.ReturnValues { + binPath := filepath.Join(params[0].(string), "terraform0.11.10") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v0.11.10\n'"), 0700) // #nosec G306 + return []pegomock.ReturnValue{binPath, err} }) - c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, "https://my-mirror.releases.mycompany.com", mockDownloader, true, true, projectCmdOutputHandler) + c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true, true, projectCmdOutputHandler) Ok(t, err) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - baseURL := "https://my-mirror.releases.mycompany.com/terraform/0.11.10" - expURL := fmt.Sprintf("%s/terraform_0.11.10_%s_%s.zip?checksum=file:%s/terraform_0.11.10_SHA256SUMS", - baseURL, - runtime.GOOS, - runtime.GOARCH, - baseURL) - mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).GetFile(filepath.Join(tmp, "bin", "terraform0.11.10"), expURL) + + mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).Install(binDir, cmd.DefaultTFDownloadURL, version.Must(version.NewVersion("0.11.10"))) // Reset PATH so that it has sh. Ok(t, os.Setenv("PATH", orig)) @@ -257,26 +253,21 @@ func TestRunCommandWithVersion_DLsTF(t *testing.T) { RepoRelDir: ".", } + v, err := version.NewVersion("99.99.99") + Ok(t, err) + mockDownloader := mocks.NewMockDownloader() // Set up our mock downloader to write a fake tf binary when it's called. 
- baseURL := fmt.Sprintf("%s/terraform/99.99.99", cmd.DefaultTFDownloadURL) - expURL := fmt.Sprintf("%s/terraform_99.99.99_%s_%s.zip?checksum=file:%s/terraform_99.99.99_SHA256SUMS", - baseURL, - runtime.GOOS, - runtime.GOARCH, - baseURL) - When(mockDownloader.GetFile(filepath.Join(tmp, "bin", "terraform99.99.99"), expURL)).Then(func(params []pegomock.Param) pegomock.ReturnValues { - err := os.WriteFile(params[0].(string), []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 - return []pegomock.ReturnValue{err} + When(mockDownloader.Install(binDir, cmd.DefaultTFDownloadURL, v)).Then(func(params []pegomock.Param) pegomock.ReturnValues { + binPath := filepath.Join(params[0].(string), "terraform99.99.99") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 + return []pegomock.ReturnValue{binPath, err} }) c, err := terraform.NewClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true, true, projectCmdOutputHandler) Ok(t, err) Equals(t, "0.11.10", c.DefaultVersion().String()) - v, err := version.NewVersion("99.99.99") - Ok(t, err) - output, err := c.RunCommandWithVersion(ctx, tmp, []string{"terraform", "init"}, map[string]string{}, v, "") Assert(t, err == nil, "err: %s: %s", err, output) @@ -287,7 +278,7 @@ func TestRunCommandWithVersion_DLsTF(t *testing.T) { func TestEnsureVersion_downloaded(t *testing.T) { logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) - tmp, binDir, cacheDir := mkSubDirs(t) + _, binDir, cacheDir := mkSubDirs(t) projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() mockDownloader := mocks.NewMockDownloader() @@ -300,17 +291,49 @@ func TestEnsureVersion_downloaded(t *testing.T) { v, err := version.NewVersion("99.99.99") Ok(t, err) + When(mockDownloader.Install(binDir, cmd.DefaultTFDownloadURL, v)).Then(func(params []pegomock.Param) pegomock.ReturnValues { + binPath := filepath.Join(params[0].(string), "terraform99.99.99") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 + return []pegomock.ReturnValue{binPath, err} + }) + err = c.EnsureVersion(logger, v) Ok(t, err) - baseURL := fmt.Sprintf("%s/terraform/99.99.99", cmd.DefaultTFDownloadURL) - expURL := fmt.Sprintf("%s/terraform_99.99.99_%s_%s.zip?checksum=file:%s/terraform_99.99.99_SHA256SUMS", - baseURL, - runtime.GOOS, - runtime.GOARCH, - baseURL) - mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).GetFile(filepath.Join(tmp, "bin", "terraform99.99.99"), expURL) + mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).Install(binDir, cmd.DefaultTFDownloadURL, v) +} + +// Test that EnsureVersion downloads terraform from a custom URL. 
+func TestEnsureVersion_downloaded_customURL(t *testing.T) { + logger := logging.NewNoopLogger(t) + RegisterMockTestingT(t) + _, binDir, cacheDir := mkSubDirs(t) + projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() + + mockDownloader := mocks.NewMockDownloader() + downloadsAllowed := true + customURL := "http://releases.example.com" + + c, err := terraform.NewTestClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, customURL, mockDownloader, downloadsAllowed, true, projectCmdOutputHandler) + Ok(t, err) + + Equals(t, "0.11.10", c.DefaultVersion().String()) + + v, err := version.NewVersion("99.99.99") + Ok(t, err) + + When(mockDownloader.Install(binDir, customURL, v)).Then(func(params []pegomock.Param) pegomock.ReturnValues { + binPath := filepath.Join(params[0].(string), "terraform99.99.99") + err := os.WriteFile(binPath, []byte("#!/bin/sh\necho '\nTerraform v99.99.99\n'"), 0700) // #nosec G306 + return []pegomock.ReturnValue{binPath, err} + }) + + err = c.EnsureVersion(logger, v) + + Ok(t, err) + + mockDownloader.VerifyWasCalledEventually(Once(), 2*time.Second).Install(binDir, customURL, v) } // Test that EnsureVersion throws an error when downloads are disabled @@ -332,7 +355,7 @@ func TestEnsureVersion_downloaded_downloadingDisabled(t *testing.T) { Ok(t, err) err = c.EnsureVersion(logger, v) - ErrContains(t, "Could not find terraform version", err) + ErrContains(t, "could not find terraform version", err) ErrContains(t, "downloads are disabled", err) mockDownloader.VerifyWasCalled(Never()) } @@ -491,3 +514,55 @@ terraform { runDetectVersionTestCase(t, name+": Downloads Disabled", testCase, false) } } + +func TestInstall(t *testing.T) { + d := &terraform.DefaultDownloader{} + RegisterMockTestingT(t) + _, binDir, _ := mkSubDirs(t) + + v, _ := version.NewVersion("1.8.1") + + newPath, err := d.Install(binDir, cmd.DefaultTFDownloadURL, v) + if err != nil { + t.Fatalf("Unexpected error: %v", err) + } + + if _, err := os.Stat(newPath); os.IsNotExist(err) { + t.Errorf("Binary not found at %s", newPath) + } +} + +func TestExtractExactRegex(t *testing.T) { + logger := logging.NewNoopLogger(t) + RegisterMockTestingT(t) + _, binDir, cacheDir := mkSubDirs(t) + projectCmdOutputHandler := jobmocks.NewMockProjectCommandOutputHandler() + + mockDownloader := mocks.NewMockDownloader() + + c, err := terraform.NewTestClient(logger, binDir, cacheDir, "", "", "0.11.10", cmd.DefaultTFVersionFlag, cmd.DefaultTFDownloadURL, mockDownloader, true, true, projectCmdOutputHandler) + Ok(t, err) + + tests := []struct { + version string + want []string + }{ + {"= 1.2.3", []string{"1.2.3"}}, + {"=1.2.3", []string{"1.2.3"}}, + {"1.2.3", []string{"1.2.3"}}, + {"v1.2.3", nil}, + {">= 1.2.3", nil}, + {">=1.2.3", nil}, + {"<= 1.2.3", nil}, + {"<=1.2.3", nil}, + {"~> 1.2.3", nil}, + } + + for _, tt := range tests { + t.Run(tt.version, func(t *testing.T) { + if got := c.ExtractExactRegex(logger, tt.version); !reflect.DeepEqual(got, tt.want) { + t.Errorf("ExtractExactRegex() = %v, want %v", got, tt.want) + } + }) + } +} diff --git a/server/events/apply_command_runner.go b/server/events/apply_command_runner.go index 2a40c454d9..ee6bf8ab1f 100644 --- a/server/events/apply_command_runner.go +++ b/server/events/apply_command_runner.go @@ -60,6 +60,7 @@ type ApplyCommandRunner struct { // SilenceVCSStatusNoPlans is whether any plan should set commit status if no projects // are found silenceVCSStatusNoProjects bool + SilencePRComments []string } func (a *ApplyCommandRunner) Run(ctx 
*command.Context, cmd *CommentCommand) { diff --git a/server/events/apply_command_runner_test.go b/server/events/apply_command_runner_test.go index 62c29458fa..43c000801c 100644 --- a/server/events/apply_command_runner_test.go +++ b/server/events/apply_command_runner_test.go @@ -261,7 +261,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "2. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, { @@ -346,7 +346,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Never(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "2. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, { @@ -399,7 +399,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 4 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "2. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### " + "3. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + "4. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n4 projects, 3 successful, 0 failed, 1 errored", @@ -433,7 +433,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + "2. dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, { @@ -463,7 +463,7 @@ func TestApplyCommandRunner_ExecutionOrder(t *testing.T) { Once(), }, ExpComment: "Ran Apply for 2 projects:\n\n" + - "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + + "1. dir: `` workspace: ``\n1. dir: `` workspace: ``\n---\n\n### 1. dir: `` workspace: ``\n**Apply Error**\n```\nshabang\n```\n\n---\n### " + "2. 
dir: `` workspace: ``\n```diff\nGreat success!\n```\n\n---\n### Apply Summary\n\n2 projects, 1 successful, 0 failed, 1 errored", }, } diff --git a/server/events/command/context.go b/server/events/command/context.go index 1d6748915c..623c49588a 100644 --- a/server/events/command/context.go +++ b/server/events/command/context.go @@ -43,4 +43,7 @@ type Context struct { ClearPolicyApproval bool Trigger Trigger + + // API is true if plan/apply by API endpoints + API bool } diff --git a/server/events/command/name_test.go b/server/events/command/name_test.go index c45bba1db7..8d91941b46 100644 --- a/server/events/command/name_test.go +++ b/server/events/command/name_test.go @@ -8,6 +8,7 @@ import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) func TestName_TitleString(t *testing.T) { @@ -182,7 +183,7 @@ func TestParseCommandName(t *testing.T) { for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { got, err := command.ParseCommandName(tt.name) - assert.NoError(t, err) + require.NoError(t, err) assert.Equal(t, tt.exp, got) }) } diff --git a/server/events/command/project_context.go b/server/events/command/project_context.go index c06681ef82..6e221ff4d1 100644 --- a/server/events/command/project_context.go +++ b/server/events/command/project_context.go @@ -113,8 +113,8 @@ type ProjectContext struct { ClearPolicyApproval bool // DeleteSourceBranchOnMerge will attempt to allow a branch to be deleted when merged (AzureDevOps & GitLab Support Only) DeleteSourceBranchOnMerge bool - // RepoLocking will get a lock when plan - RepoLocking bool + // Repo locks mode: disabled, on plan or on apply + RepoLocksMode valid.RepoLocksMode // RepoConfigFile RepoConfigFile string // UUID for atlantis logs @@ -125,6 +125,7 @@ type ProjectContext struct { AbortOnExcecutionOrderFail bool // Allows custom policy check tools outside of Conftest to run in checks CustomPolicyCheck bool + SilencePRComments []string } // SetProjectScopeTags adds ProjectContext tags to a new returned scope. diff --git a/server/events/command/project_result.go b/server/events/command/project_result.go index 0d59c4e9ab..8f72f1d168 100644 --- a/server/events/command/project_result.go +++ b/server/events/command/project_result.go @@ -19,6 +19,7 @@ type ProjectResult struct { ImportSuccess *models.ImportSuccess StateRmSuccess *models.StateRmSuccess ProjectName string + SilencePRComments []string } // CommitStatus returns the vcs commit status of this project result. 
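Among the changes above, project_context.go replaces the boolean RepoLocking with valid.RepoLocksMode, whose new comment names three states: disabled, on plan, or on apply. A sketch of how such a tri-state mode can be modeled and consulted; the constant names and the shouldLock helper are assumptions for illustration, not copied from the valid package:

```go
package valid

// RepoLocksMode selects when repo/workspace locks are taken. The three
// states mirror the comment in the hunk above: disabled entirely, acquired
// on plan, or deferred until apply.
type RepoLocksMode int

const (
	RepoLocksDisabledMode RepoLocksMode = iota // never lock
	RepoLocksOnPlanMode                        // lock when planning (the old RepoLocking=true behavior)
	RepoLocksOnApplyMode                       // defer locking until apply
)

// shouldLock is a hypothetical helper showing how a runner could consult the
// mode: a lock taken on plan is still held when apply runs later.
func shouldLock(mode RepoLocksMode, isApply bool) bool {
	switch mode {
	case RepoLocksOnPlanMode:
		return true
	case RepoLocksOnApplyMode:
		return isApply
	default:
		return false
	}
}
```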
diff --git a/server/events/command_requirement_handler.go b/server/events/command_requirement_handler.go index 5c7b1c1d54..bf95a255ce 100644 --- a/server/events/command_requirement_handler.go +++ b/server/events/command_requirement_handler.go @@ -33,7 +33,7 @@ func (a *DefaultCommandRequirementHandler) ValidatePlanProject(repoDir string, c return "Pull request must be mergeable before running plan.", nil } case raw.UnDivergedRequirement: - if a.WorkingDir.HasDiverged(repoDir) { + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { return "Default branch must be rebased onto pull request before running plan.", nil } } @@ -60,7 +60,7 @@ func (a *DefaultCommandRequirementHandler) ValidateApplyProject(repoDir string, return "Pull request must be mergeable before running apply.", nil } case raw.UnDivergedRequirement: - if a.WorkingDir.HasDiverged(repoDir) { + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { return "Default branch must be rebased onto pull request before running apply.", nil } } @@ -95,7 +95,7 @@ func (a *DefaultCommandRequirementHandler) ValidateImportProject(repoDir string, return "Pull request must be mergeable before running import.", nil } case raw.UnDivergedRequirement: - if a.WorkingDir.HasDiverged(repoDir) { + if a.WorkingDir.HasDiverged(ctx.Log, repoDir) { return "Default branch must be rebased onto pull request before running import.", nil } } diff --git a/server/events/command_requirement_handler_test.go b/server/events/command_requirement_handler_test.go index 1c737f05aa..149e3a608b 100644 --- a/server/events/command_requirement_handler_test.go +++ b/server/events/command_requirement_handler_test.go @@ -9,6 +9,7 @@ import ( "github.com/runatlantis/atlantis/server/core/config/valid" "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/mocks" @@ -46,7 +47,7 @@ func TestAggregateApplyRequirements_ValidatePlanProject(t *testing.T) { ProjectPlanStatus: models.PassedPolicyCheckStatus, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(false) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false) }, wantErr: assert.NoError, }, @@ -76,7 +77,7 @@ func TestAggregateApplyRequirements_ValidatePlanProject(t *testing.T) { PlanRequirements: []string{raw.UnDivergedRequirement}, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(true) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true) }, wantFailure: "Default branch must be rebased onto pull request before running plan.", wantErr: assert.NoError, @@ -130,7 +131,7 @@ func TestAggregateApplyRequirements_ValidateApplyProject(t *testing.T) { ProjectPlanStatus: models.PassedPolicyCheckStatus, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(false) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false) }, wantErr: assert.NoError, }, @@ -184,7 +185,7 @@ func TestAggregateApplyRequirements_ValidateApplyProject(t *testing.T) { ApplyRequirements: []string{raw.UnDivergedRequirement}, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(true) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), 
Any[string]())).ThenReturn(true) }, wantFailure: "Default branch must be rebased onto pull request before running apply.", wantErr: assert.NoError, @@ -363,7 +364,7 @@ func TestAggregateApplyRequirements_ValidateImportProject(t *testing.T) { ProjectPlanStatus: models.PassedPolicyCheckStatus, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(false) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(false) }, wantErr: assert.NoError, }, @@ -393,7 +394,7 @@ func TestAggregateApplyRequirements_ValidateImportProject(t *testing.T) { ImportRequirements: []string{raw.UnDivergedRequirement}, }, setup: func(workingDir *mocks.MockWorkingDir) { - When(workingDir.HasDiverged(Any[string]())).ThenReturn(true) + When(workingDir.HasDiverged(Any[logging.SimpleLogging](), Any[string]())).ThenReturn(true) }, wantFailure: "Default branch must be rebased onto pull request before running import.", wantErr: assert.NoError, diff --git a/server/events/command_runner.go b/server/events/command_runner.go index b08690d1ec..14cdbce146 100644 --- a/server/events/command_runner.go +++ b/server/events/command_runner.go @@ -24,6 +24,7 @@ import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/events/vcs/gitea" "github.com/runatlantis/atlantis/server/logging" "github.com/runatlantis/atlantis/server/metrics" "github.com/runatlantis/atlantis/server/recovery" @@ -97,6 +98,7 @@ type DefaultCommandRunner struct { GithubPullGetter GithubPullGetter AzureDevopsPullGetter AzureDevopsPullGetter GitlabMergeRequestGetter GitlabMergeRequestGetter + GiteaPullGetter *gitea.GiteaClient // User config option: Disables autoplan when a pull request is opened or updated. 
DisableAutoplan bool DisableAutoplanLabel string @@ -386,6 +388,21 @@ func (c *DefaultCommandRunner) getGithubData(logger logging.SimpleLogging, baseR return pull, headRepo, nil } +func (c *DefaultCommandRunner) getGiteaData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) (models.PullRequest, models.Repo, error) { + if c.GiteaPullGetter == nil { + return models.PullRequest{}, models.Repo{}, errors.New("Atlantis not configured to support Gitea") + } + giteaPull, err := c.GiteaPullGetter.GetPullRequest(logger, baseRepo, pullNum) + if err != nil { + return models.PullRequest{}, models.Repo{}, errors.Wrap(err, "making pull request API call to Gitea") + } + pull, _, headRepo, err := c.EventParser.ParseGiteaPull(giteaPull) + if err != nil { + return pull, headRepo, errors.Wrap(err, "extracting required fields from comment data") + } + return pull, headRepo, nil +} + func (c *DefaultCommandRunner) getGitlabData(logger logging.SimpleLogging, baseRepo models.Repo, pullNum int) (models.PullRequest, error) { if c.GitlabMergeRequestGetter == nil { return models.PullRequest{}, errors.New("Atlantis not configured to support GitLab") @@ -446,6 +463,8 @@ func (c *DefaultCommandRunner) ensureValidRepoMetadata( pull = *maybePull case models.AzureDevops: pull, headRepo, err = c.getAzureDevopsData(log, baseRepo, pullNum) + case models.Gitea: + pull, headRepo, err = c.getGiteaData(log, baseRepo, pullNum) default: err = errors.New("Unknown VCS type - this is a bug") } diff --git a/server/events/command_runner_test.go b/server/events/command_runner_test.go index 1b5c77f461..8acea27b98 100644 --- a/server/events/command_runner_test.go +++ b/server/events/command_runner_test.go @@ -666,12 +666,16 @@ func TestRunUnlockCommand_VCSComment(t *testing.T) { State: tc.prState, } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) - deleteLockCommand.VerifyWasCalledOnce().DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num) + deleteLockCommand.VerifyWasCalledOnce().DeleteLocksByPull(Any[logging.SimpleLogging](), + Eq(testdata.GithubRepo.FullName), Eq(testdata.Pull.Num)) vcsClient.VerifyWasCalledOnce().CreateComment( Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("All Atlantis locks for this PR have been unlocked and plans discarded"), Eq("unlock")) @@ -688,11 +692,15 @@ func TestRunUnlockCommandFail_VCSComment(t *testing.T) { State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](),
Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) vcsClient.VerifyWasCalledOnce().CreateComment( Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Failed to delete PR locks"), Eq("unlock")) @@ -708,15 +716,20 @@ func TestRunUnlockCommandFail_DisableUnlockLabel(t *testing.T) { State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) - When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil) - - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) - - vcsClient.VerifyWasCalledOnce().CreateComment( - Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Not allowed to unlock PR with "+doNotUnlock+" label"), Eq("unlock")) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) + When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil) + + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) + + vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num), Eq("Not allowed to unlock PR with "+doNotUnlock+" label"), Eq("unlock")) } func TestRunUnlockCommandFail_GetLabelsFail(t *testing.T) { @@ -727,15 +740,20 @@ func TestRunUnlockCommandFail_GetLabelsFail(t *testing.T) { State: 
github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) - When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn(nil, errors.New("err")) - - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) - - vcsClient.VerifyWasCalledOnce().CreateComment( - Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), Eq("Failed to retrieve PR labels... Not unlocking"), Eq("unlock")) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) + When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(modelPull))).ThenReturn(nil, errors.New("err")) + + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) + + vcsClient.VerifyWasCalledOnce().CreateComment(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num), + Eq("Failed to retrieve PR labels... 
Not unlocking"), Eq("unlock")) } func TestRunUnlockCommandDoesntRetrieveLabelsIfDisableUnlockLabelNotSet(t *testing.T) { @@ -748,13 +766,18 @@ func TestRunUnlockCommandDoesntRetrieveLabelsIfDisableUnlockLabelNotSet(t *testi State: github.String("open"), } modelPull := models.PullRequest{BaseRepo: testdata.GithubRepo, State: models.OpenPullState, Num: testdata.Pull.Num} - When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(testdata.Pull.Num))).ThenReturn(pull, nil) - When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, testdata.GithubRepo, nil) - When(deleteLockCommand.DeleteLocksByPull(testdata.GithubRepo.FullName, testdata.Pull.Num)).ThenReturn(0, errors.New("err")) - When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil) + When(githubGetter.GetPullRequest(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(testdata.Pull.Num))).ThenReturn(pull, nil) + When(eventParsing.ParseGithubPull(Any[logging.SimpleLogging](), Eq(pull))).ThenReturn(modelPull, modelPull.BaseRepo, + testdata.GithubRepo, nil) + When(deleteLockCommand.DeleteLocksByPull(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo.FullName), + Eq(testdata.Pull.Num))).ThenReturn(0, errors.New("err")) + When(ch.VCSClient.GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), + Eq(modelPull))).ThenReturn([]string{doNotUnlock, "need-help"}, nil) unlockCommandRunner.DisableUnlockLabel = "" - ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, &events.CommentCommand{Name: command.Unlock}) + ch.RunCommentCommand(testdata.GithubRepo, &testdata.GithubRepo, nil, testdata.User, testdata.Pull.Num, + &events.CommentCommand{Name: command.Unlock}) vcsClient.VerifyWasCalled(Never()).GetPullLabels(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(modelPull)) } diff --git a/server/events/comment_parser.go b/server/events/comment_parser.go index c4ec87bb6d..3b3d2d3b0a 100644 --- a/server/events/comment_parser.go +++ b/server/events/comment_parser.go @@ -79,6 +79,7 @@ type CommentBuilder interface { type CommentParser struct { GithubUser string GitlabUser string + GiteaUser string BitbucketUser string AzureDevopsUser string ExecutableName string @@ -86,7 +87,7 @@ type CommentParser struct { } // NewCommentParser returns a CommentParser -func NewCommentParser(githubUser, gitlabUser, bitbucketUser, azureDevopsUser, executableName string, allowCommands []command.Name) *CommentParser { +func NewCommentParser(githubUser, gitlabUser, giteaUser, bitbucketUser, azureDevopsUser, executableName string, allowCommands []command.Name) *CommentParser { var commentAllowCommands []command.Name for _, acceptableCommand := range command.AllCommentCommands { for _, allowCommand := range allowCommands { @@ -100,6 +101,7 @@ func NewCommentParser(githubUser, gitlabUser, bitbucketUser, azureDevopsUser, ex return &CommentParser{ GithubUser: githubUser, GitlabUser: gitlabUser, + GiteaUser: giteaUser, BitbucketUser: bitbucketUser, AzureDevopsUser: azureDevopsUser, ExecutableName: executableName, @@ -174,6 +176,8 @@ func (e *CommentParser) Parse(rawComment string, vcsHost models.VCSHostType) Com vcsUser = e.GithubUser case models.Gitlab: vcsUser = e.GitlabUser + case models.Gitea: + vcsUser = e.GiteaUser case models.BitbucketCloud, models.BitbucketServer: vcsUser = e.BitbucketUser case 
models.AzureDevops: diff --git a/server/events/comment_parser_test.go b/server/events/comment_parser_test.go index 9c4b19d4f5..45c22e7e5f 100644 --- a/server/events/comment_parser_test.go +++ b/server/events/comment_parser_test.go @@ -28,6 +28,7 @@ import ( var commentParser = events.CommentParser{ GithubUser: "github-user", GitlabUser: "gitlab-user", + GiteaUser: "gitea-user", ExecutableName: "atlantis", AllowCommands: command.AllCommentCommands, } @@ -36,6 +37,7 @@ func TestNewCommentParser(t *testing.T) { type args struct { githubUser string gitlabUser string + giteaUser string bitbucketUser string azureDevopsUser string executableName string @@ -68,7 +70,7 @@ func TestNewCommentParser(t *testing.T) { } for _, tt := range tests { t.Run(tt.name, func(t *testing.T) { - assert.Equalf(t, tt.want, events.NewCommentParser(tt.args.githubUser, tt.args.gitlabUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands), "NewCommentParser(%v, %v, %v, %v, %v, %v)", tt.args.githubUser, tt.args.gitlabUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands) + assert.Equalf(t, tt.want, events.NewCommentParser(tt.args.githubUser, tt.args.gitlabUser, tt.args.giteaUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands), "NewCommentParser(%v, %v, %v, %v, %v, %v)", tt.args.githubUser, tt.args.gitlabUser, tt.args.bitbucketUser, tt.args.azureDevopsUser, tt.args.executableName, tt.args.allowCommands) }) } } @@ -266,6 +268,7 @@ func TestParse_InvalidCommand(t *testing.T) { cp := events.NewCommentParser( "github-user", "gitlab-user", + "gitea-user", "bitbucket-user", "azure-devops-user", "atlantis", diff --git a/server/events/delete_lock_command.go b/server/events/delete_lock_command.go index 89016503fb..1c9abcdda0 100644 --- a/server/events/delete_lock_command.go +++ b/server/events/delete_lock_command.go @@ -6,25 +6,24 @@ import ( "github.com/runatlantis/atlantis/server/logging" ) -//go:generate pegomock generate --package mocks -o mocks/mock_delete_lock_command.go DeleteLockCommand +//go:generate pegomock generate github.com/runatlantis/atlantis/server/events --package mocks -o mocks/mock_delete_lock_command.go DeleteLockCommand // DeleteLockCommand is the first step after a command request has been parsed. type DeleteLockCommand interface { - DeleteLock(id string) (*models.ProjectLock, error) - DeleteLocksByPull(repoFullName string, pullNum int) (int, error) + DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) + DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) } // DefaultDeleteLockCommand deletes a specific lock after a request from the LocksController. 
type DefaultDeleteLockCommand struct { Locker locking.Locker - Logger logging.SimpleLogging WorkingDir WorkingDir WorkingDirLocker WorkingDirLocker Backend locking.Backend } // DeleteLock handles deleting the lock at id -func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) { +func (l *DefaultDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) { lock, err := l.Locker.Unlock(id) if err != nil { return nil, err @@ -33,9 +32,9 @@ func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e return nil, nil } - removeErr := l.WorkingDir.DeletePlan(lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) + removeErr := l.WorkingDir.DeletePlan(logger, lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) if removeErr != nil { - l.Logger.Warn("Failed to delete plan: %s", removeErr) + logger.Warn("Failed to delete plan: %s", removeErr) return nil, removeErr } @@ -43,23 +42,23 @@ func (l *DefaultDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e } // DeleteLocksByPull handles deleting all locks for the pull request -func (l *DefaultDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) { +func (l *DefaultDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) { locks, err := l.Locker.UnlockByPull(repoFullName, pullNum) numLocks := len(locks) if err != nil { return numLocks, err } if numLocks == 0 { - l.Logger.Debug("No locks found for repo '%v', pull request: %v", repoFullName, pullNum) + logger.Debug("No locks found for repo '%v', pull request: %v", repoFullName, pullNum) return numLocks, nil } for i := 0; i < numLocks; i++ { lock := locks[i] - err := l.WorkingDir.DeletePlan(lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) + err := l.WorkingDir.DeletePlan(logger, lock.Pull.BaseRepo, lock.Pull, lock.Workspace, lock.Project.Path, lock.Project.ProjectName) if err != nil { - l.Logger.Warn("Failed to delete plan: %s", err) + logger.Warn("Failed to delete plan: %s", err) return numLocks, err } } diff --git a/server/events/delete_lock_command_test.go b/server/events/delete_lock_command_test.go index 75ffe0488b..2e652770b9 100644 --- a/server/events/delete_lock_command_test.go +++ b/server/events/delete_lock_command_test.go @@ -15,33 +15,30 @@ import ( func TestDeleteLock_LockerErr(t *testing.T) { t.Log("If there is an error retrieving the lock, we return the error") + logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) l := lockmocks.NewMockLocker() When(l.Unlock("id")).ThenReturn(nil, errors.New("err")) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopLogger(t), - } - _, err := dlc.DeleteLock("id") + dlc := events.DefaultDeleteLockCommand{Locker: l} + _, err := dlc.DeleteLock(logger, "id") ErrEquals(t, "err", err) } func TestDeleteLock_None(t *testing.T) { t.Log("If there is no lock at that ID we return nil") + logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) l := lockmocks.NewMockLocker() When(l.Unlock("id")).ThenReturn(nil, nil) - dlc := events.DefaultDeleteLockCommand{ - Locker: l, - Logger: logging.NewNoopLogger(t), - } - lock, err := dlc.DeleteLock("id") + dlc := events.DefaultDeleteLockCommand{Locker: l} + lock, err := dlc.DeleteLock(logger, "id") Ok(t, err) Assert(t, lock == nil, "lock was not nil") } func TestDeleteLock_Success(t 
*testing.T) { t.Log("Delete lock deletes successfully the plan file") + logger := logging.NewNoopLogger(t) RegisterMockTestingT(t) l := lockmocks.NewMockLocker() When(l.Unlock("id")).ThenReturn(&models.ProjectLock{}, nil) @@ -66,19 +63,20 @@ func TestDeleteLock_Success(t *testing.T) { Ok(t, err) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), Backend: db, WorkingDirLocker: workingDirLocker, WorkingDir: workingDir, } - lock, err := dlc.DeleteLock("id") + lock, err := dlc.DeleteLock(logger, "id") Ok(t, err) Assert(t, lock != nil, "lock was nil") - workingDir.VerifyWasCalledOnce().DeletePlan(pull.BaseRepo, pull, workspace, path, projectName) + workingDir.VerifyWasCalledOnce().DeletePlan(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull), Eq(workspace), + Eq(path), Eq(projectName)) } func TestDeleteLocksByPull_LockerErr(t *testing.T) { t.Log("If there is an error retrieving the lock, returned a failed status") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 RegisterMockTestingT(t) @@ -87,16 +85,17 @@ func TestDeleteLocksByPull_LockerErr(t *testing.T) { When(l.UnlockByPull(repoName, pullNum)).ThenReturn(nil, errors.New("err")) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) ErrEquals(t, "err", err) - workingDir.VerifyWasCalled(Never()).DeletePlan(Any[models.Repo](), Any[models.PullRequest](), Any[string](), Any[string](), Any[string]()) + workingDir.VerifyWasCalled(Never()).DeletePlan(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string](), Any[string](), Any[string]()) } func TestDeleteLocksByPull_None(t *testing.T) { t.Log("If there is no lock at that ID there is no error") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 RegisterMockTestingT(t) @@ -105,16 +104,17 @@ func TestDeleteLocksByPull_None(t *testing.T) { When(l.UnlockByPull(repoName, pullNum)).ThenReturn([]models.ProjectLock{}, nil) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) Ok(t, err) - workingDir.VerifyWasCalled(Never()).DeletePlan(Any[models.Repo](), Any[models.PullRequest](), Any[string](), Any[string](), Any[string]()) + workingDir.VerifyWasCalled(Never()).DeletePlan(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string](), Any[string](), Any[string]()) } func TestDeleteLocksByPull_SingleSuccess(t *testing.T) { t.Log("If a single lock is successfully deleted") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 path := "." 
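The delete_lock_command.go hunks above move the logger off DefaultDeleteLockCommand and into each method's signature, which is why every test now constructs a logger and passes it in, and why the mock verifications gained an Any[logging.SimpleLogging]() argument. A sketch of the shape implementations must now satisfy; the auditing wrapper itself is hypothetical, written only to show the updated contract:

```go
package events

import (
	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/logging"
)

// auditingDeleteLock is a hypothetical decorator over the updated
// DeleteLockCommand interface: because the logger now arrives per call, a
// wrapper can log with whatever request-scoped context the caller attached.
type auditingDeleteLock struct {
	wrapped DeleteLockCommand
}

func (a *auditingDeleteLock) DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) {
	logger.Info("deleting lock %q", id)
	return a.wrapped.DeleteLock(logger, id)
}

func (a *auditingDeleteLock) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) {
	logger.Info("deleting all locks for %s#%d", repoFullName, pullNum)
	return a.wrapped.DeleteLocksByPull(logger, repoFullName, pullNum)
}
```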
@@ -142,16 +142,17 @@ func TestDeleteLocksByPull_SingleSuccess(t *testing.T) { ) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) Ok(t, err) - workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path, projectName) + workingDir.VerifyWasCalled(Once()).DeletePlan(Any[logging.SimpleLogging](), Eq(pull.BaseRepo), Eq(pull), Eq(workspace), + Eq(path), Eq(projectName)) } func TestDeleteLocksByPull_MultipleSuccess(t *testing.T) { t.Log("If multiple locks are successfully deleted") + logger := logging.NewNoopLogger(t) repoName := "reponame" pullNum := 2 path1 := "path1" @@ -187,11 +188,10 @@ func TestDeleteLocksByPull_MultipleSuccess(t *testing.T) { ) dlc := events.DefaultDeleteLockCommand{ Locker: l, - Logger: logging.NewNoopLogger(t), WorkingDir: workingDir, } - _, err := dlc.DeleteLocksByPull(repoName, pullNum) + _, err := dlc.DeleteLocksByPull(logger, repoName, pullNum) Ok(t, err) - workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path1, projectName) - workingDir.VerifyWasCalled(Once()).DeletePlan(pull.BaseRepo, pull, workspace, path2, projectName) + workingDir.VerifyWasCalled(Once()).DeletePlan(logger, pull.BaseRepo, pull, workspace, path1, projectName) + workingDir.VerifyWasCalled(Once()).DeletePlan(logger, pull.BaseRepo, pull, workspace, path2, projectName) } diff --git a/server/events/event_parser.go b/server/events/event_parser.go index 988d051f27..8a02f476e1 100644 --- a/server/events/event_parser.go +++ b/server/events/event_parser.go @@ -20,6 +20,8 @@ import ( "path" "strings" + giteasdk "code.gitea.io/sdk/gitea" + "github.com/go-playground/validator/v10" "github.com/google/go-github/v59/github" lru "github.com/hashicorp/golang-lru/v2" @@ -29,6 +31,7 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud" "github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver" + "github.com/runatlantis/atlantis/server/events/vcs/gitea" "github.com/runatlantis/atlantis/server/logging" "github.com/xanzy/go-gitlab" ) @@ -337,6 +340,14 @@ type EventParsing interface { // ParseAzureDevopsRepo parses the response from the Azure DevOps API endpoint that // returns a repo into the Atlantis model. ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (models.Repo, error) + + ParseGiteaPullRequestEvent(event giteasdk.PullRequest) ( + pull models.PullRequest, pullEventType models.PullRequestEventType, + baseRepo models.Repo, headRepo models.Repo, user models.User, err error) + + ParseGiteaIssueCommentEvent(event gitea.GiteaIssueCommentPayload) (baseRepo models.Repo, user models.User, pullNum int, err error) + + ParseGiteaPull(pull *giteasdk.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) } // EventParser parses VCS events. 
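The EventParsing additions above (ParseGiteaPullRequestEvent, ParseGiteaIssueCommentEvent, ParseGiteaPull) give Gitea the same parsing surface as the other hosts. A hedged sketch of a webhook handler consuming ParseGiteaIssueCommentEvent; the handler and its JSON decoding are assumptions for illustration, while the parser call matches the interface signature added here:

```go
package events

import (
	"encoding/json"
	"net/http"

	"github.com/runatlantis/atlantis/server/events/vcs/gitea"
	"github.com/runatlantis/atlantis/server/logging"
)

// handleGiteaComment is a hypothetical handler showing the intended use of
// ParseGiteaIssueCommentEvent: decode the webhook payload, pull out the base
// repo, commenter, and pull number, then hand off to the comment-command flow.
func handleGiteaComment(logger logging.SimpleLogging, parser EventParsing, w http.ResponseWriter, r *http.Request) {
	var payload gitea.GiteaIssueCommentPayload
	if err := json.NewDecoder(r.Body).Decode(&payload); err != nil {
		http.Error(w, "invalid payload", http.StatusBadRequest)
		return
	}

	baseRepo, user, pullNum, err := parser.ParseGiteaIssueCommentEvent(payload)
	if err != nil {
		logger.Err("parsing Gitea comment event: %s", err)
		http.Error(w, "unparseable event", http.StatusBadRequest)
		return
	}

	logger.Info("Gitea comment by %s on %s#%d", user.Username, baseRepo.FullName, pullNum)
	// From here the flow matches the other hosts, e.g. handing baseRepo,
	// user, and pullNum to the comment-command runner.
}
```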
@@ -345,6 +356,8 @@ type EventParser struct { GithubToken string GitlabUser string GitlabToken string + GiteaUser string + GiteaToken string AllowDraftPRs bool BitbucketUser string BitbucketToken string @@ -357,6 +370,8 @@ func (e *EventParser) ParseAPIPlanRequest(vcsHostType models.VCSHostType, repoFu switch vcsHostType { case models.Github: return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GithubUser, e.GithubToken) + case models.Gitea: + return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GiteaUser, e.GiteaToken) case models.Gitlab: return models.NewRepo(vcsHostType, repoFullName, cloneURL, e.GitlabUser, e.GitlabToken) } @@ -611,6 +626,13 @@ func (e *EventParser) ParseGithubRepo(ghRepo *github.Repository) (models.Repo, e return models.NewRepo(models.Github, ghRepo.GetFullName(), ghRepo.GetCloneURL(), e.GithubUser, e.GithubToken) } +// ParseGiteaRepo parses the response from the Gitea API endpoint that +// returns a repo into the Atlantis model. +// See EventParsing for return value docs. +func (e *EventParser) ParseGiteaRepo(repo giteasdk.Repository) (models.Repo, error) { + return models.NewRepo(models.Gitea, repo.FullName, repo.CloneURL, e.GiteaUser, e.GiteaToken) +} + // ParseGitlabMergeRequestUpdateEvent dives deeper into Gitlab merge request update events func (e *EventParser) ParseGitlabMergeRequestUpdateEvent(event gitlab.MergeEvent) models.PullRequestEventType { // New commit to opened MR @@ -703,6 +725,27 @@ func (e *EventParser) ParseGitlabMergeRequestCommentEvent(event gitlab.MergeComm return } +func (e *EventParser) ParseGiteaIssueCommentEvent(comment gitea.GiteaIssueCommentPayload) (baseRepo models.Repo, user models.User, pullNum int, err error) { + baseRepo, err = e.ParseGiteaRepo(comment.Repository) + if err != nil { + return + } + if comment.Comment.Body == "" || comment.Comment.Poster.UserName == "" { + err = errors.New("comment.user.login is null") + return + } + commenterUsername := comment.Comment.Poster.UserName + user = models.User{ + Username: commenterUsername, + } + pullNum = int(comment.Issue.Index) + if pullNum == 0 { + err = errors.New("issue.number is null") + return + } + return +} + // ParseGitlabMergeRequest parses the merge requests and returns a pull request // model. We require passing in baseRepo because we can't get this information // from the merge request. The only caller of this function already has that @@ -989,3 +1032,121 @@ func (e *EventParser) ParseAzureDevopsRepo(adRepo *azuredevops.GitRepository) (m fullName := fmt.Sprintf("%s/%s/%s", owner, project, repo) return models.NewRepo(models.AzureDevops, fullName, cloneURL, e.AzureDevopsUser, e.AzureDevopsToken) } + +func (e *EventParser) ParseGiteaPullRequestEvent(event giteasdk.PullRequest) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) { + var pullEventType models.PullRequestEventType + + // Determine the event type based on the state of the pull request and whether it's merged. + switch { + case event.State == giteasdk.StateOpen: + pullEventType = models.OpenedPullEvent + case event.HasMerged: + pullEventType = models.ClosedPullEvent + default: + pullEventType = models.OtherPullEvent + } + + // Parse the base repository. 
+ baseRepo, err := models.NewRepo( + models.Gitea, + event.Base.Repository.FullName, + event.Base.Repository.CloneURL, + e.GiteaUser, + e.GiteaToken, + ) + if err != nil { + return models.PullRequest{}, models.OtherPullEvent, models.Repo{}, models.Repo{}, models.User{}, err + } + + // Parse the head repository. + headRepo, err := models.NewRepo( + models.Gitea, + event.Head.Repository.FullName, + event.Head.Repository.CloneURL, + e.GiteaUser, + e.GiteaToken, + ) + if err != nil { + return models.PullRequest{}, models.OtherPullEvent, models.Repo{}, models.Repo{}, models.User{}, err + } + + // Construct the pull request model. + pull := models.PullRequest{ + Num: int(event.Index), + URL: event.HTMLURL, + HeadCommit: event.Head.Sha, + HeadBranch: (*event.Head).Ref, + BaseBranch: event.Base.Ref, + Author: event.Poster.UserName, + BaseRepo: baseRepo, + } + + // Parse the user who made the pull request. + user := models.User{ + Username: event.Poster.UserName, + } + return pull, pullEventType, baseRepo, headRepo, user, nil +} + +// ParseGiteaPull parses the response from the Gitea API endpoint (not +// from a webhook) that returns a pull request. +// See EventParsing for return value docs. +func (e *EventParser) ParseGiteaPull(pull *giteasdk.PullRequest) (pullModel models.PullRequest, baseRepo models.Repo, headRepo models.Repo, err error) { + commit := pull.Head.Sha + if commit == "" { + err = errors.New("head.sha is null") + return + } + url := pull.HTMLURL + if url == "" { + err = errors.New("html_url is null") + return + } + headBranch := pull.Head.Ref + if headBranch == "" { + err = errors.New("head.ref is null") + return + } + baseBranch := pull.Base.Ref + if baseBranch == "" { + err = errors.New("base.ref is null") + return + } + + authorUsername := pull.Poster.UserName + if authorUsername == "" { + err = errors.New("user.login is null") + return + } + num := pull.Index + if num == 0 { + err = errors.New("number is null") + return + } + + baseRepo, err = e.ParseGiteaRepo(*pull.Base.Repository) + if err != nil { + return + } + headRepo, err = e.ParseGiteaRepo(*pull.Head.Repository) + if err != nil { + return + } + + pullState := models.ClosedPullState + if pull.State == "open" { + pullState = models.OpenPullState + } + + pullModel = models.PullRequest{ + Author: authorUsername, + HeadBranch: headBranch, + HeadCommit: commit, + URL: url, + Num: int(num), + State: pullState, + BaseRepo: baseRepo, + BaseBranch: baseBranch, + } + return +} diff --git a/server/events/github_app_working_dir.go b/server/events/github_app_working_dir.go index 85435f8590..a06599efe0 100644 --- a/server/events/github_app_working_dir.go +++ b/server/events/github_app_working_dir.go @@ -5,6 +5,7 @@ import ( "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/logging" ) const redactedReplacement = "://:@" @@ -19,7 +20,7 @@ type GithubAppWorkingDir struct { } // Clone writes a fresh token for Github App authentication -func (g *GithubAppWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (g *GithubAppWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { baseRepo := &p.BaseRepo // Realistically, this is a super brittle way of supporting clones using gh app installation tokens @@ -35,5 +36,5 @@ func (g *GithubAppWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, headRepo.CloneURL = 
strings.Replace(headRepo.CloneURL, "://:@", replacement, 1) headRepo.SanitizedCloneURL = strings.Replace(baseRepo.SanitizedCloneURL, redactedReplacement, replacement, 1) - return g.WorkingDir.Clone(headRepo, p, workspace) + return g.WorkingDir.Clone(logger, headRepo, p, workspace) } diff --git a/server/events/github_app_working_dir_test.go b/server/events/github_app_working_dir_test.go index 28983da870..78e64d4e0b 100644 --- a/server/events/github_app_working_dir_test.go +++ b/server/events/github_app_working_dir_test.go @@ -29,7 +29,6 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { DataDir: dataDir, CheckoutMerge: false, TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir), - Logger: logger, } defer disableSSLVerification()() @@ -46,7 +45,7 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { GithubHostname: testServer, } - cloneDir, _, err := gwd.Clone(models.Repo{}, models.PullRequest{ + cloneDir, _, err := gwd.Clone(logger, models.Repo{}, models.PullRequest{ BaseRepo: models.Repo{}, HeadBranch: "branch", }, "default") @@ -58,6 +57,8 @@ func TestClone_GithubAppNoneExisting(t *testing.T) { } func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { + logger := logging.NewNoopLogger(t) + RegisterMockTestingT(t) workingDir := eventMocks.NewMockWorkingDir() @@ -88,13 +89,12 @@ func TestClone_GithubAppSetsCorrectUrl(t *testing.T) { modifiedBaseRepo.SanitizedCloneURL = "https://github.com/runatlantis/atlantis.git" When(credentials.GetToken()).ThenReturn("token", nil) - When(workingDir.Clone(modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default")).ThenReturn( - "", true, nil, - ) + When(workingDir.Clone(Any[logging.SimpleLogging](), Eq(modifiedBaseRepo), Eq(models.PullRequest{BaseRepo: modifiedBaseRepo}), + Eq("default"))).ThenReturn("", true, nil) - _, success, _ := ghAppWorkingDir.Clone(headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") + _, success, _ := ghAppWorkingDir.Clone(logger, headRepo, models.PullRequest{BaseRepo: baseRepo}, "default") - workingDir.VerifyWasCalledOnce().Clone(modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default") + workingDir.VerifyWasCalledOnce().Clone(logger, modifiedBaseRepo, models.PullRequest{BaseRepo: modifiedBaseRepo}, "default") Assert(t, success == true, "clone url mutation error") } diff --git a/server/events/markdown_renderer.go b/server/events/markdown_renderer.go index 74a72c6719..5bbfc8a47e 100644 --- a/server/events/markdown_renderer.go +++ b/server/events/markdown_renderer.go @@ -72,6 +72,7 @@ type commonData struct { EnableDiffMarkdownFormat bool ExecutableName string HideUnchangedPlanComments bool + VcsRequestType string } // errData is data about an error response. @@ -170,13 +171,20 @@ func NewMarkdownRenderer( // Render formats the data into a markdown string. 
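The markdown_renderer.go hunk continuing below collapses Render's positional arguments (command name, sub-command, log text, verbosity, VCS host) into the command context plus the originating PullCommand. A sketch of the new call shape from a caller's perspective; renderFailure and its inputs are illustrative:

```go
package events

import "github.com/runatlantis/atlantis/server/events/command"

// renderFailure illustrates the reworked Render API: verbosity and the
// sub-command now come from the PullCommand, and the log shown in verbose
// mode is read from ctx.Log's history rather than passed as a string.
func renderFailure(renderer *MarkdownRenderer, ctx *command.Context, failure string) string {
	res := command.Result{Failure: failure}
	cmd := &CommentCommand{Name: command.Plan, Verbose: true}
	return renderer.Render(ctx, res, cmd)
}
```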
// nolint: interfacer -func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subCmd, log string, verbose bool, vcsHost models.VCSHostType) string { - commandStr := cases.Title(language.English).String(strings.Replace(cmdName.String(), "_", " ", -1)) +func (m *MarkdownRenderer) Render(ctx *command.Context, res command.Result, cmd PullCommand) string { + commandStr := cases.Title(language.English).String(strings.Replace(cmd.CommandName().String(), "_", " ", -1)) + var vcsRequestType string + if ctx.Pull.BaseRepo.VCSHost.Type == models.Gitlab { + vcsRequestType = "Merge Request" + } else { + vcsRequestType = "Pull Request" + } + common := commonData{ Command: commandStr, - SubCommand: subCmd, - Verbose: verbose, - Log: log, + SubCommand: cmd.SubCommandName(), + Verbose: cmd.IsVerbose(), + Log: ctx.Log.GetHistory(), PlansDeleted: res.PlansDeleted, DisableApplyAll: m.disableApplyAll || m.disableApply, DisableApply: m.disableApply, @@ -184,6 +192,7 @@ func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subC EnableDiffMarkdownFormat: m.enableDiffMarkdownFormat, ExecutableName: m.executableName, HideUnchangedPlanComments: m.hideUnchangedPlanComments, + VcsRequestType: vcsRequestType, } templates := m.markdownTemplates @@ -194,10 +203,12 @@ func (m *MarkdownRenderer) Render(res command.Result, cmdName command.Name, subC if res.Failure != "" { return m.renderTemplateTrimSpace(templates.Lookup("failureWithLog"), failureData{res.Failure, "", common}) } - return m.renderProjectResults(res.ProjectResults, common, vcsHost) + return m.renderProjectResults(ctx, res.ProjectResults, common) } -func (m *MarkdownRenderer) renderProjectResults(results []command.ProjectResult, common commonData, vcsHost models.VCSHostType) string { +func (m *MarkdownRenderer) renderProjectResults(ctx *command.Context, results []command.ProjectResult, common commonData) string { + vcsHost := ctx.Pull.BaseRepo.VCSHost.Type + var resultsTmplData []projectResultTmplData numPlanSuccesses := 0 numPolicyCheckSuccesses := 0 diff --git a/server/events/markdown_renderer_test.go b/server/events/markdown_renderer_test.go index eebd1a8b87..ace23c443a 100644 --- a/server/events/markdown_renderer_test.go +++ b/server/events/markdown_renderer_test.go @@ -23,6 +23,7 @@ import ( "github.com/runatlantis/atlantis/server/events" "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" . "github.com/runatlantis/atlantis/testing" ) @@ -60,17 +61,36 @@ func TestRenderErr(t *testing.T) { } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { res := command.Result{ Error: c.Error, } for _, verbose := range []bool{true, false} { t.Run(fmt.Sprintf("%s_%t", c.Description, verbose), func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, models.Github) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected)+"\n\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
", normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -88,34 +108,54 @@ func TestRenderFailure(t *testing.T) { "apply failure", command.Apply, "failure", - "**Apply Failed**: failure\n", + "**Apply Failed**: failure", }, { "plan failure", command.Plan, "failure", - "**Plan Failed**: failure\n", + "**Plan Failed**: failure", }, { "policy check failure", command.PolicyCheck, "failure", - "**Policy Check Failed**: failure\n", + "**Policy Check Failed**: failure", }, } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } + for _, c := range cases { res := command.Result{ Failure: c.Failure, } for _, verbose := range []bool{true, false} { t.Run(fmt.Sprintf("%s_%t", c.Description, verbose), func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, models.Github) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -124,11 +164,27 @@ func TestRenderFailure(t *testing.T) { func TestRenderErrAndFailure(t *testing.T) { r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } res := command.Result{ Error: errors.New("error"), Failure: "failure", } - s := r.Render(res, command.Plan, "", "", false, models.Github) + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + + s := r.Render(ctx, res, cmd) Equals(t, "**Plan Error**\n```\nerror\n```", normalize(s)) } @@ -147,7 +203,7 @@ func TestRenderProjectResults(t *testing.T) { "", []command.ProjectResult{}, models.Github, - "Ran Plan for 0 projects:\n\n\n", + "Ran Plan for 0 projects:\n\n", }, { "single successful plan", @@ -166,23 +222,32 @@ func TestRenderProjectResults(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -203,25 +268,33 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ - + $$$shell + atlantis plan -d path -w workspace + $$$ :twisted_rightwards_arrows: Upstream was modified, a new merge was performed. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -242,23 +315,32 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -296,7 +378,8 @@ $$$ }, }, models.Github, - `Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ $$$diff @@ -317,16 +400,24 @@ policy set: policy1: requires: 1 approval(s), have: 0. policy set: policy2: passed. $$$ * :heavy_check_mark: To **approve** this project, comment: - * $$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -357,7 +448,8 @@ $$$ }, }, models.Github, - `Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Policy Check for project: $projectname$ dir: $path$ workspace: $workspace$
<details><summary>Show Output</summary> @@ -382,26 +474,33 @@ FAIL - - main - WARNING: Null Resource creation is prohibit $$$ +</details>
#### Policy Approval Status: $$$ policy set: policy1: requires: 1 approval(s), have: 0. $$$ * :heavy_check_mark: To **approve** this project, comment: - * $$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ - - + $$$shell + atlantis plan -d path -w workspace + $$$ $$$ policy set: policy1: 2 tests, 1 passed, 0 warnings, 1 failure, 0 exceptions $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -420,7 +519,8 @@ $$$ }, }, models.Github, - `Ran Import for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Import for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff import-output @@ -429,7 +529,9 @@ $$$ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -448,7 +550,8 @@ $$$ }, }, models.Github, - `Ran State $rm$ for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran State $rm$ for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff state-rm-output @@ -457,7 +560,9 @@ $$$ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -472,7 +577,8 @@ $$$ }, }, models.Github, - `Ran Apply for dir: $path$ workspace: $workspace$ + ` +Ran Apply for dir: $path$ workspace: $workspace$ $$$diff success @@ -492,7 +598,8 @@ $$$ }, }, models.Github, - `Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff success @@ -527,10 +634,12 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -538,10 +647,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. 
project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -550,20 +663,28 @@ terraform-output2 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -605,10 +726,12 @@ $$$ }, }, models.Github, - `Ran Policy Check for 2 projects: + ` +Ran Policy Check for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ @@ -618,10 +741,14 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -632,16 +759,24 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -662,10 +797,12 @@ $$$ }, }, models.Github, - `Ran Apply for 2 projects: + ` +Ran Apply for 2 projects: 1. project: $projectname$ dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ +--- ### 1. 
project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff @@ -696,7 +833,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Error** $$$ @@ -716,7 +854,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Failed**: failure `, @@ -749,11 +888,13 @@ $$$ }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. project: $projectname$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -761,10 +902,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. dir: $path2$ workspace: $workspace$ @@ -782,10 +927,14 @@ $$$ 3 projects, 1 with changes, 0 with no changes, 2 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -833,11 +982,13 @@ $$$ }, }, models.Github, - `Ran Policy Check for 3 projects: + ` +Ran Policy Check for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. project: $projectname$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ @@ -847,10 +998,14 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. dir: $path2$ workspace: $workspace$ @@ -866,10 +1021,14 @@ $$$ policy set: policy1: requires: 1 approval(s), have: 0. $$$ * :heavy_check_mark: To **approve** this project, comment: - * $$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 3. 
project: $projectname$ dir: $path3$ workspace: $workspace$ @@ -879,12 +1038,18 @@ error $$$ --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * $atlantis approve_policies$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :heavy_check_mark: To **approve** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis approve_policies + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan$ + $$$shell + atlantis plan + $$$ `, }, { @@ -909,11 +1074,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -959,11 +1126,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -990,6 +1159,19 @@ $$$ } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", false) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -997,11 +1179,18 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, c.SubCommand, "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + SubName: c.SubCommand, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -1034,17 +1223,22 @@ func TestRenderProjectResultsDisableApplyAll(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1064,17 +1258,22 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1104,10 +1303,12 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -1115,10 +1316,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. 
project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -1127,10 +1332,14 @@ terraform-output2 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary @@ -1150,6 +1359,19 @@ $$$ "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -1157,11 +1379,17 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } @@ -1169,7 +1397,7 @@ $$$ } } -// Test that if disable apply is set then the apply footer is not added +// Test that if disable apply is set then the apply footer is not added func TestRenderProjectResultsDisableApply(t *testing.T) { cases := []struct { Description string @@ -1194,15 +1422,18 @@ func TestRenderProjectResultsDisableApply(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1222,15 +1453,18 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ `, }, { @@ -1260,19 +1494,23 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -1280,9 +1518,11 @@ $$$diff terraform-output2 $$$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url2) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url2) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary @@ -1303,6 +1543,19 @@ $$$ "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -1310,11 +1563,17 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } @@ -1342,8 +1601,21 @@ func TestRenderCustomPolicyCheckTemplate_DisableApplyAll(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } - rendered := r.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{ { Workspace: "workspace", @@ -1361,8 +1633,14 @@ func TestRenderCustomPolicyCheckTemplate_DisableApplyAll(t *testing.T) { }, }, }, - }, command.PolicyCheck, "", "log", false, models.Github) - exp = `Ran Policy Check for dir: $path$ workspace: $workspace$ + } + cmd := &events.CommentCommand{ + Name: command.PolicyCheck, + Verbose: false, + } + rendered := r.Render(ctx, res, cmd) + exp = ` +Ran Policy Check for dir: $path$ workspace: $workspace$ #### Policy Set: $policy1$ $$$diff @@ -1371,10 +1649,15 @@ $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To re-run policies **plan** this project again by commenting: - * $atlantis plan -d path -w workspace$` + $$$shell + atlantis plan -d path -w workspace + $$$ +` Equals(t, normalize(exp), normalize(rendered)) } @@ -1392,8 +1675,20 @@ func TestRenderProjectResults_DisableFolding(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) - - rendered := mr.Render(command.Result{ + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1401,7 +1696,12 @@ func TestRenderProjectResults_DisableFolding(t *testing.T) { Error: errors.New(strings.Repeat("line\n", 13)), }, }, - }, command.Plan, "", "log", false, models.Github) + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) Equals(t, false, strings.Contains(rendered, "\n
")) } @@ -1484,8 +1784,20 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) - - rendered := mr.Render(command.Result{ + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1493,10 +1805,16 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) { Error: errors.New(c.Output), }, }, - }, command.Plan, "", "log", false, c.VCSHost) + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) var exp string if c.ShouldWrap { - exp = `Ran Plan for dir: $.$ workspace: $default$ + exp = ` +Ran Plan for dir: $.$ workspace: $default$ **Plan Error**
<details><summary>Show Output</summary> @@ -1504,14 +1822,16 @@ func TestRenderProjectResults_WrappedErr(t *testing.T) { $$$ ` + c.Output + ` $$$ -</details>
` +</details>
+` } else { exp = `Ran Plan for dir: $.$ workspace: $default$ **Plan Error** $$$ ` + c.Output + ` -$$$` +$$$ +` } Equals(t, normalize(exp), normalize(rendered)) }) @@ -1523,69 +1843,80 @@ $$$` func TestRenderProjectResults_WrapSingleProject(t *testing.T) { cases := []struct { VCSHost models.VCSHostType + VcsRequestType string GitlabCommonMarkSupport bool Output string ShouldWrap bool }{ { - VCSHost: models.Github, - Output: strings.Repeat("line\n", 1), - ShouldWrap: false, + VCSHost: models.Github, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 1), + ShouldWrap: false, }, { - VCSHost: models.Github, - Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.", - ShouldWrap: true, + VCSHost: models.Github, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.", + ShouldWrap: true, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: false, Output: strings.Repeat("line\n", 1), ShouldWrap: false, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: false, Output: strings.Repeat("line\n", 13), ShouldWrap: false, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: true, Output: strings.Repeat("line\n", 1), ShouldWrap: false, }, { VCSHost: models.Gitlab, + VcsRequestType: "Merge Request", GitlabCommonMarkSupport: true, Output: strings.Repeat("line\n", 13) + "No changes. Infrastructure is up-to-date.", ShouldWrap: true, }, { - VCSHost: models.BitbucketCloud, - Output: strings.Repeat("line\n", 1), - ShouldWrap: false, + VCSHost: models.BitbucketCloud, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 1), + ShouldWrap: false, }, { - VCSHost: models.BitbucketCloud, - Output: strings.Repeat("line\n", 13), - ShouldWrap: false, + VCSHost: models.BitbucketCloud, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 13), + ShouldWrap: false, }, { - VCSHost: models.BitbucketServer, - Output: strings.Repeat("line\n", 1), - ShouldWrap: false, + VCSHost: models.BitbucketServer, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 1), + ShouldWrap: false, }, { - VCSHost: models.BitbucketServer, - Output: strings.Repeat("line\n", 13), - ShouldWrap: false, + VCSHost: models.BitbucketServer, + VcsRequestType: "Pull Request", + Output: strings.Repeat("line\n", 13), + ShouldWrap: false, }, } for _, c := range cases { - for _, cmd := range []command.Name{command.Plan, command.Apply} { - t.Run(fmt.Sprintf("%s_%s_%v", c.VCSHost.String(), cmd.String(), c.ShouldWrap), + for _, cmdName := range []command.Name{command.Plan, command.Apply} { + t.Run(fmt.Sprintf("%s_%s_%v", c.VCSHost.String(), cmdName.String(), c.ShouldWrap), func(t *testing.T) { mr := events.NewMarkdownRenderer( c.GitlabCommonMarkSupport, // gitlabSupportsCommonMark @@ -1598,8 +1929,22 @@ func TestRenderProjectResults_WrapSingleProject(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } + var pr command.ProjectResult - switch cmd { + switch cmdName { case command.Plan: pr = command.ProjectResult{ RepoRelDir: ".", @@ -1618,58 +1963,84 @@ func TestRenderProjectResults_WrapSingleProject(t *testing.T) { 
ApplySuccess: c.Output, } } - rendered := mr.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{pr}, - }, cmd, "", "log", false, c.VCSHost) + } + cmd := &events.CommentCommand{ + Name: cmdName, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) // Check result. var exp string - switch cmd { + switch cmdName { case command.Plan: if c.ShouldWrap { - exp = `Ran Plan for dir: $.$ workspace: $default$ + exp = ` +Ran Plan for dir: $.$ workspace: $default$
<details><summary>Show Output</summary> $$$diff ` + strings.TrimSpace(c.Output) + ` $$$ +</details>
* :arrow_forward: To **apply** this plan, comment: - * $applycmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + applycmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $replancmd$ - + $$$shell + replancmd + $$$ No changes. Infrastructure is up-to-date. --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$` +* :fast_forward: To **apply** all unapplied plans from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis unlock + $$$ +` } else { - exp = `Ran Plan for dir: $.$ workspace: $default$ + exp = ` +Ran Plan for dir: $.$ workspace: $default$ $$$diff ` + strings.TrimSpace(c.Output) + ` $$$ * :arrow_forward: To **apply** this plan, comment: - * $applycmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + applycmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $replancmd$ + $$$shell + replancmd + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$` +* :fast_forward: To **apply** all unapplied plans from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this ` + c.VcsRequestType + `, comment: + $$$shell + atlantis unlock + $$$ +` } case command.Apply: if c.ShouldWrap { - exp = `Ran Apply for dir: $.$ workspace: $default$ + exp = ` +Ran Apply for dir: $.$ workspace: $default$
<details><summary>Show Output</summary> @@ -1677,13 +2048,16 @@ $$$diff ` + strings.TrimSpace(c.Output) + ` $$$ -</details>
` +</details> +` } else { - exp = `Ran Apply for dir: $.$ workspace: $default$ + exp = ` +Ran Apply for dir: $.$ workspace: $default$ $$$diff ` + strings.TrimSpace(c.Output) + ` -$$$` +$$$ +` } } @@ -1705,8 +2079,21 @@ func TestRenderProjectResults_MultiProjectApplyWrapped(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } tfOut := strings.Repeat("line\n", 13) - rendered := mr.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1719,11 +2106,18 @@ func TestRenderProjectResults_MultiProjectApplyWrapped(t *testing.T) { ApplySuccess: tfOut, }, }, - }, command.Apply, "", "log", false, models.Github) + } + cmd := &events.CommentCommand{ + Name: command.Apply, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) + exp := ` +Ran Apply for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. dir: $.$ workspace: $staging$
<details><summary>Show Output</summary> @@ -1764,8 +2158,21 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } tfOut := strings.Repeat("line\n", 13) + "Plan: 1 to add, 0 to change, 0 to destroy." - rendered := mr.Render(command.Result{ + res := command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1788,11 +2195,18 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) { }, }, }, - }, command.Plan, "", "log", false, models.Github) + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, res, cmd) + exp := ` +Ran Plan for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. dir: $.$ workspace: $staging$
<details><summary>Show Output</summary> @@ -1800,13 +2214,17 @@ func TestRenderProjectResults_MultiProjectPlanWrapped(t *testing.T) { $$$diff ` + tfOut + ` $$$ +</details>
* :arrow_forward: To **apply** this plan, comment: - * $staging-apply-cmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](staging-lock-url) + $$$shell + staging-apply-cmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](staging-lock-url) * :repeat: To **plan** this project again, comment: - * $staging-replan-cmd$ -</details>
+ $$$shell + staging-replan-cmd + $$$ Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -1816,13 +2234,17 @@ Plan: 1 to add, 0 to change, 0 to destroy. $$$diff ` + tfOut + ` $$$ + * :arrow_forward: To **apply** this plan, comment: - * $production-apply-cmd$ -* :put_litter_in_its_place: To **delete** this plan click [here](production-lock-url) + $$$shell + production-apply-cmd + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](production-lock-url) * :repeat: To **plan** this project again, comment: - * $production-replan-cmd$ - + $$$shell + production-replan-cmd + $$$ Plan: 1 to add, 0 to change, 0 to destroy. --- @@ -1830,10 +2252,14 @@ Plan: 1 to add, 0 to change, 0 to destroy. 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ ` Equals(t, normalize(exp), normalize(rendered)) } @@ -1842,11 +2268,11 @@ Plan: 1 to add, 0 to change, 0 to destroy. // all the plans as a result. func TestRenderProjectResults_PlansDeleted(t *testing.T) { cases := map[string]struct { - cr command.Result + res command.Result exp string }{ "one failure": { - cr: command.Result{ + res: command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1856,12 +2282,14 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { }, PlansDeleted: true, }, - exp: `Ran Plan for dir: $.$ workspace: $staging$ + exp: ` +Ran Plan for dir: $.$ workspace: $staging$ -**Plan Failed**: failure`, +**Plan Failed**: failure +`, }, "two failures": { - cr: command.Result{ + res: command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1876,10 +2304,12 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { }, PlansDeleted: true, }, - exp: `Ran Plan for 2 projects: + exp: ` +Ran Plan for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. dir: $.$ workspace: $staging$ **Plan Failed**: failure @@ -1895,7 +2325,7 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { `, }, "one failure, one success": { - cr: command.Result{ + res: command.Result{ ProjectResults: []command.ProjectResult{ { RepoRelDir: ".", @@ -1915,10 +2345,12 @@ func TestRenderProjectResults_PlansDeleted(t *testing.T) { }, PlansDeleted: true, }, - exp: `Ran Plan for 2 projects: + exp: ` +Ran Plan for 2 projects: 1. dir: $.$ workspace: $staging$ 1. dir: $.$ workspace: $production$ +--- ### 1. 
dir: $.$ workspace: $staging$ **Plan Failed**: failure @@ -1952,7 +2384,24 @@ This plan was not saved because one or more projects failed and automerge requir "atlantis", // executableName false, // hideUnchangedPlanComments ) - rendered := mr.Render(c.cr, command.Plan, "", "log", false, models.Github) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } + cmd := &events.CommentCommand{ + Name: command.Plan, + Verbose: false, + } + rendered := mr.Render(ctx, c.res, cmd) Equals(t, normalize(c.exp), normalize(rendered)) }) } @@ -1972,7 +2421,7 @@ func TestRenderProjectResultsWithRepoLockingDisabled(t *testing.T) { command.Plan, []command.ProjectResult{}, models.Github, - "Ran Plan for 0 projects:\n\n\n", + "Ran Plan for 0 projects:\n\n", }, { "single successful plan", @@ -1990,22 +2439,31 @@ func TestRenderProjectResultsWithRepoLockingDisabled(t *testing.T) { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2025,24 +2483,32 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ - + $$$shell + atlantis plan -d path -w workspace + $$$ :twisted_rightwards_arrows: Upstream was modified, a new merge was performed. 
--- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2062,22 +2528,31 @@ $$$ }, }, models.Github, - `Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Plan for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2091,7 +2566,8 @@ $$$ }, }, models.Github, - `Ran Apply for dir: $path$ workspace: $workspace$ + ` +Ran Apply for dir: $path$ workspace: $workspace$ $$$diff success @@ -2110,7 +2586,8 @@ $$$ }, }, models.Github, - `Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ + ` +Ran Apply for project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff success @@ -2144,10 +2621,12 @@ $$$ }, }, models.Github, - `Ran Plan for 2 projects: + ` +Ran Plan for 2 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2155,9 +2634,13 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ @@ -2166,19 +2649,27 @@ terraform-output2 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path2 -w workspace$ + $$$shell + atlantis apply -d path2 -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path2 -w workspace$ + $$$shell + atlantis plan -d path2 -w workspace + $$$ --- ### Plan Summary 2 projects, 2 with changes, 0 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2198,10 +2689,12 @@ $$$ }, }, models.Github, - `Ran Apply for 2 projects: + ` +Ran Apply for 2 projects: 1. 
project: $projectname$ dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ +--- ### 1. project: $projectname$ dir: $path$ workspace: $workspace$ $$$diff @@ -2231,7 +2724,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Error** $$$ @@ -2250,7 +2744,8 @@ $$$ }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$ **Plan Failed**: failure `, @@ -2282,11 +2777,13 @@ $$$ }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. project: $projectname$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2294,9 +2791,13 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ + $$$shell + atlantis apply -d path -w workspace + $$$ * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 2. dir: $path2$ workspace: $workspace$ @@ -2314,10 +2815,14 @@ $$$ 3 projects, 1 with changes, 0 with no changes, 2 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -2341,11 +2846,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2390,11 +2897,13 @@ $$$ }, }, models.Github, - `Ran Apply for 3 projects: + ` +Ran Apply for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. dir: $path2$ workspace: $workspace$ 1. dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2431,6 +2940,19 @@ $$$ "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } for _, c := range cases { t.Run(c.Description, func(t *testing.T) { res := command.Result{ @@ -2438,11 +2960,17 @@ $$$ } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected+ + fmt.Sprintf("
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log)), normalize(s)) } }) } @@ -2450,7 +2978,145 @@ $$$ } } -const tfOutput = `An execution plan has been generated and is shown below. +func TestRenderProjectResultsWithGitLab(t *testing.T) { + cases := []struct { + Description string + Command command.Name + ProjectResults []command.ProjectResult + VCSHost models.VCSHostType + Expected string + }{ + { + "multiple successful plans", + command.Plan, + []command.ProjectResult{ + { + Workspace: "workspace", + RepoRelDir: "path", + PlanSuccess: &models.PlanSuccess{ + TerraformOutput: "terraform-output", + LockURL: "lock-url", + ApplyCmd: "atlantis apply -d path -w workspace", + RePlanCmd: "atlantis plan -d path -w workspace", + }, + }, + { + Workspace: "workspace", + RepoRelDir: "path2", + ProjectName: "projectname", + PlanSuccess: &models.PlanSuccess{ + TerraformOutput: "terraform-output2", + LockURL: "lock-url2", + ApplyCmd: "atlantis apply -d path2 -w workspace", + RePlanCmd: "atlantis plan -d path2 -w workspace", + }, + }, + }, + models.Gitlab, + ` +Ran Plan for 2 projects: + +1. dir: $path$ workspace: $workspace$ +1. project: $projectname$ dir: $path2$ workspace: $workspace$ +--- + +### 1. dir: $path$ workspace: $workspace$ +$$$diff +terraform-output +$$$ + +* :arrow_forward: To **apply** this plan, comment: + $$$shell + atlantis apply -d path -w workspace + $$$ +* :repeat: To **plan** this project again, comment: + $$$shell + atlantis plan -d path -w workspace + $$$ + +--- +### 2. project: $projectname$ dir: $path2$ workspace: $workspace$ +$$$diff +terraform-output2 +$$$ + +* :arrow_forward: To **apply** this plan, comment: + $$$shell + atlantis apply -d path2 -w workspace + $$$ +* :repeat: To **plan** this project again, comment: + $$$shell + atlantis plan -d path2 -w workspace + $$$ + +--- +### Plan Summary + +2 projects, 2 with changes, 0 with no changes, 0 failed + +* :fast_forward: To **apply** all unapplied plans from this Merge Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Merge Request, comment: + $$$shell + atlantis unlock + $$$ +`, + }, + } + + r := events.NewMarkdownRenderer( + false, // gitlabSupportsCommonMark + false, // disableApplyAll + false, // disableApply + false, // disableMarkdownFolding + true, // disableRepoLocking + false, // enableDiffMarkdownFormat + "", // MarkdownTemplateOverridesDir + "atlantis", // executableName + false, // hideUnchangedPlanComments + ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + for _, c := range cases { + t.Run(c.Description, func(t *testing.T) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } + res := command.Result{ + ProjectResults: c.ProjectResults, + } + for _, verbose := range []bool{true, false} { + t.Run(c.Description, func(t *testing.T) { + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) + if !verbose { + Equals(t, normalize(c.Expected), normalize(s)) + } else { + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) + } + }) + } + }) + } +} + +const tfOutput = ` +An execution plan has been generated and is shown below. Resource actions are indicated with the following symbols: ~ update in-place -/+ destroy and then create replacement @@ -2657,7 +3323,8 @@ var cases = []struct { }, }, models.Github, - `Ran Plan for dir: $path$ workspace: $workspace$ + ` +Ran Plan for dir: $path$ workspace: $workspace$
<details><summary>Show Output</summary> @@ -2845,11 +3512,13 @@ Terraform will perform the following actions: Plan: 1 to add, 2 to change, 1 to destroy. $$$ +</details>
-* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ - + $$$shell + atlantis plan -d path -w workspace + $$$ Plan: 1 to add, 2 to change, 1 to destroy. `, }, @@ -2867,19 +3536,38 @@ func TestRenderProjectResultsWithEnableDiffMarkdownFormat(t *testing.T) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) for _, c := range cases { t.Run(c.Description, func(t *testing.T) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: models.Github, + }, + }, + }, + } res := command.Result{ ProjectResults: c.ProjectResults, } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>
"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n
<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } @@ -2903,17 +3591,34 @@ func BenchmarkRenderProjectResultsWithEnableDiffMarkdownFormat(b *testing.B) { "atlantis", // executableName false, // hideUnchangedPlanComments ) + logger := logging.NewNoopLogger(b).WithHistory() + logText := "log" + logger.Info(logText) for _, c := range cases { b.Run(c.Description, func(b *testing.B) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } res := command.Result{ ProjectResults: c.ProjectResults, } for _, verbose := range []bool{true, false} { b.Run(fmt.Sprintf("verbose %t", verbose), func(b *testing.B) { + cmd := &events.CommentCommand{ + Name: c.Command, + Verbose: verbose, + } b.ReportAllocs() for i := 0; i < b.N; i++ { - render = r.Render(res, c.Command, "", "log", verbose, c.VCSHost) + render = r.Render(ctx, res, cmd) } Render = render }) @@ -2970,11 +3675,13 @@ func TestRenderProjectResultsHideUnchangedPlans(t *testing.T) { }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ 1. project: $projectname2$ dir: $path3$ workspace: $workspace$ +--- ### 1. dir: $path$ workspace: $workspace$ $$$diff @@ -2982,10 +3689,14 @@ terraform-output $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url) + $$$shell + atlantis apply -d path -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path -w workspace$ + $$$shell + atlantis plan -d path -w workspace + $$$ --- ### 3. project: $projectname2$ dir: $path3$ workspace: $workspace$ @@ -2994,20 +3705,28 @@ terraform-output3 $$$ * :arrow_forward: To **apply** this plan, comment: - * $atlantis apply -d path3 -w workspace$ -* :put_litter_in_its_place: To **delete** this plan click [here](lock-url3) + $$$shell + atlantis apply -d path3 -w workspace + $$$ +* :put_litter_in_its_place: To **delete** this plan and lock, click [here](lock-url3) * :repeat: To **plan** this project again, comment: - * $atlantis plan -d path3 -w workspace$ + $$$shell + atlantis plan -d path3 -w workspace + $$$ --- ### Plan Summary 3 projects, 2 with changes, 1 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, { @@ -3049,37 +3768,64 @@ $$$ }, }, models.Github, - `Ran Plan for 3 projects: + ` +Ran Plan for 3 projects: 1. dir: $path$ workspace: $workspace$ 1. project: $projectname$ dir: $path2$ workspace: $workspace$ 1. 
project: $projectname2$ dir: $path3$ workspace: $workspace$ +--- ### Plan Summary 3 projects, 0 with changes, 3 with no changes, 0 failed -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * $atlantis apply$ -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * $atlantis unlock$ +* :fast_forward: To **apply** all unapplied plans from this Pull Request, comment: + $$$shell + atlantis apply + $$$ +* :put_litter_in_its_place: To **delete** all plans and locks from this Pull Request, comment: + $$$shell + atlantis unlock + $$$ `, }, } r := events.NewMarkdownRenderer(false, false, false, false, false, false, "", "atlantis", true) + logger := logging.NewNoopLogger(t).WithHistory() + logText := "log" + logger.Info(logText) + for _, c := range cases { t.Run(c.Description, func(t *testing.T) { + ctx := &command.Context{ + Log: logger, + Pull: models.PullRequest{ + BaseRepo: models.Repo{ + VCSHost: models.VCSHost{ + Type: c.VCSHost, + }, + }, + }, + } res := command.Result{ ProjectResults: c.ProjectResults, } for _, verbose := range []bool{true, false} { t.Run(c.Description, func(t *testing.T) { - s := r.Render(res, c.Command, c.SubCommand, "log", verbose, c.VCSHost) + cmd := &events.CommentCommand{ + Name: c.Command, + SubName: c.SubCommand, + Verbose: verbose, + } + s := r.Render(ctx, res, cmd) if !verbose { Equals(t, normalize(c.Expected), normalize(s)) } else { - Equals(t, normalize(c.Expected+"\n
<details><summary>Log</summary>\n  <p>\n\n```\nlog```\n</p></details>"), normalize(s)) + log := fmt.Sprintf("[INFO] %s", logText) + Equals(t, normalize(c.Expected)+ + fmt.Sprintf("\n<details><summary>Log</summary>\n  <p>\n\n```\n%s\n```\n</p></details>
", log), normalize(s)) } }) } diff --git a/server/events/mock_workingdir_test.go b/server/events/mock_workingdir_test.go index 30b344ea3a..d298b2cee7 100644 --- a/server/events/mock_workingdir_test.go +++ b/server/events/mock_workingdir_test.go @@ -4,10 +4,12 @@ package events import ( - pegomock "github.com/petergtz/pegomock/v4" - models "github.com/runatlantis/atlantis/server/events/models" "reflect" "time" + + pegomock "github.com/petergtz/pegomock/v4" + models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" ) type MockWorkingDir struct { @@ -25,11 +27,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{headRepo, p, workspace} + params := []pegomock.Param{logger, headRepo, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -48,11 +50,11 @@ func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, wo return ret0, ret1, ret2 } -func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { +func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p} + params := []pegomock.Param{logger, r, p} result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -63,11 +65,11 @@ func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { return ret0 } -func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error { +func (mock *MockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace} + params := []pegomock.Param{logger, r, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -78,11 +80,11 @@ func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullReque return ret0 } -func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error { +func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace, path, projectName} + params := []pegomock.Param{logger, r, p, workspace, path, projectName} result := pegomock.GetGenericMockFrom(mock).Invoke("DeletePlan", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -93,11 +95,11 @@ func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, work return ret0 } -func (mock *MockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) { +func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace} + params := []pegomock.Param{logger, r, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("GetGitUntrackedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 []string var ret1 error @@ -150,11 +152,11 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w return ret0, ret1 } -func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool { +func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{cloneDir} + params := []pegomock.Param{logger, cloneDir} result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) var ret0 bool if len(result) != 0 { @@ -210,8 +212,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{headRepo, p, workspace} +func (verifier *VerifierMockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{logger, headRepo, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -221,32 +223,36 @@ type MockWorkingDir_Clone_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - headRepo, p, workspace := c.GetAllCapturedArguments() - return headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, headRepo, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 
[]models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { - params := []pegomock.Param{r, p} +func (verifier *VerifierMockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { + params := []pegomock.Param{logger, r, p} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Delete", params, verifier.timeout) return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -256,28 +262,32 @@ type MockWorkingDir_Delete_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] +func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, r, p := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1] } -func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p 
models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, verifier.timeout) return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -287,32 +297,36 @@ type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { - params := []pegomock.Param{r, p, workspace, path, projectName} +func (verifier *VerifierMockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace, path, projectName} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeletePlan", params, verifier.timeout) return &MockWorkingDir_DeletePlan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -322,25 +336,25 @@ type MockWorkingDir_DeletePlan_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string, string, string) { - r, p, 
workspace, path, projectName := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, string, string) { + logger, r, p, workspace, path, projectName := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string, _param3 []string, _param4 []string) { +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 []string, _param5 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) } _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { @@ -350,12 +364,16 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments( for u, param := range params[4] { _param4[u] = param.(string) } + _param5 = make([]string, len(c.methodInvocations)) + for u, param := range params[5] { + _param5[u] = param.(string) + } } return } -func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", params, verifier.timeout) return &MockWorkingDir_GetGitUntrackedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -365,25 +383,29 @@ type MockWorkingDir_GetGitUntrackedFiles_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := 
c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return @@ -455,8 +477,8 @@ func (c *MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArgumen return } -func (verifier *VerifierMockWorkingDir) HasDiverged(cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { - params := []pegomock.Param{cloneDir} +func (verifier *VerifierMockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { + params := []pegomock.Param{logger, cloneDir} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout) return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -466,17 +488,21 @@ type MockWorkingDir_HasDiverged_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() string { - cloneDir := c.GetAllCapturedArguments() - return cloneDir[len(cloneDir)-1] +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) { + logger, cloneDir := c.GetAllCapturedArguments() + return logger[len(logger)-1], cloneDir[len(cloneDir)-1] } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(string) + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(string) } } return diff --git a/server/events/mocks/mock_delete_lock_command.go b/server/events/mocks/mock_delete_lock_command.go index 
ce1afd3b72..a8511f28c8 100644 --- a/server/events/mocks/mock_delete_lock_command.go +++ b/server/events/mocks/mock_delete_lock_command.go @@ -6,6 +6,7 @@ package mocks import ( pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -25,11 +26,11 @@ func NewMockDeleteLockCommand(options ...pegomock.Option) *MockDeleteLockCommand func (mock *MockDeleteLockCommand) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockDeleteLockCommand) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, error) { +func (mock *MockDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) (*models.ProjectLock, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockDeleteLockCommand().") } - params := []pegomock.Param{id} + params := []pegomock.Param{logger, id} result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLock", params, []reflect.Type{reflect.TypeOf((**models.ProjectLock)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 *models.ProjectLock var ret1 error @@ -44,11 +45,11 @@ func (mock *MockDeleteLockCommand) DeleteLock(id string) (*models.ProjectLock, e return ret0, ret1 } -func (mock *MockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) (int, error) { +func (mock *MockDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) (int, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockDeleteLockCommand().") } - params := []pegomock.Param{repoFullName, pullNum} + params := []pegomock.Param{logger, repoFullName, pullNum} result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteLocksByPull", params, []reflect.Type{reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 int var ret1 error @@ -100,8 +101,8 @@ type VerifierMockDeleteLockCommand struct { timeout time.Duration } -func (verifier *VerifierMockDeleteLockCommand) DeleteLock(id string) *MockDeleteLockCommand_DeleteLock_OngoingVerification { - params := []pegomock.Param{id} +func (verifier *VerifierMockDeleteLockCommand) DeleteLock(logger logging.SimpleLogging, id string) *MockDeleteLockCommand_DeleteLock_OngoingVerification { + params := []pegomock.Param{logger, id} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLock", params, verifier.timeout) return &MockDeleteLockCommand_DeleteLock_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -111,24 +112,28 @@ type MockDeleteLockCommand_DeleteLock_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetCapturedArguments() string { - id := c.GetAllCapturedArguments() - return id[len(id)-1] +func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) { + logger, id := c.GetAllCapturedArguments() + return logger[len(logger)-1], id[len(id)-1] } -func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { +func (c *MockDeleteLockCommand_DeleteLock_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) { params := 
pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(string) + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(string) } } return } -func (verifier *VerifierMockDeleteLockCommand) DeleteLocksByPull(repoFullName string, pullNum int) *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification { - params := []pegomock.Param{repoFullName, pullNum} +func (verifier *VerifierMockDeleteLockCommand) DeleteLocksByPull(logger logging.SimpleLogging, repoFullName string, pullNum int) *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification { + params := []pegomock.Param{logger, repoFullName, pullNum} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteLocksByPull", params, verifier.timeout) return &MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -138,21 +143,25 @@ type MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetCapturedArguments() (string, int) { - repoFullName, pullNum := c.GetAllCapturedArguments() - return repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1] +func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string, int) { + logger, repoFullName, pullNum := c.GetAllCapturedArguments() + return logger[len(logger)-1], repoFullName[len(repoFullName)-1], pullNum[len(pullNum)-1] } -func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []string, _param1 []int) { +func (c *MockDeleteLockCommand_DeleteLocksByPull_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string, _param2 []int) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(string) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]int, len(c.methodInvocations)) + _param1 = make([]string, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(int) + _param1[u] = param.(string) + } + _param2 = make([]int, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(int) } } return diff --git a/server/events/mocks/mock_event_parsing.go b/server/events/mocks/mock_event_parsing.go index 22690d2e3e..505fadb5fa 100644 --- a/server/events/mocks/mock_event_parsing.go +++ b/server/events/mocks/mock_event_parsing.go @@ -4,10 +4,12 @@ package mocks import ( + gitea "code.gitea.io/sdk/gitea" github "github.com/google/go-github/v59/github" azuredevops "github.com/mcdafydd/go-azuredevops/azuredevops" pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + gitea0 "github.com/runatlantis/atlantis/server/events/vcs/gitea" logging "github.com/runatlantis/atlantis/server/logging" go_gitlab "github.com/xanzy/go-gitlab" 
"reflect" @@ -291,6 +293,95 @@ func (mock *MockEventParsing) ParseBitbucketServerPullEvent(body []byte) (models return ret0, ret1, ret2, ret3, ret4 } +func (mock *MockEventParsing) ParseGiteaIssueCommentEvent(event gitea0.GiteaIssueCommentPayload) (models.Repo, models.User, int, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockEventParsing().") + } + params := []pegomock.Param{event} + result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaIssueCommentEvent", params, []reflect.Type{reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*int)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 models.Repo + var ret1 models.User + var ret2 int + var ret3 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(models.Repo) + } + if result[1] != nil { + ret1 = result[1].(models.User) + } + if result[2] != nil { + ret2 = result[2].(int) + } + if result[3] != nil { + ret3 = result[3].(error) + } + } + return ret0, ret1, ret2, ret3 +} + +func (mock *MockEventParsing) ParseGiteaPull(pull *gitea.PullRequest) (models.PullRequest, models.Repo, models.Repo, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockEventParsing().") + } + params := []pegomock.Param{pull} + result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaPull", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 models.PullRequest + var ret1 models.Repo + var ret2 models.Repo + var ret3 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(models.PullRequest) + } + if result[1] != nil { + ret1 = result[1].(models.Repo) + } + if result[2] != nil { + ret2 = result[2].(models.Repo) + } + if result[3] != nil { + ret3 = result[3].(error) + } + } + return ret0, ret1, ret2, ret3 +} + +func (mock *MockEventParsing) ParseGiteaPullRequestEvent(event gitea.PullRequest) (models.PullRequest, models.PullRequestEventType, models.Repo, models.Repo, models.User, error) { + if mock == nil { + panic("mock must not be nil. Use myMock := NewMockEventParsing().") + } + params := []pegomock.Param{event} + result := pegomock.GetGenericMockFrom(mock).Invoke("ParseGiteaPullRequestEvent", params, []reflect.Type{reflect.TypeOf((*models.PullRequest)(nil)).Elem(), reflect.TypeOf((*models.PullRequestEventType)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.Repo)(nil)).Elem(), reflect.TypeOf((*models.User)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) + var ret0 models.PullRequest + var ret1 models.PullRequestEventType + var ret2 models.Repo + var ret3 models.Repo + var ret4 models.User + var ret5 error + if len(result) != 0 { + if result[0] != nil { + ret0 = result[0].(models.PullRequest) + } + if result[1] != nil { + ret1 = result[1].(models.PullRequestEventType) + } + if result[2] != nil { + ret2 = result[2].(models.Repo) + } + if result[3] != nil { + ret3 = result[3].(models.Repo) + } + if result[4] != nil { + ret4 = result[4].(models.User) + } + if result[5] != nil { + ret5 = result[5].(error) + } + } + return ret0, ret1, ret2, ret3, ret4, ret5 +} + func (mock *MockEventParsing) ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) (models.Repo, models.User, int, error) { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockEventParsing().") @@ -818,6 +909,87 @@ func (c *MockEventParsing_ParseBitbucketServerPullEvent_OngoingVerification) Get return } +func (verifier *VerifierMockEventParsing) ParseGiteaIssueCommentEvent(event gitea0.GiteaIssueCommentPayload) *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification { + params := []pegomock.Param{event} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaIssueCommentEvent", params, verifier.timeout) + return &MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification struct { + mock *MockEventParsing + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification) GetCapturedArguments() gitea0.GiteaIssueCommentPayload { + event := c.GetAllCapturedArguments() + return event[len(event)-1] +} + +func (c *MockEventParsing_ParseGiteaIssueCommentEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []gitea0.GiteaIssueCommentPayload) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]gitea0.GiteaIssueCommentPayload, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(gitea0.GiteaIssueCommentPayload) + } + } + return +} + +func (verifier *VerifierMockEventParsing) ParseGiteaPull(pull *gitea.PullRequest) *MockEventParsing_ParseGiteaPull_OngoingVerification { + params := []pegomock.Param{pull} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaPull", params, verifier.timeout) + return &MockEventParsing_ParseGiteaPull_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockEventParsing_ParseGiteaPull_OngoingVerification struct { + mock *MockEventParsing + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockEventParsing_ParseGiteaPull_OngoingVerification) GetCapturedArguments() *gitea.PullRequest { + pull := c.GetAllCapturedArguments() + return pull[len(pull)-1] +} + +func (c *MockEventParsing_ParseGiteaPull_OngoingVerification) GetAllCapturedArguments() (_param0 []*gitea.PullRequest) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]*gitea.PullRequest, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(*gitea.PullRequest) + } + } + return +} + +func (verifier *VerifierMockEventParsing) ParseGiteaPullRequestEvent(event gitea.PullRequest) *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification { + params := []pegomock.Param{event} + methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGiteaPullRequestEvent", params, verifier.timeout) + return &MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} +} + +type MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification struct { + mock *MockEventParsing + methodInvocations []pegomock.MethodInvocation +} + +func (c *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification) GetCapturedArguments() gitea.PullRequest { + event := c.GetAllCapturedArguments() + return 
event[len(event)-1] +} + +func (c *MockEventParsing_ParseGiteaPullRequestEvent_OngoingVerification) GetAllCapturedArguments() (_param0 []gitea.PullRequest) { + params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) + if len(params) > 0 { + _param0 = make([]gitea.PullRequest, len(c.methodInvocations)) + for u, param := range params[0] { + _param0[u] = param.(gitea.PullRequest) + } + } + return +} + func (verifier *VerifierMockEventParsing) ParseGithubIssueCommentEvent(logger logging.SimpleLogging, comment *github.IssueCommentEvent) *MockEventParsing_ParseGithubIssueCommentEvent_OngoingVerification { params := []pegomock.Param{logger, comment} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "ParseGithubIssueCommentEvent", params, verifier.timeout) diff --git a/server/events/mocks/mock_working_dir.go b/server/events/mocks/mock_working_dir.go index 55ecc1ca4c..9c162fc4a2 100644 --- a/server/events/mocks/mock_working_dir.go +++ b/server/events/mocks/mock_working_dir.go @@ -6,6 +6,7 @@ package mocks import ( pegomock "github.com/petergtz/pegomock/v4" models "github.com/runatlantis/atlantis/server/events/models" + logging "github.com/runatlantis/atlantis/server/logging" "reflect" "time" ) @@ -25,11 +26,11 @@ func NewMockWorkingDir(options ...pegomock.Option) *MockWorkingDir { func (mock *MockWorkingDir) SetFailHandler(fh pegomock.FailHandler) { mock.fail = fh } func (mock *MockWorkingDir) FailHandler() pegomock.FailHandler { return mock.fail } -func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { +func (mock *MockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{headRepo, p, workspace} + params := []pegomock.Param{logger, headRepo, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("Clone", params, []reflect.Type{reflect.TypeOf((*string)(nil)).Elem(), reflect.TypeOf((*bool)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 string var ret1 bool @@ -48,11 +49,11 @@ func (mock *MockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, wo return ret0, ret1, ret2 } -func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { +func (mock *MockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p} + params := []pegomock.Param{logger, r, p} result := pegomock.GetGenericMockFrom(mock).Invoke("Delete", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -63,11 +64,11 @@ func (mock *MockWorkingDir) Delete(r models.Repo, p models.PullRequest) error { return ret0 } -func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error { +func (mock *MockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace} + params := []pegomock.Param{logger, r, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("DeleteForWorkspace", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -78,11 +79,11 @@ func (mock *MockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullReque return ret0 } -func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error { +func (mock *MockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace, path, projectName} + params := []pegomock.Param{logger, r, p, workspace, path, projectName} result := pegomock.GetGenericMockFrom(mock).Invoke("DeletePlan", params, []reflect.Type{reflect.TypeOf((*error)(nil)).Elem()}) var ret0 error if len(result) != 0 { @@ -93,11 +94,11 @@ func (mock *MockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, work return ret0 } -func (mock *MockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) { +func (mock *MockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) { if mock == nil { panic("mock must not be nil. Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{r, p, workspace} + params := []pegomock.Param{logger, r, p, workspace} result := pegomock.GetGenericMockFrom(mock).Invoke("GetGitUntrackedFiles", params, []reflect.Type{reflect.TypeOf((*[]string)(nil)).Elem(), reflect.TypeOf((*error)(nil)).Elem()}) var ret0 []string var ret1 error @@ -150,11 +151,11 @@ func (mock *MockWorkingDir) GetWorkingDir(r models.Repo, p models.PullRequest, w return ret0, ret1 } -func (mock *MockWorkingDir) HasDiverged(cloneDir string) bool { +func (mock *MockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool { if mock == nil { panic("mock must not be nil. 
Use myMock := NewMockWorkingDir().") } - params := []pegomock.Param{cloneDir} + params := []pegomock.Param{logger, cloneDir} result := pegomock.GetGenericMockFrom(mock).Invoke("HasDiverged", params, []reflect.Type{reflect.TypeOf((*bool)(nil)).Elem()}) var ret0 bool if len(result) != 0 { @@ -210,8 +211,8 @@ type VerifierMockWorkingDir struct { timeout time.Duration } -func (verifier *VerifierMockWorkingDir) Clone(headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { - params := []pegomock.Param{headRepo, p, workspace} +func (verifier *VerifierMockWorkingDir) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_Clone_OngoingVerification { + params := []pegomock.Param{logger, headRepo, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "Clone", params, verifier.timeout) return &MockWorkingDir_Clone_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -221,32 +222,36 @@ type MockWorkingDir_Clone_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - headRepo, p, workspace := c.GetAllCapturedArguments() - return headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_Clone_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, headRepo, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], headRepo[len(headRepo)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_Clone_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) Delete(r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { - params := []pegomock.Param{r, p} +func (verifier *VerifierMockWorkingDir) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) *MockWorkingDir_Delete_OngoingVerification { + params := []pegomock.Param{logger, r, p} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, 
verifier.invocationCountMatcher, "Delete", params, verifier.timeout) return &MockWorkingDir_Delete_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -256,28 +261,32 @@ type MockWorkingDir_Delete_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest) { - r, p := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1] +func (c *MockWorkingDir_Delete_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest) { + logger, r, p := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1] } -func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest) { +func (c *MockWorkingDir_Delete_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) + } + _param2 = make([]models.PullRequest, len(c.methodInvocations)) + for u, param := range params[2] { + _param2[u] = param.(models.PullRequest) } } return } -func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_DeleteForWorkspace_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeleteForWorkspace", params, verifier.timeout) return &MockWorkingDir_DeleteForWorkspace_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -287,32 +296,36 @@ type MockWorkingDir_DeleteForWorkspace_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_DeleteForWorkspace_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 
[]models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return } -func (verifier *VerifierMockWorkingDir) DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { - params := []pegomock.Param{r, p, workspace, path, projectName} +func (verifier *VerifierMockWorkingDir) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) *MockWorkingDir_DeletePlan_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace, path, projectName} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "DeletePlan", params, verifier.timeout) return &MockWorkingDir_DeletePlan_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -322,25 +335,25 @@ type MockWorkingDir_DeletePlan_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string, string, string) { - r, p, workspace, path, projectName := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string, string, string) { + logger, r, p, workspace, path, projectName := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1], path[len(path)-1], projectName[len(projectName)-1] } -func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string, _param3 []string, _param4 []string) { +func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string, _param4 []string, _param5 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param 
:= range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) } _param3 = make([]string, len(c.methodInvocations)) for u, param := range params[3] { @@ -350,12 +363,16 @@ func (c *MockWorkingDir_DeletePlan_OngoingVerification) GetAllCapturedArguments( for u, param := range params[4] { _param4[u] = param.(string) } + _param5 = make([]string, len(c.methodInvocations)) + for u, param := range params[5] { + _param5[u] = param.(string) + } } return } -func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { - params := []pegomock.Param{r, p, workspace} +func (verifier *VerifierMockWorkingDir) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification { + params := []pegomock.Param{logger, r, p, workspace} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "GetGitUntrackedFiles", params, verifier.timeout) return &MockWorkingDir_GetGitUntrackedFiles_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -365,25 +382,29 @@ type MockWorkingDir_GetGitUntrackedFiles_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (models.Repo, models.PullRequest, string) { - r, p, workspace := c.GetAllCapturedArguments() - return r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, models.Repo, models.PullRequest, string) { + logger, r, p, workspace := c.GetAllCapturedArguments() + return logger[len(logger)-1], r[len(r)-1], p[len(p)-1], workspace[len(workspace)-1] } -func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []models.Repo, _param1 []models.PullRequest, _param2 []string) { +func (c *MockWorkingDir_GetGitUntrackedFiles_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []models.Repo, _param2 []models.PullRequest, _param3 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]models.Repo, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(models.Repo) + _param0[u] = param.(logging.SimpleLogging) } - _param1 = make([]models.PullRequest, len(c.methodInvocations)) + _param1 = make([]models.Repo, len(c.methodInvocations)) for u, param := range params[1] { - _param1[u] = param.(models.PullRequest) + _param1[u] = param.(models.Repo) } - _param2 = make([]string, len(c.methodInvocations)) + _param2 = make([]models.PullRequest, len(c.methodInvocations)) for u, param := range params[2] { - _param2[u] = param.(string) + _param2[u] = param.(models.PullRequest) + } + _param3 = make([]string, len(c.methodInvocations)) + for u, param := range params[3] { + _param3[u] = param.(string) } } return @@ -455,8 +476,8 @@ func (c 
*MockWorkingDir_GetWorkingDir_OngoingVerification) GetAllCapturedArgumen return } -func (verifier *VerifierMockWorkingDir) HasDiverged(cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { - params := []pegomock.Param{cloneDir} +func (verifier *VerifierMockWorkingDir) HasDiverged(logger logging.SimpleLogging, cloneDir string) *MockWorkingDir_HasDiverged_OngoingVerification { + params := []pegomock.Param{logger, cloneDir} methodInvocations := pegomock.GetGenericMockFrom(verifier.mock).Verify(verifier.inOrderContext, verifier.invocationCountMatcher, "HasDiverged", params, verifier.timeout) return &MockWorkingDir_HasDiverged_OngoingVerification{mock: verifier.mock, methodInvocations: methodInvocations} } @@ -466,17 +487,21 @@ type MockWorkingDir_HasDiverged_OngoingVerification struct { methodInvocations []pegomock.MethodInvocation } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() string { - cloneDir := c.GetAllCapturedArguments() - return cloneDir[len(cloneDir)-1] +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetCapturedArguments() (logging.SimpleLogging, string) { + logger, cloneDir := c.GetAllCapturedArguments() + return logger[len(logger)-1], cloneDir[len(cloneDir)-1] } -func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []string) { +func (c *MockWorkingDir_HasDiverged_OngoingVerification) GetAllCapturedArguments() (_param0 []logging.SimpleLogging, _param1 []string) { params := pegomock.GetGenericMockFrom(c.mock).GetInvocationParams(c.methodInvocations) if len(params) > 0 { - _param0 = make([]string, len(c.methodInvocations)) + _param0 = make([]logging.SimpleLogging, len(c.methodInvocations)) for u, param := range params[0] { - _param0[u] = param.(string) + _param0[u] = param.(logging.SimpleLogging) + } + _param1 = make([]string, len(c.methodInvocations)) + for u, param := range params[1] { + _param1[u] = param.(string) } } return diff --git a/server/events/models/models.go b/server/events/models/models.go index b98d93e554..a23410a69b 100644 --- a/server/events/models/models.go +++ b/server/events/models/models.go @@ -304,6 +304,7 @@ const ( BitbucketCloud BitbucketServer AzureDevops + Gitea ) func (h VCSHostType) String() string { @@ -318,6 +319,8 @@ func (h VCSHostType) String() string { return "BitbucketServer" case AzureDevops: return "AzureDevops" + case Gitea: + return "Gitea" } return "" } @@ -334,6 +337,8 @@ func NewVCSHostType(t string) (VCSHostType, error) { return BitbucketServer, nil case "AzureDevops": return AzureDevops, nil + case "Gitea": + return Gitea, nil } return -1, fmt.Errorf("%q is not a valid type", t) @@ -648,6 +653,8 @@ type WorkflowHookCommandContext struct { // Workspace is the Terraform workspace this project is in. It will always // be set. Workspace string + // API is true if plan/apply by API endpoints + API bool } // PlanSuccessStats holds stats for a plan. 
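The models.go hunk above registers Gitea as a new VCSHostType constant and wires it into both String() and NewVCSHostType(), so the name round-trips the same way the existing hosts do. A minimal sketch of that behavior, assuming only what the hunk shows (the import path is the package's own; the error text for unknown hosts comes from the pre-existing fmt.Errorf fallthrough):

package main

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events/models"
)

func main() {
	// "Gitea" now parses to the new enum value and stringifies back.
	host, err := models.NewVCSHostType("Gitea")
	if err != nil {
		panic(err)
	}
	fmt.Println(host.String()) // Gitea

	// Unrecognized names still fail with the existing error shape.
	if _, err := models.NewVCSHostType("SourceHut"); err != nil {
		fmt.Println(err) // "SourceHut" is not a valid type
	}
}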
diff --git a/server/events/modules_test.go b/server/events/modules_test.go index 3f7770c033..e5cc9798c6 100644 --- a/server/events/modules_test.go +++ b/server/events/modules_test.go @@ -8,6 +8,7 @@ import ( "testing" "github.com/stretchr/testify/assert" + "github.com/stretchr/testify/require" ) //go:embed testdata/fs @@ -20,9 +21,9 @@ func Test_findModuleDependants(t *testing.T) { autoplanModuleDependants string } a, err := fs.Sub(repos, "testdata/fs/repoA") - assert.NoError(t, err) + require.NoError(t, err) b, err := fs.Sub(repos, "testdata/fs/repoB") - assert.NoError(t, err) + require.NoError(t, err) tests := []struct { name string diff --git a/server/events/pending_plan_finder.go b/server/events/pending_plan_finder.go index 72a4f2742f..9a26866b1a 100644 --- a/server/events/pending_plan_finder.go +++ b/server/events/pending_plan_finder.go @@ -8,6 +8,7 @@ import ( "github.com/pkg/errors" "github.com/runatlantis/atlantis/server/core/runtime" + "github.com/runatlantis/atlantis/server/utils" ) //go:generate pegomock generate --package mocks -o mocks/mock_pending_plan_finder.go PendingPlanFinder @@ -92,7 +93,7 @@ func (p *DefaultPendingPlanFinder) DeletePlans(pullDir string) error { return err } for _, path := range absPaths { - if err := os.Remove(path); err != nil { + if err := utils.RemoveIgnoreNonExistent(path); err != nil { return errors.Wrapf(err, "delete plan at %s", path) } } diff --git a/server/events/plan_command_runner.go b/server/events/plan_command_runner.go index b8657f5a85..85c9f21ec1 100644 --- a/server/events/plan_command_runner.go +++ b/server/events/plan_command_runner.go @@ -76,6 +76,7 @@ type PlanCommandRunner struct { // a plan. DiscardApprovalOnPlan bool pullReqStatusFetcher vcs.PullReqStatusFetcher + SilencePRComments []string } func (p *PlanCommandRunner) runAutoplan(ctx *command.Context) { @@ -118,6 +119,12 @@ func (p *PlanCommandRunner) runAutoplan(ctx *command.Context) { ctx.Log.Warn("unable to update plan commit status: %s", err) } + if baseRepo.VCSHost.Type == models.Gitlab { + if err := p.commitStatusUpdater.UpdateCombinedCount(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull, models.PendingCommitStatus, command.Apply, 0, len(projectCmds)); err != nil { + ctx.Log.Warn("unable to update apply commit status: %s", err) + } + } + // discard previous plans that might not be relevant anymore ctx.Log.Debug("deleting previous plans and locks") p.deletePlans(ctx) diff --git a/server/events/post_workflow_hooks_command_runner.go b/server/events/post_workflow_hooks_command_runner.go index 5e36794572..f5fe0c5245 100644 --- a/server/events/post_workflow_hooks_command_runner.go +++ b/server/events/post_workflow_hooks_command_runner.go @@ -37,18 +37,10 @@ type DefaultPostWorkflowHooksCommandRunner struct { } // RunPostHooks runs post_workflow_hooks after a plan/apply has completed -func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks( - ctx *command.Context, cmd *CommentCommand, -) error { - pull := ctx.Pull - baseRepo := pull.BaseRepo - headRepo := ctx.HeadRepo - user := ctx.User - log := ctx.Log - +func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks(ctx *command.Context, cmd *CommentCommand) error { postWorkflowHooks := make([]*valid.WorkflowHook, 0) for _, repo := range w.GlobalCfg.Repos { - if repo.IDMatches(baseRepo.ID()) && repo.BranchMatches(pull.BaseBranch) && len(repo.PostWorkflowHooks) > 0 { + if repo.IDMatches(ctx.Pull.BaseRepo.ID()) && repo.BranchMatches(ctx.Pull.BaseBranch) && len(repo.PostWorkflowHooks) > 0 { postWorkflowHooks = append(postWorkflowHooks, 
repo.PostWorkflowHooks...) } } @@ -58,16 +50,16 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks( return nil } - log.Debug("post-hooks configured, running...") + ctx.Log.Debug("post-hooks configured, running...") - unlockFn, err := w.WorkingDirLocker.TryLock(baseRepo.FullName, pull.Num, DefaultWorkspace, DefaultRepoRelDir) + unlockFn, err := w.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, DefaultWorkspace, DefaultRepoRelDir) if err != nil { return err } - log.Debug("got workspace lock") + ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(headRepo, pull, DefaultWorkspace) + repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } @@ -79,14 +71,15 @@ func (w *DefaultPostWorkflowHooksCommandRunner) RunPostHooks( err = w.runHooks( models.WorkflowHookCommandContext{ - BaseRepo: baseRepo, - HeadRepo: headRepo, - Log: log, - Pull: pull, - User: user, + BaseRepo: ctx.Pull.BaseRepo, + HeadRepo: ctx.HeadRepo, + Log: ctx.Log, + Pull: ctx.Pull, + User: ctx.User, Verbose: false, EscapedCommentArgs: escapedArgs, CommandName: cmd.Name.String(), + API: ctx.API, }, postWorkflowHooks, repoDir) @@ -123,16 +116,16 @@ func (w *DefaultPostWorkflowHooksCommandRunner) runHooks( ctx.HookID = uuid.NewString() shell := hook.Shell if shell == "" { - ctx.Log.Debug("Setting shell to default: %q", shell) + ctx.Log.Debug("Setting shell to default: '%s'", shell) shell = "sh" } shellArgs := hook.ShellArgs if shellArgs == "" { - ctx.Log.Debug("Setting shellArgs to default: %q", shellArgs) + ctx.Log.Debug("Setting shellArgs to default: '%s'", shellArgs) shellArgs = "-c" } url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID) - if err != nil { + if err != nil && !ctx.API { return err } diff --git a/server/events/post_workflow_hooks_command_runner_test.go b/server/events/post_workflow_hooks_command_runner_test.go index 38cd5ee9ec..29996d8028 100644 --- a/server/events/post_workflow_hooks_command_runner_test.go +++ b/server/events/post_workflow_hooks_command_runner_test.go @@ -140,8 +140,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -180,7 +182,8 @@ func TestRunPostHooks_Clone(t *testing.T) { whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) postWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, "path") - postWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) + postWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), 
Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) }) t.Run("error locking work dir", func(t *testing.T) { postWorkflowHooksSetup(t) @@ -198,12 +201,14 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) err := postWh.RunPostHooks(ctx, planCmd) Assert(t, err != nil, "error not nil") - postWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) + postWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) whPostWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) }) @@ -229,8 +234,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error")) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) err := postWh.RunPostHooks(ctx, planCmd) @@ -262,8 +269,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -302,8 +311,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) 
When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -336,8 +347,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -370,8 +383,10 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -404,16 +419,19 @@ func TestRunPostHooks_Clone(t *testing.T) { postWh.GlobalCfg = globalCfg - When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(postWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(postWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(postWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPostWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := postWh.RunPostHooks(ctx, planCmd) Ok(t, err) whPostWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithShellandShellArgs.RunCommand), Eq(testHookWithShellandShellArgs.Shell), Eq(testHookWithShellandShellArgs.ShellArgs), Eq(repoDir)) + Eq(testHookWithShellandShellArgs.RunCommand), Eq(testHookWithShellandShellArgs.Shell), + Eq(testHookWithShellandShellArgs.ShellArgs), Eq(repoDir)) Assert(t, *unlockCalled == true, "unlock function called") }) 
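Every mock-stubbing hunk in these tests makes the same adjustment: WorkingDir.Clone now takes a logging.SimpleLogging as its first argument, and because pegomock requires that either all arguments or none of them use matchers in a single stub, the previously bare values gain Eq(...) wrappers once Any[logging.SimpleLogging]() is introduced for the logger. A condensed sketch of the pattern, assuming the imports already present in these test files (the pegomock matchers, the events mocks, and testdata):

    workingDir := mocks.NewMockWorkingDir()

    // Accept any logger, but pin repo, pull request, and workspace to the
    // exact values the hook runner is expected to pass.
    When(workingDir.Clone(
        Any[logging.SimpleLogging](),
        Eq(testdata.GithubRepo),
        Eq(newPull),
        Eq(events.DefaultWorkspace),
    )).ThenReturn(repoDir, false, nil)

    // Verification uses the same matcher style.
    workingDir.VerifyWasCalledOnce().Clone(
        Any[logging.SimpleLogging](),
        Eq(testdata.GithubRepo),
        Eq(newPull),
        Eq(events.DefaultWorkspace),
    )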
@@ -438,8 +456,10 @@ func TestRunPostHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -472,10 +492,12 @@ func TestRunPostHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, applyCmd) @@ -506,10 +528,12 @@ func TestRunPostHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) diff --git a/server/events/pre_workflow_hooks_command_runner.go b/server/events/pre_workflow_hooks_command_runner.go index 17b9864757..70462765a3 100644 --- a/server/events/pre_workflow_hooks_command_runner.go +++ b/server/events/pre_workflow_hooks_command_runner.go @@ -38,15 +38,9 @@ type DefaultPreWorkflowHooksCommandRunner struct { // RunPreHooks runs pre_workflow_hooks when PR is opened or 
updated. func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, cmd *CommentCommand) error { - pull := ctx.Pull - baseRepo := pull.BaseRepo - headRepo := ctx.HeadRepo - user := ctx.User - log := ctx.Log - preWorkflowHooks := make([]*valid.WorkflowHook, 0) for _, repo := range w.GlobalCfg.Repos { - if repo.IDMatches(baseRepo.ID()) && len(repo.PreWorkflowHooks) > 0 { + if repo.IDMatches(ctx.Pull.BaseRepo.ID()) && len(repo.PreWorkflowHooks) > 0 { preWorkflowHooks = append(preWorkflowHooks, repo.PreWorkflowHooks...) } } @@ -56,16 +50,16 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, return nil } - log.Debug("pre-hooks configured, running...") + ctx.Log.Debug("pre-hooks configured, running...") - unlockFn, err := w.WorkingDirLocker.TryLock(baseRepo.FullName, pull.Num, DefaultWorkspace, DefaultRepoRelDir) + unlockFn, err := w.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, DefaultWorkspace, DefaultRepoRelDir) if err != nil { return err } - log.Debug("got workspace lock") + ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := w.WorkingDir.Clone(headRepo, pull, DefaultWorkspace) + repoDir, _, err := w.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return err } @@ -89,14 +83,15 @@ func (w *DefaultPreWorkflowHooksCommandRunner) RunPreHooks(ctx *command.Context, err = w.runHooks( models.WorkflowHookCommandContext{ - BaseRepo: baseRepo, - HeadRepo: headRepo, - Log: log, - Pull: pull, - User: user, + BaseRepo: ctx.Pull.BaseRepo, + HeadRepo: ctx.HeadRepo, + Log: ctx.Log, + Pull: ctx.Pull, + User: ctx.User, Verbose: false, EscapedCommentArgs: escapedArgs, CommandName: cmd.Name.String(), + API: ctx.API, }, preWorkflowHooks, repoDir) @@ -132,22 +127,25 @@ func (w *DefaultPreWorkflowHooksCommandRunner) runHooks( ctx.HookID = uuid.NewString() shell := hook.Shell if shell == "" { - ctx.Log.Debug("Setting shell to default: %q", shell) + ctx.Log.Debug("Setting shell to default: '%s'", shell) shell = "sh" } shellArgs := hook.ShellArgs if shellArgs == "" { - ctx.Log.Debug("Setting shellArgs to default: %q", shellArgs) + ctx.Log.Debug("Setting shellArgs to default: '%s'", shellArgs) shellArgs = "-c" } url, err := w.Router.GenerateProjectWorkflowHookURL(ctx.HookID) - if err != nil { + if err != nil && !ctx.API { return err } if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Log, ctx.Pull, models.PendingCommitStatus, ctx.HookDescription, "", url); err != nil { ctx.Log.Warn("unable to update pre workflow hook status: %s", err) - return err + if !ctx.API { + ctx.Log.Info("is api? 
%v", ctx.API) + return err + } } _, runtimeDesc, err := w.PreWorkflowHookRunner.Run(ctx, hook.RunCommand, shell, shellArgs, repoDir) @@ -161,7 +160,9 @@ func (w *DefaultPreWorkflowHooksCommandRunner) runHooks( if err := w.CommitStatusUpdater.UpdatePreWorkflowHook(ctx.Log, ctx.Pull, models.SuccessCommitStatus, ctx.HookDescription, runtimeDesc, url); err != nil { ctx.Log.Warn("unable to update pre workflow hook status: %s", err) - return err + if !ctx.API { + return err + } } } diff --git a/server/events/pre_workflow_hooks_command_runner_test.go b/server/events/pre_workflow_hooks_command_runner_test.go index 3156797f86..191a8c27dc 100644 --- a/server/events/pre_workflow_hooks_command_runner_test.go +++ b/server/events/pre_workflow_hooks_command_runner_test.go @@ -142,8 +142,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -180,9 +182,11 @@ func TestRunPreHooks_Clone(t *testing.T) { Ok(t, err) - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) preWhWorkingDirLocker.VerifyWasCalled(Never()).TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, "") - preWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) + preWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) }) t.Run("error locking work dir", func(t *testing.T) { @@ -201,13 +205,16 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(func() {}, errors.New("some error")) err := preWh.RunPreHooks(ctx, planCmd) Assert(t, err != nil, "error not nil") - preWhWorkingDir.VerifyWasCalled(Never()).Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace) - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + preWhWorkingDir.VerifyWasCalled(Never()).Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace)) + 
whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) }) t.Run("error cloning", func(t *testing.T) { @@ -231,14 +238,17 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, errors.New("some error")) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, errors.New("some error")) err := preWh.RunPreHooks(ctx, planCmd) Assert(t, err != nil, "error not nil") - whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + whPreWorkflowHookRunner.VerifyWasCalled(Never()).Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) Assert(t, *unlockCalled == true, "unlock function called") }) @@ -263,8 +273,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, errors.New("some error")) @@ -303,14 +315,18 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), + Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) Ok(t, err) - whPreWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), 
Eq(testHook.RunCommand), Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) + whPreWorkflowHookRunner.VerifyWasCalledOnce().Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), + Eq(defaultShell), Eq(defaultShellArgs), Eq(repoDir)) Assert(t, *unlockCalled == true, "unlock function called") }) @@ -335,8 +351,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShell.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -369,8 +387,10 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHook.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) @@ -403,10 +423,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithShellandShellArgs.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithShellandShellArgs.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) @@ -438,10 +460,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, 
events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) @@ -472,10 +496,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanCommand.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanCommand.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, applyCmd) @@ -506,10 +532,12 @@ func TestRunPreHooks_Clone(t *testing.T) { preWh.GlobalCfg = globalCfg - When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) - When(preWhWorkingDir.Clone(testdata.GithubRepo, newPull, events.DefaultWorkspace)).ThenReturn(repoDir, false, nil) - When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), - Eq(testHookWithPlanApplyCommands.RunCommand), Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) + When(preWhWorkingDirLocker.TryLock(testdata.GithubRepo.FullName, newPull.Num, events.DefaultWorkspace, + events.DefaultRepoRelDir)).ThenReturn(unlockFn, nil) + When(preWhWorkingDir.Clone(Any[logging.SimpleLogging](), Eq(testdata.GithubRepo), Eq(newPull), + Eq(events.DefaultWorkspace))).ThenReturn(repoDir, false, nil) + When(whPreWorkflowHookRunner.Run(Any[models.WorkflowHookCommandContext](), Eq(testHookWithPlanApplyCommands.RunCommand), + Any[string](), Any[string](), Eq(repoDir))).ThenReturn(result, runtimeDesc, nil) err := preWh.RunPreHooks(ctx, planCmd) diff --git a/server/events/project_command_builder.go b/server/events/project_command_builder.go index 98f1ef9997..538c71a73f 100644 --- a/server/events/project_command_builder.go +++ b/server/events/project_command_builder.go @@ -36,6 +36,7 @@ const ( ) func NewInstrumentedProjectCommandBuilder( + logger logging.SimpleLogging, policyChecksSupported bool, parserValidator *config.ParserValidator, projectFinder ProjectFinder, @@ -57,7 +58,6 @@ func 
NewInstrumentedProjectCommandBuilder( IncludeGitUntrackedFiles bool, AutoDiscoverMode string, scope tally.Scope, - logger logging.SimpleLogging, terraformClient terraform.Client, ) *InstrumentedProjectCommandBuilder { scope = scope.SubScope("builder") @@ -89,7 +89,6 @@ func NewInstrumentedProjectCommandBuilder( IncludeGitUntrackedFiles, AutoDiscoverMode, scope, - logger, terraformClient, ), Logger: logger, @@ -119,7 +118,6 @@ func NewProjectCommandBuilder( IncludeGitUntrackedFiles bool, AutoDiscoverMode string, scope tally.Scope, - _ logging.SimpleLogging, terraformClient terraform.Client, ) *DefaultProjectCommandBuilder { return &DefaultProjectCommandBuilder{ @@ -262,7 +260,7 @@ func (p *DefaultProjectCommandBuilder) BuildAutoplanCommands(ctx *command.Contex var autoplanEnabled []command.ProjectContext for _, projCtx := range projCtxs { if !projCtx.AutoplanEnabled { - ctx.Log.Debug("ignoring project at dir %q, workspace: %q because autoplan is disabled", projCtx.RepoRelDir, projCtx.Workspace) + ctx.Log.Debug("ignoring project at dir '%s', workspace: '%s' because autoplan is disabled", projCtx.RepoRelDir, projCtx.Workspace) continue } autoplanEnabled = append(autoplanEnabled, projCtx) @@ -278,8 +276,7 @@ func (p *DefaultProjectCommandBuilder) BuildPlanCommands(ctx *command.Context, c } ctx.Log.Debug("Building plan command for specific project with directory: '%v', workspace: '%v', project: '%v'", cmd.RepoRelDir, cmd.Workspace, cmd.ProjectName) - pcc, err := p.buildProjectPlanCommand(ctx, cmd) - return pcc, err + return p.buildProjectPlanCommand(ctx, cmd) } // See ProjectCommandBuilder.BuildApplyCommands. @@ -287,24 +284,21 @@ func (p *DefaultProjectCommandBuilder) BuildApplyCommands(ctx *command.Context, if !cmd.IsForSpecificProject() { return p.buildAllProjectCommandsByPlan(ctx, cmd) } - pac, err := p.buildProjectCommand(ctx, cmd) - return pac, err + return p.buildProjectCommand(ctx, cmd) } func (p *DefaultProjectCommandBuilder) BuildApprovePoliciesCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { if !cmd.IsForSpecificProject() { return p.buildAllProjectCommandsByPlan(ctx, cmd) } - pac, err := p.buildProjectCommand(ctx, cmd) - return pac, err + return p.buildProjectCommand(ctx, cmd) } func (p *DefaultProjectCommandBuilder) BuildVersionCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { if !cmd.IsForSpecificProject() { return p.buildAllProjectCommandsByPlan(ctx, cmd) } - pac, err := p.buildProjectCommand(ctx, cmd) - return pac, err + return p.buildProjectCommand(ctx, cmd) } func (p *DefaultProjectCommandBuilder) BuildImportCommands(ctx *command.Context, cmd *CommentCommand) ([]command.ProjectContext, error) { @@ -334,7 +328,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex if p.IncludeGitUntrackedFiles { ctx.Log.Debug(("'include-git-untracked-files' option is set, getting untracked files")) - untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return nil, err } @@ -402,7 +396,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Debug("got workspace lock") defer unlockFn() - repoDir, _, err := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, workspace) + repoDir, _, err := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return nil, err } @@ -411,7 
+405,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex repoCfgFile := p.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID()) hasRepoCfg, err := p.ParserValidator.HasRepoCfg(repoDir, repoCfgFile) if err != nil { - return nil, errors.Wrapf(err, "looking for %s file in %q", repoCfgFile, repoDir) + return nil, errors.Wrapf(err, "looking for '%s' file in '%s'", repoCfgFile, repoDir) } var projCtxs []command.ProjectContext @@ -440,7 +434,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex if err != nil { ctx.Log.Warn("error(s) loading project module dependencies: %s", err) } - ctx.Log.Debug("moduleInfo for %s (matching %q) = %v", repoDir, p.AutoDetectModuleFiles, moduleInfo) + ctx.Log.Debug("moduleInfo for '%s' (matching '%s') = %v", repoDir, p.AutoDetectModuleFiles, moduleInfo) automerge := p.EnableAutoMerge parallelApply := p.EnableParallelApply @@ -467,7 +461,7 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Info("%d projects are to be planned based on their when_modified config", len(matchingProjects)) for _, mp := range matchingProjects { - ctx.Log.Debug("determining config for project at dir: %q workspace: %q", mp.Dir, mp.Workspace) + ctx.Log.Debug("determining config for project at dir: '%s' workspace: '%s'", mp.Dir, mp.Workspace) mergedCfg := p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp, repoCfg) projCtxs = append(projCtxs, @@ -523,10 +517,11 @@ func (p *DefaultProjectCommandBuilder) buildAllCommandsByCfg(ctx *command.Contex ctx.Log.Info("automatically determined that there were %d additional projects modified in this pull request: %s", len(modifiedProjects), modifiedProjects) for _, mp := range modifiedProjects { - ctx.Log.Debug("determining config for project at dir: %q", mp.Path) - pWorkspace, err := p.ProjectFinder.DetermineWorkspaceFromHCL(ctx.Log, repoDir) + ctx.Log.Debug("determining config for project at dir: '%s'", mp.Path) + absProjectDir := filepath.Join(repoDir, mp.Path) + pWorkspace, err := p.ProjectFinder.DetermineWorkspaceFromHCL(ctx.Log, absProjectDir) if err != nil { - return nil, errors.Wrapf(err, "looking for Terraform Cloud workspace from configuration %s", repoDir) + return nil, errors.Wrapf(err, "looking for Terraform Cloud workspace from configuration %s", absProjectDir) } pCfg := p.GlobalCfg.DefaultProjCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp.Path, pWorkspace) @@ -574,7 +569,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont defer unlockFn() ctx.Log.Debug("cloning repository") - _, _, err = p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, DefaultWorkspace) + _, _, err = p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, DefaultWorkspace) if err != nil { return pcc, err } @@ -595,7 +590,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont if p.IncludeGitUntrackedFiles { ctx.Log.Debug(("'include-git-untracked-files' option is set, getting untracked files")) - untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.HeadRepo, ctx.Pull, workspace) + untrackedFiles, err := p.WorkingDir.GetGitUntrackedFiles(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return nil, err } @@ -652,7 +647,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectPlanCommand(ctx *command.Cont if DefaultWorkspace != workspace { ctx.Log.Debug("cloning repository with workspace %s", workspace) - _, _, err = p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, workspace) + _, _, err = 
p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, workspace) if err != nil { return pcc, err } @@ -682,7 +677,7 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName repoCfgFile := p.GlobalCfg.RepoConfigFile(ctx.Pull.BaseRepo.ID()) hasRepoCfg, err := p.ParserValidator.HasRepoCfg(repoDir, repoCfgFile) if err != nil { - err = errors.Wrapf(err, "looking for %s file in %q", repoCfgFile, repoDir) + err = errors.Wrapf(err, "looking for '%s' file in '%s'", repoCfgFile, repoDir) return } if !hasRepoCfg { @@ -712,9 +707,9 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName } if len(projectsCfg) == 0 { if p.SilenceNoProjects && len(repoConfig.Projects) > 0 { - ctx.Log.Debug("no project with name %q found but silencing the error", projectName) + ctx.Log.Debug("no project with name '%s' found but silencing the error", projectName) } else { - err = fmt.Errorf("no project with name %q is defined in %s", projectName, repoCfgFile) + err = fmt.Errorf("no project with name '%s' is defined in '%s'", projectName, repoCfgFile) } return } @@ -726,7 +721,7 @@ func (p *DefaultProjectCommandBuilder) getCfg(ctx *command.Context, projectName return } if len(projCfgs) > 1 { - err = fmt.Errorf("must specify project name: more than one project defined in %s matched dir: %q workspace: %q", repoCfgFile, dir, workspace) + err = fmt.Errorf("must specify project name: more than one project defined in '%s' matched dir: '%s' workspace: '%s'", repoCfgFile, dir, workspace) return } projectsCfg = projCfgs @@ -765,7 +760,7 @@ func (p *DefaultProjectCommandBuilder) buildAllProjectCommandsByPlan(ctx *comman for _, plan := range plans { commentCmds, err := p.buildProjectCommandCtx(ctx, commentCmd.CommandName(), commentCmd.SubName, plan.ProjectName, commentCmd.Flags, defaultRepoDir, plan.RepoRelDir, plan.Workspace, commentCmd.Verbose) if err != nil { - return nil, errors.Wrapf(err, "building command for dir %q", plan.RepoRelDir) + return nil, errors.Wrapf(err, "building command for dir '%s'", plan.RepoRelDir) } cmds = append(cmds, commentCmds...) 
} @@ -861,7 +856,7 @@ func (p *DefaultProjectCommandBuilder) buildProjectCommandCtx(ctx *command.Conte repoRelDir = projCfg.RepoRelDir workspace = projCfg.Workspace for _, mp := range matchingProjects { - ctx.Log.Debug("Merging config for project at dir: %q workspace: %q", mp.Dir, mp.Workspace) + ctx.Log.Debug("Merging config for project at dir: '%s' workspace: '%s'", mp.Dir, mp.Workspace) projCfg = p.GlobalCfg.MergeProjectCfg(ctx.Log, ctx.Pull.BaseRepo.ID(), mp, *repoCfgPtr) projCtxs = append(projCtxs, diff --git a/server/events/project_command_builder_internal_test.go b/server/events/project_command_builder_internal_test.go index fc7c022073..d020871b31 100644 --- a/server/events/project_command_builder_internal_test.go +++ b/server/events/project_command_builder_internal_test.go @@ -84,7 +84,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -143,7 +143,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -204,7 +204,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -273,7 +273,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{}, @@ -429,7 +429,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -492,7 +492,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -558,7 +558,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{}, expApplySteps: []string{}, @@ -609,7 +609,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"plan"}, expApplySteps: []string{"apply"}, @@ -630,10 +630,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. 
globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -671,7 +672,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) @@ -693,9 +693,9 @@ projects: ErrEquals(t, c.expErr, err) return } - ctx := ctxs[0] Ok(t, err) + ctx := ctxs[0] // Construct expected steps. var stepNames []string @@ -824,7 +824,7 @@ projects: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPlanSteps: []string{"init", "plan"}, expApplySteps: []string{"apply"}, @@ -845,10 +845,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -862,7 +863,6 @@ projects: Ok(t, os.WriteFile(filepath.Join(tmp, "atlantis.yaml"), []byte(c.repoCfg), 0600)) } - logger := logging.NewNoopLogger(t) statsScope, _, _ := metrics.NewLoggingScope(logging.NewNoopLogger(t), "atlantis") terraformClient := mocks.NewMockClient() @@ -889,7 +889,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) @@ -911,9 +910,9 @@ projects: ErrEquals(t, c.expErr, err) return } - ctx := ctxs[0] Ok(t, err) + ctx := ctxs[0] Equals(t, 2, len(ctxs)) // Construct expected steps. @@ -1006,7 +1005,7 @@ repos: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, }, expPolicyCheckSteps: []string{"show", "policy_check"}, }, @@ -1070,7 +1069,7 @@ workflows: Verbose: true, Workspace: "myworkspace", PolicySets: emptyPolicySets, - RepoLocking: true, + RepoLocksMode: valid.DefaultRepoLocksMode, PolicySetTarget: "", }, expPolicyCheckSteps: []string{"policy_check"}, @@ -1091,10 +1090,11 @@ workflows: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -1136,7 +1136,6 @@ workflows: false, "auto", statsScope, - logger, terraformClient, ) @@ -1158,9 +1157,8 @@ workflows: return } - ctx := ctxs[1] - Ok(t, err) + ctx := ctxs[1] // Construct expected steps. 
var stepNames []string @@ -1246,10 +1244,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"modules/module/main.tf"}, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -1289,7 +1288,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) @@ -1386,10 +1384,11 @@ projects: }) workingDir := NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmp, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmp, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.modifiedFiles, nil) // Write and parse the global config file. globalCfgPath := filepath.Join(tmp, "global.yaml") @@ -1431,7 +1430,6 @@ projects: false, "auto", statsScope, - logger, terraformClient, ) diff --git a/server/events/project_command_builder_test.go b/server/events/project_command_builder_test.go index 3bcc0294be..7560b5d6de 100644 --- a/server/events/project_command_builder_test.go +++ b/server/events/project_command_builder_test.go @@ -3,6 +3,7 @@ package events_test import ( "os" "path/filepath" + "sort" "strings" "testing" @@ -48,6 +49,19 @@ var defaultUserConfig = struct { AutoDiscoverMode: "auto", } +func ChangedFiles(dirStructure map[string]interface{}, parent string) []string { + var files []string + for k, v := range dirStructure { + switch v := v.(type) { + case map[string]interface{}: + files = append(files, ChangedFiles(v, filepath.Join(parent, k))...) + default: + files = append(files, filepath.Join(parent, k)) + } + } + return files +} + func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) { // expCtxFields define the ctx fields we're going to assert on. // Since we're focused on autoplanning here, we don't validate all the @@ -57,11 +71,16 @@ func TestDefaultProjectCommandBuilder_BuildAutoplanCommands(t *testing.T) { RepoRelDir string Workspace string } + defaultTestDirStructure := map[string]interface{}{ + "main.tf": nil, + } + cases := []struct { - Description string - AtlantisYAML string - ServerSideYAML string - exp []expCtxFields + Description string + AtlantisYAML string + ServerSideYAML string + TestDirStructure map[string]interface{} + exp []expCtxFields }{ { Description: "simple atlantis.yaml", @@ -70,6 +89,7 @@ version: 3 projects: - dir: . `, + TestDirStructure: defaultTestDirStructure, exp: []expCtxFields{ { ProjectName: "", @@ -94,6 +114,7 @@ projects: name: myname workspace: myworkspace2 `, + TestDirStructure: defaultTestDirStructure, exp: []expCtxFields{ { ProjectName: "", @@ -122,6 +143,7 @@ projects: - dir: . 
workspace: myworkspace2 `, + TestDirStructure: defaultTestDirStructure, exp: []expCtxFields{ { ProjectName: "", @@ -142,7 +164,68 @@ version: 3 projects: - dir: mydir `, - exp: nil, + TestDirStructure: defaultTestDirStructure, + exp: nil, + }, + { + Description: "workspaces from subdirectories detected", + TestDirStructure: map[string]interface{}{ + "work": map[string]interface{}{ + "main.tf": ` +terraform { + cloud { + organization = "atlantis-test" + workspaces { + name = "test-workspace1" + } + } +}`, + }, + "test": map[string]interface{}{ + "main.tf": ` +terraform { + cloud { + organization = "atlantis-test" + workspaces { + name = "test-workspace12" + } + } +}`, + }, + }, + exp: []expCtxFields{ + { + ProjectName: "", + RepoRelDir: "test", + Workspace: "test-workspace12", + }, + { + ProjectName: "", + RepoRelDir: "work", + Workspace: "test-workspace1", + }, + }, + }, + { + Description: "workspaces in parent directory are detected", + TestDirStructure: map[string]interface{}{ + "main.tf": ` +terraform { + cloud { + organization = "atlantis-test" + workspaces { + name = "test-workspace" + } + } +}`, + }, + exp: []expCtxFields{ + { + ProjectName: "", + RepoRelDir: ".", + Workspace: "test-workspace", + }, + }, }, } @@ -151,20 +234,17 @@ projects: userConfig := defaultUserConfig terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) for _, c := range cases { t.Run(c.Description, func(t *testing.T) { RegisterMockTestingT(t) - tmpDir := DirStructure(t, map[string]interface{}{ - "main.tf": nil, - }) - + tmpDir := DirStructure(t, c.TestDirStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(ChangedFiles(c.TestDirStructure, ""), nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -194,7 +274,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -207,6 +286,17 @@ projects: }) Ok(t, err) Equals(t, len(c.exp), len(ctxs)) + + // Sort so comparisons are deterministic + sort.Slice(ctxs, func(i, j int) bool { + if ctxs[i].ProjectName != ctxs[j].ProjectName { + return ctxs[i].ProjectName < ctxs[j].ProjectName + } + if ctxs[i].RepoRelDir != ctxs[j].RepoRelDir { + return ctxs[i].RepoRelDir < ctxs[j].RepoRelDir + } + return ctxs[i].Workspace < ctxs[j].Workspace + }) for i, actCtx := range ctxs { expCtx := c.exp[i] Equals(t, expCtx.ProjectName, actCtx.ProjectName) @@ -384,7 +474,7 @@ projects: dir: . workspace: myworkspace `, - ExpErr: "must specify project name: more than one project defined in atlantis.yaml matched dir: \".\" workspace: \"myworkspace\"", + ExpErr: "must specify project name: more than one project defined in 'atlantis.yaml' matched dir: '.' 
workspace: 'myworkspace'", }, { Description: "atlantis.yaml with project flag not matching", @@ -399,7 +489,7 @@ version: 3 projects: - dir: . `, - ExpErr: "no project with name \"notconfigured\" is defined in atlantis.yaml", + ExpErr: "no project with name 'notconfigured' is defined in 'atlantis.yaml'", }, { Description: "atlantis.yaml with project flag not matching but silenced", @@ -511,11 +601,12 @@ projects: }) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -526,7 +617,6 @@ projects: } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -550,7 +640,6 @@ projects: userConfig.IncludeGitUntrackedFiles, c.AutoDiscoverModeUserCfg, scope, - logger, terraformClient, ) @@ -700,11 +789,12 @@ projects: tmpDir := DirStructure(t, c.DirectoryStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -715,7 +805,6 @@ projects: } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -739,7 +828,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1030,11 +1118,12 @@ projects: tmpDir := DirStructure(t, c.DirStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := 
vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) @@ -1045,7 +1134,6 @@ projects: } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1069,7 +1157,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1145,7 +1232,6 @@ func TestDefaultProjectCommandBuilder_BuildMultiApply(t *testing.T) { scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1169,7 +1255,6 @@ func TestDefaultProjectCommandBuilder_BuildMultiApply(t *testing.T) { userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1221,14 +1306,9 @@ projects: err := os.WriteFile(filepath.Join(repoDir, valid.DefaultAtlantisFile), []byte(yamlCfg), 0600) Ok(t, err) - When(workingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(repoDir, false, nil) - When(workingDir.GetWorkingDir( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string]())).ThenReturn(repoDir, nil) + When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(repoDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: true, @@ -1238,7 +1318,6 @@ projects: userConfig := defaultUserConfig terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1262,7 +1341,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1316,18 +1394,18 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { }) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: true, } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := 
events.NewProjectCommandBuilder( false, @@ -1351,7 +1429,6 @@ func TestDefaultProjectCommandBuilder_EscapeArgs(t *testing.T) { userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1470,19 +1547,12 @@ projects: tmpDir := DirStructure(t, testCase.DirStructure) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil) - + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(testCase.ModifiedFiles, nil) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) - - When(workingDir.GetWorkingDir( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string]())).ThenReturn(tmpDir, nil) + When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: true, @@ -1521,7 +1591,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1609,7 +1678,6 @@ projects: } scope, _, _ := metrics.NewLoggingScope(logger, "atlantis") terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1633,7 +1701,6 @@ projects: userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1651,7 +1718,8 @@ projects: }) Ok(t, err) Equals(t, c.ExpectedCtxs, len(actCtxs)) - workingDir.VerifyWasCalled(c.ExpectedClones).Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]()) + workingDir.VerifyWasCalled(c.ExpectedClones).Clone(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest](), Any[string]()) } } @@ -1666,10 +1734,11 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman userConfig := defaultUserConfig workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn([]string{"main.tf"}, nil) globalCfgArgs := valid.GlobalCfgArgs{ AllowAllRepoSettings: false, @@ -1678,7 +1747,6 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman globalCfg := valid.NewGlobalCfgFromArgs(globalCfgArgs) terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( true, @@ -1702,7 +1770,6 @@ func TestDefaultProjectCommandBuilder_WithPolicyCheckEnabled_BuildAutoplanComman userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - 
logger, terraformClient, ) @@ -1768,7 +1835,6 @@ func TestDefaultProjectCommandBuilder_BuildVersionCommand(t *testing.T) { AllowAllRepoSettings: false, } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, @@ -1792,7 +1858,6 @@ func TestDefaultProjectCommandBuilder_BuildVersionCommand(t *testing.T) { userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1886,19 +1951,20 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile tmpDir := DirStructure(t, c.DirectoryStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) - When(workingDir.GetGitUntrackedFiles(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) + When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(c.UntrackedFiles, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, // policyChecksSupported @@ -1922,7 +1988,6 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_Single_With_RestrictFile userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) @@ -1997,19 +2062,20 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked tmpDir := DirStructure(t, c.DirectoryStructure) workingDir := mocks.NewMockWorkingDir() - When(workingDir.Clone(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, false, nil) + When(workingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(tmpDir, false, nil) When(workingDir.GetWorkingDir(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(tmpDir, nil) - When(workingDir.GetGitUntrackedFiles(Any[models.Repo](), Any[models.PullRequest](), Any[string]())).ThenReturn(c.UntrackedFiles, nil) + When(workingDir.GetGitUntrackedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(c.UntrackedFiles, nil) vcsClient := vcsmocks.NewMockClient() - When(vcsClient.GetModifiedFiles( - Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) + When(vcsClient.GetModifiedFiles(Any[logging.SimpleLogging](), Any[models.Repo](), + 
Any[models.PullRequest]())).ThenReturn(c.ModifiedFiles, nil) if c.AtlantisYAML != "" { err := os.WriteFile(filepath.Join(tmpDir, valid.DefaultAtlantisFile), []byte(c.AtlantisYAML), 0600) Ok(t, err) } terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(Any[logging.SimpleLogging]())).ThenReturn([]string{}, nil) builder := events.NewProjectCommandBuilder( false, // policyChecksSupported @@ -2033,7 +2099,6 @@ func TestDefaultProjectCommandBuilder_BuildPlanCommands_with_IncludeGitUntracked userConfig.IncludeGitUntrackedFiles, userConfig.AutoDiscoverMode, scope, - logger, terraformClient, ) diff --git a/server/events/project_command_context_builder.go b/server/events/project_command_context_builder.go index 5ed6dad94c..6a08b16471 100644 --- a/server/events/project_command_context_builder.go +++ b/server/events/project_command_context_builder.go @@ -273,7 +273,7 @@ func newProjectCommandContext(ctx *command.Context, EscapedCommentArgs: escapedCommentArgs, AutomergeEnabled: automergeEnabled, DeleteSourceBranchOnMerge: projCfg.DeleteSourceBranchOnMerge, - RepoLocking: projCfg.RepoLocking, + RepoLocksMode: projCfg.RepoLocks.Mode, CustomPolicyCheck: projCfg.CustomPolicyCheck, ParallelApplyEnabled: parallelApplyEnabled, ParallelPlanEnabled: parallelPlanEnabled, @@ -306,6 +306,7 @@ func newProjectCommandContext(ctx *command.Context, JobID: uuid.New().String(), ExecutionOrderGroup: projCfg.ExecutionOrderGroup, AbortOnExcecutionOrderFail: abortOnExcecutionOrderFail, + SilencePRComments: projCfg.SilencePRComments, } } diff --git a/server/events/project_command_context_builder_test.go b/server/events/project_command_context_builder_test.go index 8bee1d9fb0..c3d75e950c 100644 --- a/server/events/project_command_context_builder_test.go +++ b/server/events/project_command_context_builder_test.go @@ -48,7 +48,6 @@ func TestProjectCommandContextBuilder_PullStatus(t *testing.T) { expectedPlanCmt := "Plan Comment" terraformClient := terraform_mocks.NewMockClient() - When(terraformClient.ListAvailableVersions(commandCtx.Log)) t.Run("with project name defined", func(t *testing.T) { When(mockCommentBuilder.BuildPlanComment(projRepoRelDir, projWorkspace, projName, []string{})).ThenReturn(expectedPlanCmt) diff --git a/server/events/project_command_runner.go b/server/events/project_command_runner.go index 38935aa421..410e40344d 100644 --- a/server/events/project_command_runner.go +++ b/server/events/project_command_runner.go @@ -78,7 +78,7 @@ type EnvStepRunner interface { // MultiEnvStepRunner runs multienv steps. type MultiEnvStepRunner interface { // Run cmd in path. 
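// Note on the signature change below: the new postProcessOutput parameter
// carries the step's configured output option (a valid.PostProcessRunOutputOption;
// presumably the same "show"/"hide" style options that run steps use) so that
// multienv step output can be post-processed the same way as run step output.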
- Run(ctx command.ProjectContext, cmd string, path string, envs map[string]string) (string, error) + Run(ctx command.ProjectContext, cmd string, path string, envs map[string]string, postProcessOutput valid.PostProcessRunOutputOption) (string, error) } //go:generate pegomock generate --package mocks -o mocks/mock_webhooks_sender.go WebhooksSender @@ -225,13 +225,14 @@ type DefaultProjectCommandRunner struct { func (p *DefaultProjectCommandRunner) Plan(ctx command.ProjectContext) command.ProjectResult { planSuccess, failure, err := p.doPlan(ctx) return command.ProjectResult{ - Command: command.Plan, - PlanSuccess: planSuccess, - Error: err, - Failure: failure, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, + Command: command.Plan, + PlanSuccess: planSuccess, + Error: err, + Failure: failure, + RepoRelDir: ctx.RepoRelDir, + Workspace: ctx.Workspace, + ProjectName: ctx.ProjectName, + SilencePRComments: ctx.SilencePRComments, } } @@ -253,13 +254,14 @@ func (p *DefaultProjectCommandRunner) PolicyCheck(ctx command.ProjectContext) co func (p *DefaultProjectCommandRunner) Apply(ctx command.ProjectContext) command.ProjectResult { applyOut, failure, err := p.doApply(ctx) return command.ProjectResult{ - Command: command.Apply, - Failure: failure, - Error: err, - ApplySuccess: applyOut, - RepoRelDir: ctx.RepoRelDir, - Workspace: ctx.Workspace, - ProjectName: ctx.ProjectName, + Command: command.Apply, + Failure: failure, + Error: err, + ApplySuccess: applyOut, + RepoRelDir: ctx.RepoRelDir, + Workspace: ctx.Workspace, + ProjectName: ctx.ProjectName, + SilencePRComments: ctx.SilencePRComments, } } @@ -320,7 +322,7 @@ func (p *DefaultProjectCommandRunner) StateRm(ctx command.ProjectContext) comman func (p *DefaultProjectCommandRunner) doApprovePolicies(ctx command.ProjectContext) (*models.PolicyCheckResults, string, error) { // Acquire Atlantis lock for this repo/dir/workspace. - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnPlanMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -417,7 +419,7 @@ func (p *DefaultProjectCommandRunner) doPolicyCheck(ctx command.ProjectContext) // we will attempt to capture the lock here but fail to get the working directory // at which point we will unlock again to preserve functionality // If we fail to capture the lock here (super unlikely) then we error out and the user is forced to replan - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnPlanMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") @@ -536,7 +538,7 @@ func (p *DefaultProjectCommandRunner) doPolicyCheck(ctx command.ProjectContext) func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*models.PlanSuccess, string, error) { // Acquire Atlantis lock for this repo/dir/workspace. 
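// Throughout this file the old boolean ctx.RepoLocking is replaced by the
// three-valued ctx.RepoLocksMode; a minimal sketch of the mapping, as used by
// the hunks below:
//
//	plan / policy check:  lock when ctx.RepoLocksMode == valid.RepoLocksOnPlanMode
//	apply:                lock when ctx.RepoLocksMode == valid.RepoLocksOnApplyMode
//	import / state rm:    lock unless ctx.RepoLocksMode == valid.RepoLocksDisabledMode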
- lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnPlanMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -554,7 +556,7 @@ func (p *DefaultProjectCommandRunner) doPlan(ctx command.ProjectContext) (*model p.WorkingDir.SetCheckForUpstreamChanges() // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, mergedAgain, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { if unlockErr := lockAttempt.UnlockFn(); unlockErr != nil { ctx.Log.Err("error unlocking state after plan error: %v", unlockErr) @@ -612,6 +614,16 @@ func (p *DefaultProjectCommandRunner) doApply(ctx command.ProjectContext) (apply return "", failure, err } + // Acquire Atlantis lock for this repo/dir/workspace. + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode == valid.RepoLocksOnApplyMode) + if err != nil { + return "", "", errors.Wrap(err, "acquiring lock") + } + if !lockAttempt.LockAcquired { + return "", lockAttempt.LockFailureReason, nil + } + ctx.Log.Debug("acquired lock for project") + // Acquire internal lock for the directory we're going to operate in. unlockFn, err := p.WorkingDirLocker.TryLock(ctx.Pull.BaseRepo.FullName, ctx.Pull.Num, ctx.Workspace, ctx.RepoRelDir) if err != nil { @@ -667,7 +679,7 @@ func (p *DefaultProjectCommandRunner) doVersion(ctx command.ProjectContext) (ver func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out *models.ImportSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. - repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { return nil, "", cloneErr } @@ -682,7 +694,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out } // Acquire Atlantis lock for this repo/dir/workspace. - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode != valid.RepoLocksDisabledMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -713,7 +725,7 @@ func (p *DefaultProjectCommandRunner) doImport(ctx command.ProjectContext) (out func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out *models.StateRmSuccess, failure string, err error) { // Clone is idempotent so okay to run even if the repo was already cloned. 
- repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.HeadRepo, ctx.Pull, ctx.Workspace) + repoDir, _, cloneErr := p.WorkingDir.Clone(ctx.Log, ctx.HeadRepo, ctx.Pull, ctx.Workspace) if cloneErr != nil { return nil, "", cloneErr } @@ -723,7 +735,7 @@ func (p *DefaultProjectCommandRunner) doStateRm(ctx command.ProjectContext) (out } // Acquire Atlantis lock for this repo/dir/workspace. - lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocking) + lockAttempt, err := p.Locker.TryLock(ctx.Log, ctx.Pull, ctx.User, ctx.Workspace, models.NewProject(ctx.Pull.BaseRepo.FullName, ctx.RepoRelDir, ctx.ProjectName), ctx.RepoLocksMode != valid.RepoLocksDisabledMode) if err != nil { return nil, "", errors.Wrap(err, "acquiring lock") } @@ -785,7 +797,7 @@ func (p *DefaultProjectCommandRunner) runSteps(steps []valid.Step, ctx command.P // be printed to the PR, it's solely to set the environment variable. out = "" case "multienv": - out, err = p.MultiEnvStepRunner.Run(ctx, step.RunCommand, absPath, envs) + out, err = p.MultiEnvStepRunner.Run(ctx, step.RunCommand, absPath, envs, step.Output) } if out != "" { diff --git a/server/events/project_command_runner_test.go b/server/events/project_command_runner_test.go index 4446a0054d..d55df4b2f8 100644 --- a/server/events/project_command_runner_test.go +++ b/server/events/project_command_runner_test.go @@ -63,22 +63,10 @@ func TestDefaultProjectCommandRunner_Plan(t *testing.T) { } repoDir := t.TempDir() - When(mockWorkingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string](), - )).ThenReturn(repoDir, false, nil) - When(mockLocker.TryLock( - Any[logging.SimpleLogging](), - Any[models.PullRequest](), - Any[models.User](), - Any[string](), - Any[models.Project](), - AnyBool(), - )).ThenReturn(&events.TryLockResponse{ - LockAcquired: true, - LockKey: "lock-key", - }, nil) + When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(repoDir, false, nil) + When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), + Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) expEnvs := map[string]string{ "name": "value", @@ -317,7 +305,7 @@ func TestDefaultProjectCommandRunner_ApplyDiverged(t *testing.T) { } tmp := t.TempDir() When(mockWorkingDir.GetWorkingDir(ctx.BaseRepo, ctx.Pull, ctx.Workspace)).ThenReturn(tmp, nil) - When(mockWorkingDir.HasDiverged(tmp)).ThenReturn(true) + When(mockWorkingDir.HasDiverged(ctx.Log, tmp)).ThenReturn(true) res := runner.Apply(ctx) Equals(t, "Default branch must be rebased onto pull request before running apply.", res.Failure) @@ -436,6 +424,17 @@ func TestDefaultProjectCommandRunner_Apply(t *testing.T) { Any[models.PullRequest](), Any[string](), )).ThenReturn(repoDir, nil) + When(mockLocker.TryLock( + Any[logging.SimpleLogging](), + Any[models.PullRequest](), + Any[models.User](), + Any[string](), + Any[models.Project](), + AnyBool(), + )).ThenReturn(&events.TryLockResponse{ + LockAcquired: true, + LockKey: "lock-key", + }, nil) ctx := command.ProjectContext{ Log: logging.NewNoopLogger(t), @@ -507,6 +506,17 @@ func TestDefaultProjectCommandRunner_ApplyRunStepFailure(t *testing.T) { Any[models.PullRequest](), Any[string](), )).ThenReturn(repoDir, nil) + When(mockLocker.TryLock( + Any[logging.SimpleLogging](), + 
Any[models.PullRequest](), + Any[models.User](), + Any[string](), + Any[models.Project](), + AnyBool(), + )).ThenReturn(&events.TryLockResponse{ + LockAcquired: true, + LockKey: "lock-key", + }, nil) ctx := command.ProjectContext{ Log: logging.NewNoopLogger(t), @@ -560,22 +570,10 @@ func TestDefaultProjectCommandRunner_RunEnvSteps(t *testing.T) { } repoDir := t.TempDir() - When(mockWorkingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string](), - )).ThenReturn(repoDir, false, nil) - When(mockLocker.TryLock( - Any[logging.SimpleLogging](), - Any[models.PullRequest](), - Any[models.User](), - Any[string](), - Any[models.Project](), - AnyBool(), - )).ThenReturn(&events.TryLockResponse{ - LockAcquired: true, - LockKey: "lock-key", - }, nil) + When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(repoDir, false, nil) + When(mockLocker.TryLock(Any[logging.SimpleLogging](), Any[models.PullRequest](), Any[models.User](), Any[string](), + Any[models.Project](), AnyBool())).ThenReturn(&events.TryLockResponse{LockAcquired: true, LockKey: "lock-key"}, nil) ctx := command.ProjectContext{ Log: logging.NewNoopLogger(t), @@ -714,11 +712,8 @@ func TestDefaultProjectCommandRunner_Import(t *testing.T) { RePlanCmd: "atlantis plan -d . -- addr id", } repoDir := t.TempDir() - When(mockWorkingDir.Clone( - Any[models.Repo](), - Any[models.PullRequest](), - Any[string](), - )).ThenReturn(repoDir, false, nil) + When(mockWorkingDir.Clone(Any[logging.SimpleLogging](), Any[models.Repo](), Any[models.PullRequest](), + Any[string]())).ThenReturn(repoDir, false, nil) if c.setup != nil { c.setup(repoDir, ctx, mockLocker, mockInit, mockImport) } diff --git a/server/events/project_locker_test.go b/server/events/project_locker_test.go index 62be1c40f9..268faf20ee 100644 --- a/server/events/project_locker_test.go +++ b/server/events/project_locker_test.go @@ -29,7 +29,7 @@ import ( func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) { var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) mockLocker := mocks.NewMockLocker() locker := events.DefaultProjectLocker{ Locker: mockLocker, @@ -65,7 +65,7 @@ func TestDefaultProjectLocker_TryLockWhenLocked(t *testing.T) { func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) { RegisterMockTestingT(t) var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) mockLocker := mocks.NewMockLocker() locker := events.DefaultProjectLocker{ Locker: mockLocker, @@ -104,7 +104,7 @@ func TestDefaultProjectLocker_TryLockWhenLockedSamePull(t *testing.T) { func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) { RegisterMockTestingT(t) var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) mockLocker := mocks.NewMockLocker() locker := events.DefaultProjectLocker{ Locker: mockLocker, @@ -142,7 +142,7 @@ func TestDefaultProjectLocker_TryLockUnlocked(t *testing.T) { func TestDefaultProjectLocker_RepoLocking(t *testing.T) { var githubClient *vcs.GithubClient - mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil) + mockClient := vcs.NewClientProxy(githubClient, nil, nil, nil, nil, nil) expProject := 
models.Project{} expWorkspace := "default" expPull := models.PullRequest{Num: 2} diff --git a/server/events/pull_closed_executor.go b/server/events/pull_closed_executor.go index 64b929633b..5c005dbc9a 100644 --- a/server/events/pull_closed_executor.go +++ b/server/events/pull_closed_executor.go @@ -51,7 +51,6 @@ type PullClosedExecutor struct { Locker locking.Locker VCSClient vcs.Client WorkingDir WorkingDir - Logger logging.SimpleLogging Backend locking.Backend PullClosedTemplate PullCleanupTemplate LogStreamResourceCleaner ResourceCleaner @@ -82,7 +81,7 @@ func (p *PullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo mode pullStatus, err := p.Backend.GetPullStatus(pull) if err != nil { // Log and continue to clean up other resources. - p.Logger.Err("retrieving pull status: %s", err) + logger.Err("retrieving pull status: %s", err) } if pullStatus != nil { @@ -97,7 +96,7 @@ func (p *PullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo mode } } - if err := p.WorkingDir.Delete(repo, pull); err != nil { + if err := p.WorkingDir.Delete(logger, repo, pull); err != nil { return errors.Wrap(err, "cleaning workspace") } @@ -111,7 +110,7 @@ func (p *PullClosedExecutor) CleanUpPull(logger logging.SimpleLogging, repo mode // Delete pull from DB. if err := p.Backend.DeletePullStatus(pull); err != nil { - p.Logger.Err("deleting pull from db: %s", err) + logger.Err("deleting pull from db: %s", err) } // If there are no locks then there's no need to comment. diff --git a/server/events/pull_closed_executor_test.go b/server/events/pull_closed_executor_test.go index 1236060d39..df904a1c6f 100644 --- a/server/events/pull_closed_executor_test.go +++ b/server/events/pull_closed_executor_test.go @@ -50,7 +50,7 @@ func TestCleanUpPullWorkspaceErr(t *testing.T) { Backend: db, } err = errors.New("err") - When(w.Delete(testdata.GithubRepo, testdata.Pull)).ThenReturn(err) + When(w.Delete(logger, testdata.GithubRepo, testdata.Pull)).ThenReturn(err) actualErr := pce.CleanUpPull(logger, testdata.GithubRepo, testdata.Pull) Equals(t, "cleaning workspace: err", actualErr.Error()) } @@ -271,7 +271,6 @@ func TestCleanUpLogStreaming(t *testing.T) { VCSClient: client, PullClosedTemplate: &events.PullClosedEventTemplate{}, LogStreamResourceCleaner: prjCmdOutHandler, - Logger: logger, } locks := []models.ProjectLock{ diff --git a/server/events/pull_updater.go b/server/events/pull_updater.go index d640e5a374..d85bd84f9d 100644 --- a/server/events/pull_updater.go +++ b/server/events/pull_updater.go @@ -3,6 +3,7 @@ package events import ( "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/vcs" + "github.com/runatlantis/atlantis/server/utils" ) type PullUpdater struct { @@ -23,13 +24,30 @@ func (c *PullUpdater) updatePull(ctx *command.Context, cmd PullCommand, res comm // clutter in a pull/merge request. This will not delete the comment, since the // comment trail may be useful in auditing or backtracing problems. 
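// Below, before rendering, any project result whose SilencePRComments list
// contains the current command name is dropped, and if every result is
// silenced the update returns without posting a comment at all.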
if c.HidePrevPlanComments { - ctx.Log.Debug("Hiding previous plan comments for command: '%v', directory: '%v'", cmd.CommandName().TitleString(), cmd.Dir()) + ctx.Log.Debug("hiding previous plan comments for command: '%v', directory: '%v'", cmd.CommandName().TitleString(), cmd.Dir()) if err := c.VCSClient.HidePrevCommandComments(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, cmd.CommandName().TitleString(), cmd.Dir()); err != nil { ctx.Log.Err("unable to hide old comments: %s", err) } } - comment := c.MarkdownRenderer.Render(res, cmd.CommandName(), cmd.SubCommandName(), ctx.Log.GetHistory(), cmd.IsVerbose(), ctx.Pull.BaseRepo.VCSHost.Type) + if len(res.ProjectResults) > 0 { + var commentOnProjects []command.ProjectResult + for _, result := range res.ProjectResults { + if utils.SlicesContains(result.SilencePRComments, cmd.CommandName().String()) { + ctx.Log.Debug("silenced command '%s' comment for project '%s'", cmd.CommandName().String(), result.ProjectName) + continue + } + commentOnProjects = append(commentOnProjects, result) + } + + if len(commentOnProjects) == 0 { + return + } + + res.ProjectResults = commentOnProjects + } + + comment := c.MarkdownRenderer.Render(ctx, res, cmd) if err := c.VCSClient.CreateComment(ctx.Log, ctx.Pull.BaseRepo, ctx.Pull.Num, comment, cmd.CommandName().String()); err != nil { ctx.Log.Err("unable to comment: %s", err) } diff --git a/server/events/repo_branch_test.go b/server/events/repo_branch_test.go index 4a6bb9e922..cc4521a20e 100644 --- a/server/events/repo_branch_test.go +++ b/server/events/repo_branch_test.go @@ -80,7 +80,7 @@ projects: repo, err := parser.ParseRepoCfg(tmp, global, "github.com/foo/bar", "main") require.NoError(t, err) - require.Equal(t, 1, len(repo.Projects)) + require.Len(t, repo.Projects, 1) t.Logf("Projects: %+v", repo.Projects) } diff --git a/server/events/templates/import_success_unwrapped.tmpl b/server/events/templates/import_success_unwrapped.tmpl index c8a8a1b19d..08b6336d4d 100644 --- a/server/events/templates/import_success_unwrapped.tmpl +++ b/server/events/templates/import_success_unwrapped.tmpl @@ -6,5 +6,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `{{.RePlanCmd}}` + ```shell + {{.RePlanCmd}} + ``` {{ end -}} diff --git a/server/events/templates/import_success_wrapped.tmpl b/server/events/templates/import_success_wrapped.tmpl index 12711c1d4d..00d9689a38 100644 --- a/server/events/templates/import_success_wrapped.tmpl +++ b/server/events/templates/import_success_wrapped.tmpl @@ -8,5 +8,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} diff --git a/server/events/templates/log.tmpl b/server/events/templates/log.tmpl index cb409801c7..305436eebb 100644 --- a/server/events/templates/log.tmpl +++ b/server/events/templates/log.tmpl @@ -1,7 +1,7 @@ {{ define "log" -}} -{{ if .Verbose }} +{{ if .Verbose -}}
+{{ if .Verbose -}}
<details><summary>Log</summary>
  <p>

``` {{.Log}}``` diff --git a/server/events/templates/merged_again.tmpl b/server/events/templates/merged_again.tmpl index 796afe552a..ece363f19e 100644 --- a/server/events/templates/merged_again.tmpl +++ b/server/events/templates/merged_again.tmpl @@ -1,5 +1,5 @@ {{ define "mergedAgain" -}} -{{ if .MergedAgain }} +{{ if .MergedAgain -}} :twisted_rightwards_arrows: Upstream was modified, a new merge was performed. {{ end -}} {{ end -}} diff --git a/server/events/templates/multi_project_apply.tmpl b/server/events/templates/multi_project_apply.tmpl index 50038555b3..2e2b2baa30 100644 --- a/server/events/templates/multi_project_apply.tmpl +++ b/server/events/templates/multi_project_apply.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectApply" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered }} diff --git a/server/events/templates/multi_project_header.tmpl b/server/events/templates/multi_project_header.tmpl index c202c7e50c..c1ce5dc053 100644 --- a/server/events/templates/multi_project_header.tmpl +++ b/server/events/templates/multi_project_header.tmpl @@ -3,5 +3,9 @@ Ran {{.Command}} for {{ len .Results }} projects: {{ range $result := .Results -}} 1. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` +{{ end -}} +{{ if (gt (len .Results) 0) -}} +--- + {{ end -}} {{ end -}} diff --git a/server/events/templates/multi_project_import.tmpl b/server/events/templates/multi_project_import.tmpl index 22e4b4388d..31cd70cbd4 100644 --- a/server/events/templates/multi_project_import.tmpl +++ b/server/events/templates/multi_project_import.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectImport" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered }} diff --git a/server/events/templates/multi_project_plan.tmpl b/server/events/templates/multi_project_plan.tmpl index 9c3898ad48..f57e96794a 100644 --- a/server/events/templates/multi_project_plan.tmpl +++ b/server/events/templates/multi_project_plan.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectPlan" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . 
-}} {{ $disableApplyAll := .DisableApplyAll -}} {{ $hideUnchangedPlans := .HideUnchangedPlanComments -}} {{ range $i, $result := .Results -}} diff --git a/server/events/templates/multi_project_plan_footer.tmpl b/server/events/templates/multi_project_plan_footer.tmpl index 41683ab018..1c193a16b7 100644 --- a/server/events/templates/multi_project_plan_footer.tmpl +++ b/server/events/templates/multi_project_plan_footer.tmpl @@ -4,10 +4,14 @@ {{ len .Results }} projects, {{ .NumPlansWithChanges }} with changes, {{ .NumPlansWithNoChanges }} with no changes, {{ .NumPlanFailures }} failed {{ if and (not .PlansDeleted) (ne .DisableApplyAll true) }} -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` {{ end -}} {{ end -}} {{ end -}} diff --git a/server/events/templates/multi_project_policy.tmpl b/server/events/templates/multi_project_policy.tmpl index c34c59f896..add574fde4 100644 --- a/server/events/templates/multi_project_policy.tmpl +++ b/server/events/templates/multi_project_policy.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectPolicy" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ $disableApplyAll := .DisableApplyAll -}} {{ $hideUnchangedPlans := .HideUnchangedPlanComments -}} {{ range $i, $result := .Results -}} @@ -13,10 +13,14 @@ {{ end -}} {{ if ne .DisableApplyAll true -}} {{ if and (gt (len .Results) 0) (not .PlansDeleted) -}} -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` {{ end -}} {{ end -}} {{ template "log" . -}} diff --git a/server/events/templates/multi_project_policy_unsuccessful.tmpl b/server/events/templates/multi_project_policy_unsuccessful.tmpl index a0a59fd994..039dd9ce7c 100644 --- a/server/events/templates/multi_project_policy_unsuccessful.tmpl +++ b/server/events/templates/multi_project_policy_unsuccessful.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectPolicyUnsuccessful" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ $disableApplyAll := .DisableApplyAll -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. 
{{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` @@ -11,12 +11,18 @@ {{ end -}} {{ if ne .DisableApplyAll true -}} {{ if and (gt (len .Results) 0) (not .PlansDeleted) -}} -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .ExecutableName }} plan` + ```shell + {{ .ExecutableName }} plan + ``` {{ end -}} {{ end -}} {{- template "log" . -}} diff --git a/server/events/templates/multi_project_state_rm.tmpl b/server/events/templates/multi_project_state_rm.tmpl index 90c0259dfe..a00464a7b8 100644 --- a/server/events/templates/multi_project_state_rm.tmpl +++ b/server/events/templates/multi_project_state_rm.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectStateRm" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered}} diff --git a/server/events/templates/multi_project_version.tmpl b/server/events/templates/multi_project_version.tmpl index 08266520e5..70eeea40f9 100644 --- a/server/events/templates/multi_project_version.tmpl +++ b/server/events/templates/multi_project_version.tmpl @@ -1,5 +1,5 @@ {{ define "multiProjectVersion" -}} -{{ template "multiProjectHeader" . }} +{{ template "multiProjectHeader" . -}} {{ range $i, $result := .Results -}} ### {{ add $i 1 }}. {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered}} diff --git a/server/events/templates/plan_success_unwrapped.tmpl b/server/events/templates/plan_success_unwrapped.tmpl index 6bd81de233..e4ed2e0911 100644 --- a/server/events/templates/plan_success_unwrapped.tmpl +++ b/server/events/templates/plan_success_unwrapped.tmpl @@ -8,13 +8,17 @@ This plan was not saved because one or more projects failed and automerge requir {{ else -}} {{ if not .DisableApply -}} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{ end -}} {{ if not .DisableRepoLocking -}} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) {{ end -}} * :repeat: To **plan** this project again, comment: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} -{{ template "mergedAgain" . }} +{{ template "mergedAgain" . 
-}} {{ end -}} diff --git a/server/events/templates/plan_success_wrapped.tmpl b/server/events/templates/plan_success_wrapped.tmpl index cef96d0609..55c0d3042a 100644 --- a/server/events/templates/plan_success_wrapped.tmpl +++ b/server/events/templates/plan_success_wrapped.tmpl @@ -4,21 +4,25 @@ ```diff {{ if .EnableDiffMarkdownFormat }}{{ .DiffMarkdownFormattedTerraformOutput }}{{ else }}{{ .TerraformOutput }}{{ end }} ``` +

{{ if .PlanWasDeleted -}} This plan was not saved because one or more projects failed and automerge requires all plans pass. {{ else -}} {{ if not .DisableApply -}} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{ end -}} {{ if not .DisableRepoLocking -}} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) {{ end -}} * :repeat: To **plan** this project again, comment: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} - {{ .PlanSummary -}} {{ template "mergedAgain" . -}} {{ end -}} diff --git a/server/events/templates/policy_check_results_unwrapped.tmpl b/server/events/templates/policy_check_results_unwrapped.tmpl index 089e85660f..16d7b9e865 100644 --- a/server/events/templates/policy_check_results_unwrapped.tmpl +++ b/server/events/templates/policy_check_results_unwrapped.tmpl @@ -14,16 +14,22 @@ {{- end }} {{- if .PolicyCleared }} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{- else }} #### Policy Approval Status: ``` {{ .PolicyApprovalSummary }} ``` * :heavy_check_mark: To **approve** this project, comment: - * `{{ .ApprovePoliciesCmd }}` + ```shell + {{ .ApprovePoliciesCmd }} + ``` {{- end }} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .RePlanCmd }}` + ```shell + {{ .RePlanCmd }} + ``` {{ end -}} diff --git a/server/events/templates/policy_check_results_wrapped.tmpl b/server/events/templates/policy_check_results_wrapped.tmpl index bf03a6b1f1..330980c2f4 100644 --- a/server/events/templates/policy_check_results_wrapped.tmpl +++ b/server/events/templates/policy_check_results_wrapped.tmpl @@ -15,23 +15,32 @@ {{- end }} {{- if .PolicyCleared }} * :arrow_forward: To **apply** this plan, comment: - * `{{ .ApplyCmd }}` + ```shell + {{ .ApplyCmd }} + ``` {{- else }} + #### Policy Approval Status: ``` {{ .PolicyApprovalSummary }} ``` * :heavy_check_mark: To **approve** this project, comment: - * `{{ .ApprovePoliciesCmd }}` + ```shell + {{ .ApprovePoliciesCmd }} + ``` {{- end }} -* :put_litter_in_its_place: To **delete** this plan click [here]({{ .LockURL }}) +* :put_litter_in_its_place: To **delete** this plan and lock, click [here]({{ .LockURL }}) * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .RePlanCmd }}` - + ```shell + {{ .RePlanCmd }} + ``` {{- if eq .Command "Policy Check" }} +{{- if ne .PolicyCheckSummary "" }} ``` {{ .PolicyCheckSummary }} ``` {{- end }} -{{ end -}} \ No newline at end of file + +{{- end }} +{{ end -}} diff --git a/server/events/templates/single_project_plan_success.tmpl b/server/events/templates/single_project_plan_success.tmpl index afbe3d5701..77f6e13d64 100644 --- a/server/events/templates/single_project_plan_success.tmpl +++ b/server/events/templates/single_project_plan_success.tmpl @@ -5,10 +5,14 @@ Ran {{ .Command }} for {{ if $result.ProjectName }}project: `{{ $result.ProjectN {{ $result.Rendered }} {{ if ne .DisableApplyAll true }} --- -* :fast_forward: To **apply** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} apply` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName 
}} unlock` +* :fast_forward: To **apply** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} apply + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` {{ end -}} {{ template "log" . -}} {{ end -}} diff --git a/server/events/templates/single_project_policy_unsuccessful.tmpl b/server/events/templates/single_project_policy_unsuccessful.tmpl index 0760406814..0bf0ac1a0c 100644 --- a/server/events/templates/single_project_policy_unsuccessful.tmpl +++ b/server/events/templates/single_project_policy_unsuccessful.tmpl @@ -3,14 +3,20 @@ Ran {{ .Command }} for {{ if $result.ProjectName }}project: `{{ $result.ProjectName }}` {{ end }}dir: `{{ $result.RepoRelDir }}` workspace: `{{ $result.Workspace }}` {{ $result.Rendered }} -{{ if ne .DisableApplyAll true }} +{{ if ne .DisableApplyAll true -}} --- -* :heavy_check_mark: To **approve** all unapplied plans from this pull request, comment: - * `{{ .ExecutableName }} approve_policies` -* :put_litter_in_its_place: To **delete** all plans and locks for the PR, comment: - * `{{ .ExecutableName }} unlock` +* :heavy_check_mark: To **approve** all unapplied plans from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} approve_policies + ``` +* :put_litter_in_its_place: To **delete** all plans and locks from this {{ .VcsRequestType }}, comment: + ```shell + {{ .ExecutableName }} unlock + ``` * :repeat: To re-run policies **plan** this project again by commenting: - * `{{ .ExecutableName }} plan` + ```shell + {{ .ExecutableName }} plan + ``` {{ end -}} {{- template "log" . -}} {{ end -}} diff --git a/server/events/templates/state_rm_success_unwrapped.tmpl b/server/events/templates/state_rm_success_unwrapped.tmpl index c0f24323a5..564d8796ae 100644 --- a/server/events/templates/state_rm_success_unwrapped.tmpl +++ b/server/events/templates/state_rm_success_unwrapped.tmpl @@ -6,5 +6,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. * :repeat: To **plan** this project again, comment: - * `{{.RePlanCmd}}` + ```shell + {{.RePlanCmd}} + ``` {{ end }} diff --git a/server/events/templates/state_rm_success_wrapped.tmpl b/server/events/templates/state_rm_success_wrapped.tmpl index f182c85bc1..2a703107c6 100644 --- a/server/events/templates/state_rm_success_wrapped.tmpl +++ b/server/events/templates/state_rm_success_wrapped.tmpl @@ -8,5 +8,7 @@ :put_litter_in_its_place: A plan file was discarded. Re-plan would be required before applying. 
* :repeat: To **plan** this project again, comment: - * `{{.RePlanCmd}}` + ```shell + {{.RePlanCmd}} + ``` {{ end }} diff --git a/server/events/unlock_command_runner.go b/server/events/unlock_command_runner.go index 470fe26118..af360adf83 100644 --- a/server/events/unlock_command_runner.go +++ b/server/events/unlock_command_runner.go @@ -56,7 +56,7 @@ func (u *UnlockCommandRunner) Run(ctx *command.Context, _ *CommentCommand) { var numLocks int if err == nil && !hasLabel { - numLocks, err = u.deleteLockCommand.DeleteLocksByPull(baseRepo.FullName, pullNum) + numLocks, err = u.deleteLockCommand.DeleteLocksByPull(ctx.Log, baseRepo.FullName, pullNum) if err != nil { vcsMessage = "Failed to delete PR locks" ctx.Log.Err("failed to delete locks by pull %s", err.Error()) diff --git a/server/events/vcs/azuredevops_client.go b/server/events/vcs/azuredevops_client.go index 03bc1963c0..cd2ebe52fe 100644 --- a/server/events/vcs/azuredevops_client.go +++ b/server/events/vcs/azuredevops_client.go @@ -316,7 +316,7 @@ func (g *AzureDevopsClient) MergePull(logger logging.SimpleLogging, pull models. return fmt.Errorf("the user %s is not found in the organization %s", g.UserName, owner) } - imageURL := "https://github.com/runatlantis/atlantis/raw/main/runatlantis.io/.vuepress/public/hero.png" + imageURL := "https://raw.githubusercontent.com/runatlantis/atlantis/main/runatlantis.io/public/hero.png" id := azuredevops.IdentityRef{ Descriptor: &descriptor, ID: userID, diff --git a/server/events/vcs/bitbucketcloud/client_test.go b/server/events/vcs/bitbucketcloud/client_test.go index e7def22b66..0a4bd48db6 100644 --- a/server/events/vcs/bitbucketcloud/client_test.go +++ b/server/events/vcs/bitbucketcloud/client_test.go @@ -14,6 +14,8 @@ import ( . "github.com/runatlantis/atlantis/testing" ) +const diffstatURL = "/2.0/repositories/owner/repo/pullrequests/1/diffstat" + // Should follow pagination properly. func TestClient_GetModifiedFilesPagination(t *testing.T) { logger := logging.NewNoopLogger(t) @@ -56,12 +58,12 @@ func TestClient_GetModifiedFilesPagination(t *testing.T) { testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { // The first request should hit this URL. - case "/2.0/repositories/owner/repo/pullrequests/1/diffstat": - resp := firstResp + fmt.Sprintf(`,"next": "%s/2.0/repositories/owner/repo/pullrequests/1/diffstat?page=2"}`, serverURL) + case diffstatURL: + resp := firstResp + fmt.Sprintf(`,"next": "%s%s?page=2"}`, serverURL, diffstatURL) w.Write([]byte(resp)) // nolint: errcheck return // The second should hit this URL. - case "/2.0/repositories/owner/repo/pullrequests/1/diffstat?page=2": + case fmt.Sprintf("%s?page=2", diffstatURL): w.Write([]byte(secondResp + "}")) // nolint: errcheck default: t.Errorf("got unexpected request at %q", r.RequestURI) @@ -125,7 +127,7 @@ func TestClient_GetModifiedFilesOldNil(t *testing.T) { testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { // The first request should hit this URL. 
- case "/2.0/repositories/owner/repo/pullrequests/1/diffstat": + case diffstatURL: w.Write([]byte(resp)) // nolint: errcheck return default: @@ -322,7 +324,7 @@ func TestClient_PullIsMergeable(t *testing.T) { t.Run(name, func(t *testing.T) { testServer := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { switch r.RequestURI { - case "/2.0/repositories/owner/repo/pullrequests/1/diffstat": + case diffstatURL: w.Write([]byte(c.DiffStat)) // nolint: errcheck return default: diff --git a/server/events/vcs/gitea/client.go b/server/events/vcs/gitea/client.go new file mode 100644 index 0000000000..f9deb2cb74 --- /dev/null +++ b/server/events/vcs/gitea/client.go @@ -0,0 +1,517 @@ +// Copyright 2024 Martijn van der Kleijn & Florian Beisel +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import ( + "context" + "encoding/base64" + "fmt" + "strings" + "time" + + "code.gitea.io/sdk/gitea" + "github.com/pkg/errors" + "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/logging" +) + +// Emergency break for Gitea pagination (just in case) +// Set to 500 to prevent runaway situations +// Value chosen purposely high, though randomly. +const giteaPaginationEBreak = 500 + +type GiteaClient struct { + giteaClient *gitea.Client + username string + token string + pageSize int + ctx context.Context +} + +type GiteaPRReviewSummary struct { + Reviews []GiteaReview +} + +type GiteaReview struct { + ID int64 + Body string + Reviewer string + State gitea.ReviewStateType // e.g., "APPROVED", "PENDING", "REQUEST_CHANGES" + SubmittedAt time.Time +} + +type GiteaPullGetter interface { + GetPullRequest(repo models.Repo, pullNum int) (*gitea.PullRequest, error) +} + +// NewClient builds a client that makes API calls to Gitea. httpClient is the +// client to use to make the requests, username and password are used as basic +// auth in the requests, baseURL is the API's baseURL, ex. https://corp.com:7990. +// Don't include the API version, ex. '/1.0'. 
+func NewClient(baseURL string, username string, token string, pagesize int, logger logging.SimpleLogging) (*GiteaClient, error) {
+	logger.Debug("Creating new Gitea client for: %s", baseURL)
+
+	giteaClient, err := gitea.NewClient(baseURL,
+		gitea.SetToken(token),
+		gitea.SetUserAgent("atlantis"),
+	)
+
+	if err != nil {
+		return nil, errors.Wrap(err, "creating gitea client")
+	}
+
+	return &GiteaClient{
+		giteaClient: giteaClient,
+		username:    username,
+		token:       token,
+		pageSize:    pagesize,
+		ctx:         context.Background(),
+	}, nil
+}
+
+func (c *GiteaClient) GetPullRequest(logger logging.SimpleLogging, repo models.Repo, pullNum int) (*gitea.PullRequest, error) {
+	logger.Debug("Getting Gitea pull request %d", pullNum)
+
+	pr, resp, err := c.giteaClient.GetPullRequest(repo.Owner, repo.Name, int64(pullNum))
+
+	if err != nil {
+		logger.Debug("GET /repos/%v/%v/pulls/%d returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+		return nil, err
+	}
+
+	return pr, nil
+}
+
+// GetModifiedFiles returns the names of files that were modified in the merge request
+// relative to the repo root, e.g. parent/child/file.txt.
+func (c *GiteaClient) GetModifiedFiles(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) {
+	logger.Debug("Getting modified files for Gitea pull request %d", pull.Num)
+
+	changedFiles := make([]string, 0)
+	page := 0
+	nextPage := 1
+	listOptions := gitea.ListPullRequestFilesOptions{
+		ListOptions: gitea.ListOptions{
+			Page:     1,
+			PageSize: c.pageSize,
+		},
+	}
+
+	for page < nextPage {
+		page++
+		listOptions.ListOptions.Page = page
+		files, resp, err := c.giteaClient.ListPullRequestFiles(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+		if err != nil {
+			logger.Debug("[page %d] GET /repos/%v/%v/pulls/%d/files returned: %v", page, repo.Owner, repo.Name, pull.Num, resp.StatusCode)
+			return nil, err
+		}
+
+		for _, file := range files {
+			changedFiles = append(changedFiles, file.Filename)
+		}
+
+		nextPage = resp.NextPage
+
+		// Emergency break after giteaPaginationEBreak pages
+		if page >= giteaPaginationEBreak {
+			break
+		}
+	}
+
+	return changedFiles, nil
+}
+
+// CreateComment creates a comment on the merge request. As far as we're aware, Gitea has no built-in max comment length right now.
+func (c *GiteaClient) CreateComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, comment string, command string) error {
+	logger.Debug("Creating comment on Gitea pull request %d", pullNum)
+
+	opt := gitea.CreateIssueCommentOption{
+		Body: comment,
+	}
+
+	_, resp, err := c.giteaClient.CreateIssueComment(repo.Owner, repo.Name, int64(pullNum), opt)
+
+	if err != nil {
+		logger.Debug("POST /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode)
+		return err
+	}
+
+	logger.Debug("Added comment to Gitea pull request %d: %s", pullNum, comment)
+
+	return nil
+}
+
+// ReactToComment adds a reaction to a comment.
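The calls above compose naturally from a command handler. A hedged usage sketch, where commentModifiedFiles is a hypothetical helper (not part of this diff) and the types are the ones used in this file:

```go
package example

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/events/vcs/gitea"
	"github.com/runatlantis/atlantis/server/logging"
)

// commentModifiedFiles fetches the changed files for a pull request and posts
// a summary comment. client is a *gitea.GiteaClient as built by NewClient.
func commentModifiedFiles(client *gitea.GiteaClient, logger logging.SimpleLogging,
	repo models.Repo, pull models.PullRequest) error {
	files, err := client.GetModifiedFiles(logger, repo, pull)
	if err != nil {
		return err
	}
	msg := fmt.Sprintf("This pull request modifies %d file(s).", len(files))
	// The final argument is the command name; the implementation above accepts
	// it but does not use it when posting a plain comment.
	return client.CreateComment(logger, repo, pull.Num, msg, "plan")
}
```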
+func (c *GiteaClient) ReactToComment(logger logging.SimpleLogging, repo models.Repo, pullNum int, commentID int64, reaction string) error { + logger.Debug("Adding reaction to Gitea pull request comment %d", commentID) + + _, resp, err := c.giteaClient.PostIssueCommentReaction(repo.Owner, repo.Name, commentID, reaction) + + if err != nil { + logger.Debug("POST /repos/%v/%v/issues/comments/%d/reactions returned: %v", repo.Owner, repo.Name, commentID, resp.StatusCode) + return err + } + + return nil +} + +// HidePrevCommandComments hides the previous command comments from the pull +// request. +func (c *GiteaClient) HidePrevCommandComments(logger logging.SimpleLogging, repo models.Repo, pullNum int, command string, dir string) error { + logger.Debug("Hiding previous command comments on Gitea pull request %d", pullNum) + + var allComments []*gitea.Comment + + nextPage := int(1) + for { + // Initialize ListIssueCommentOptions with the current page + opts := gitea.ListIssueCommentOptions{ + ListOptions: gitea.ListOptions{ + Page: nextPage, + PageSize: c.pageSize, + }, + } + + comments, resp, err := c.giteaClient.ListIssueComments(repo.Owner, repo.Name, int64(pullNum), opts) + if err != nil { + logger.Debug("GET /repos/%v/%v/issues/%d/comments returned: %v", repo.Owner, repo.Name, pullNum, resp.StatusCode) + return err + } + + allComments = append(allComments, comments...) + + // Break the loop if there are no more pages to fetch + if resp.NextPage == 0 { + break + } + nextPage = resp.NextPage + } + + currentUser, resp, err := c.giteaClient.GetMyUserInfo() + if err != nil { + logger.Debug("GET /user returned: %v", resp.StatusCode) + return err + } + + summaryHeader := fmt.Sprintf("
<details><summary>Superseded Atlantis %s</summary>", command)
+	summaryFooter := "</details>
" + lineFeed := "\n" + + for _, comment := range allComments { + if comment.Poster == nil || comment.Poster.UserName != currentUser.UserName { + continue + } + + body := strings.Split(comment.Body, "\n") + if len(body) == 0 || (!strings.Contains(strings.ToLower(body[0]), strings.ToLower(command)) && dir != "" && !strings.Contains(strings.ToLower(body[0]), strings.ToLower(dir))) { + continue + } + + supersededComment := summaryHeader + lineFeed + comment.Body + lineFeed + summaryFooter + lineFeed + + logger.Debug("Hiding comment %s", comment.ID) + _, _, err := c.giteaClient.EditIssueComment(repo.Owner, repo.Name, comment.ID, gitea.EditIssueCommentOption{ + Body: supersededComment, + }) + if err != nil { + return err + } + } + + return nil +} + +// PullIsApproved returns ApprovalStatus with IsApproved set to true if the pull request has a review that approved the PR. +func (c *GiteaClient) PullIsApproved(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) (models.ApprovalStatus, error) { + logger.Debug("Checking if Gitea pull request %d is approved", pull.Num) + + page := 0 + nextPage := 1 + + approvalStatus := models.ApprovalStatus{ + IsApproved: false, + } + + listOptions := gitea.ListPullReviewsOptions{ + ListOptions: gitea.ListOptions{ + Page: 1, + PageSize: c.pageSize, + }, + } + + for page < nextPage { + page = +1 + listOptions.ListOptions.Page = page + pullReviews, resp, err := c.giteaClient.ListPullReviews(repo.Owner, repo.Name, int64(pull.Num), listOptions) + + if err != nil { + logger.Debug("GET /repos/%v/%v/pulls/%d/reviews returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode) + return approvalStatus, err + } + + for _, review := range pullReviews { + if review.State == gitea.ReviewStateApproved { + approvalStatus.IsApproved = true + approvalStatus.ApprovedBy = review.Reviewer.UserName + approvalStatus.Date = review.Submitted + + return approvalStatus, nil + } + } + + nextPage = resp.NextPage + + // Emergency break after giteaPaginationEBreak pages + if page >= giteaPaginationEBreak { + break + } + } + + return approvalStatus, nil +} + +// PullIsMergeable returns true if the pull request is mergeable +func (c *GiteaClient) PullIsMergeable(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, vcsstatusname string) (bool, error) { + logger.Debug("Checking if Gitea pull request %d is mergeable", pull.Num) + + pullRequest, _, err := c.giteaClient.GetPullRequest(repo.Owner, repo.Name, int64(pull.Num)) + + if err != nil { + return false, err + } + + logger.Debug("Gitea pull request is mergeable: %v (%v)", pullRequest.Mergeable, pull.Num) + + return pullRequest.Mergeable, nil +} + +// UpdateStatus updates the commit status to state for pull. src is the +// source of this status. This should be relatively static across runs, +// ex. atlantis/plan or atlantis/apply. +// description is a description of this particular status update and can +// change across runs. +// url is an optional link that users should click on for more information +// about this status. 
+func (c *GiteaClient) UpdateStatus(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest, state models.CommitStatus, src string, description string, url string) error {
+	giteaState := gitea.StatusFailure
+
+	switch state {
+	case models.PendingCommitStatus:
+		giteaState = gitea.StatusPending
+	case models.SuccessCommitStatus:
+		giteaState = gitea.StatusSuccess
+	case models.FailedCommitStatus:
+		giteaState = gitea.StatusFailure
+	}
+
+	logger.Debug("Updating status on Gitea pull request %d for '%s' to '%s'", pull.Num, description, state)
+
+	newStatusOption := gitea.CreateStatusOption{
+		State:       giteaState,
+		TargetURL:   url,
+		Description: description,
+	}
+
+	_, resp, err := c.giteaClient.CreateStatus(repo.Owner, repo.Name, pull.HeadCommit, newStatusOption)
+
+	if err != nil {
+		logger.Debug("POST /repos/%v/%v/statuses/%s returned: %v", repo.Owner, repo.Name, pull.HeadCommit, resp.StatusCode)
+		return err
+	}
+
+	logger.Debug("Gitea status for pull request %d updated: %v", pull.Num, state)
+
+	return nil
+}
+
+// DiscardReviews discards / dismisses all pull request reviews.
+func (c *GiteaClient) DiscardReviews(repo models.Repo, pull models.PullRequest) error {
+	page := 0
+	nextPage := 1
+
+	dismissOptions := gitea.DismissPullReviewOptions{
+		Message: "Dismissed by Atlantis",
+	}
+
+	listOptions := gitea.ListPullReviewsOptions{
+		ListOptions: gitea.ListOptions{
+			Page:     1,
+			PageSize: c.pageSize,
+		},
+	}
+
+	for page < nextPage {
+		page++
+		listOptions.ListOptions.Page = page
+		pullReviews, resp, err := c.giteaClient.ListPullReviews(repo.Owner, repo.Name, int64(pull.Num), listOptions)
+
+		if err != nil {
+			return err
+		}
+
+		for _, review := range pullReviews {
+			_, err := c.giteaClient.DismissPullReview(repo.Owner, repo.Name, int64(pull.Num), review.ID, dismissOptions)
+
+			if err != nil {
+				return err
+			}
+		}
+
+		nextPage = resp.NextPage
+
+		// Emergency break after giteaPaginationEBreak pages
+		if page >= giteaPaginationEBreak {
+			break
+		}
+	}
+
+	return nil
+}
+
+func (c *GiteaClient) MergePull(logger logging.SimpleLogging, pull models.PullRequest, pullOptions models.PullRequestOptions) error {
+	logger.Debug("Merging Gitea pull request %d", pull.Num)
+
+	mergeOptions := gitea.MergePullRequestOption{
+		Style:                  gitea.MergeStyleMerge,
+		Title:                  "Atlantis merge",
+		Message:                "Automatic merge by Atlantis",
+		DeleteBranchAfterMerge: pullOptions.DeleteSourceBranchOnMerge,
+		ForceMerge:             false,
+		HeadCommitId:           pull.HeadCommit,
+		MergeWhenChecksSucceed: false,
+	}
+
+	succeeded, resp, err := c.giteaClient.MergePullRequest(pull.BaseRepo.Owner, pull.BaseRepo.Name, int64(pull.Num), mergeOptions)
+
+	if err != nil {
+		logger.Debug("POST /repos/%v/%v/pulls/%d/merge returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, pull.Num, resp.StatusCode)
+		return err
+	}
+
+	if !succeeded {
+		return fmt.Errorf("merge failed: %s", resp.Status)
+	}
+
+	return nil
+}
+
+// MarkdownPullLink specifies the string used in a pull request comment to reference another pull request.
+func (c *GiteaClient) MarkdownPullLink(pull models.PullRequest) (string, error) {
+	return fmt.Sprintf("#%d", pull.Num), nil
+}
+
+// GetTeamNamesForUser returns the names of the teams or groups that the user belongs to (in the organization the repository belongs to).
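Tying the pieces above together, a sketch of an apply-time gate: check approval and mergeability before merging, using the signatures shown earlier in this file. applyAndMerge is a hypothetical helper; the option field mirrors what MergePull consumes above.

```go
package example

import (
	"fmt"

	"github.com/runatlantis/atlantis/server/events/models"
	"github.com/runatlantis/atlantis/server/events/vcs/gitea"
	"github.com/runatlantis/atlantis/server/logging"
)

// applyAndMerge merges a pull request only once it is approved and mergeable.
func applyAndMerge(client *gitea.GiteaClient, logger logging.SimpleLogging,
	repo models.Repo, pull models.PullRequest) error {
	approved, err := client.PullIsApproved(logger, repo, pull)
	if err != nil {
		return err
	}
	mergeable, err := client.PullIsMergeable(logger, repo, pull, "atlantis")
	if err != nil {
		return err
	}
	if !approved.IsApproved || !mergeable {
		return fmt.Errorf("pull request %d is not ready to merge", pull.Num)
	}
	return client.MergePull(logger, pull, models.PullRequestOptions{
		DeleteSourceBranchOnMerge: true, // mirrors the option consumed by MergePull
	})
}
```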
+func (c *GiteaClient) GetTeamNamesForUser(repo models.Repo, user models.User) ([]string, error) {
+	// TODO: implement
+	return nil, errors.New("GetTeamNamesForUser not (yet) implemented for Gitea client")
+}
+
+// GetFileContent fetches a single file's content from the VCS (for hosts that
+// support fetching one file from a repository).
+// The first return value indicates whether the repo contains the file;
+// if it does, the file's content is placed in the second return value.
+func (c *GiteaClient) GetFileContent(logger logging.SimpleLogging, pull models.PullRequest, fileName string) (bool, []byte, error) {
+	logger.Debug("Getting file content for %s in Gitea pull request %d", fileName, pull.Num)
+
+	content, resp, err := c.giteaClient.GetContents(pull.BaseRepo.Owner, pull.BaseRepo.Name, pull.HeadCommit, fileName)
+
+	if err != nil {
+		logger.Debug("GET /repos/%v/%v/contents/%s?ref=%v returned: %v", pull.BaseRepo.Owner, pull.BaseRepo.Name, fileName, pull.HeadCommit, resp.StatusCode)
+		return false, nil, err
+	}
+
+	if content.Type == "file" {
+		decodedData, err := base64.StdEncoding.DecodeString(*content.Content)
+		if err != nil {
+			return true, []byte{}, err
+		}
+		return true, decodedData, nil
+	}
+
+	return false, nil, nil
+}
+
+// SupportsSingleFileDownload returns true if the VCS supports downloading a single file.
+func (c *GiteaClient) SupportsSingleFileDownload(repo models.Repo) bool {
+	return true
+}
+
+// GetCloneURL returns the clone URL of the repo.
+func (c *GiteaClient) GetCloneURL(logger logging.SimpleLogging, _ models.VCSHostType, repo string) (string, error) {
+	logger.Debug("Getting clone URL for %s", repo)
+
+	parts := strings.Split(repo, "/")
+	if len(parts) < 2 {
+		return "", errors.New("invalid repo format, expected 'owner/repo'")
+	}
+	repository, _, err := c.giteaClient.GetRepo(parts[0], parts[1])
+	if err != nil {
+		logger.Debug("GET /repos/%v/%v returned an error: %v", parts[0], parts[1], err)
+		return "", err
+	}
+	return repository.CloneURL, nil
+}
+
+// GetPullLabels returns the labels of a pull request.
+func (c *GiteaClient) GetPullLabels(logger logging.SimpleLogging, repo models.Repo, pull models.PullRequest) ([]string, error) {
+	logger.Debug("Getting labels for Gitea pull request %d", pull.Num)
+
+	page := 0
+	nextPage := 1
+	results := make([]string, 0)
+
+	opts := gitea.ListLabelsOptions{
+		ListOptions: gitea.ListOptions{
+			Page:     0,
+			PageSize: c.pageSize,
+		},
+	}
+
+	for page < nextPage {
+		page++
+		opts.ListOptions.Page = page
+
+		labels, resp, err := c.giteaClient.GetIssueLabels(repo.Owner, repo.Name, int64(pull.Num), opts)
+
+		if err != nil {
+			logger.Debug("GET /repos/%v/%v/issues/%d/labels returned: %v", repo.Owner, repo.Name, pull.Num, resp.StatusCode)
+			return nil, err
+		}
+
+		for _, label := range labels {
+			results = append(results, label.Name)
+		}
+
+		nextPage = resp.NextPage
+
+		// Emergency break after giteaPaginationEBreak pages
+		if page >= giteaPaginationEBreak {
+			break
+		}
+	}
+
+	return results, nil
+}
+
+func ValidateSignature(payload []byte, signature string, secretKey []byte) error {
+	isValid, err := gitea.VerifyWebhookSignature(string(secretKey), signature, payload)
+	if err != nil {
+		return errors.New("signature verification internal error")
+	}
+	if !isValid {
+		return errors.New("invalid signature")
+	}
+
+	return nil
+}
diff --git a/server/events/vcs/gitea/models.go b/server/events/vcs/gitea/models.go
new file mode 100644
index 0000000000..e624578e24
--- /dev/null
+++ b/server/events/vcs/gitea/models.go
@@ -0,0 +1,30
@@ +// Copyright 2024 Florian Beisel +// +// Licensed under the Apache License, Version 2.0 (the "License"); +// you may not use this file except in compliance with the License. +// You may obtain a copy of the License at +// +// http://www.apache.org/licenses/LICENSE-2.0 +// +// Unless required by applicable law or agreed to in writing, software +// distributed under the License is distributed on an "AS IS" BASIS, +// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +// See the License for the specific language governing permissions and +// limitations under the License. + +package gitea + +import "code.gitea.io/sdk/gitea" + +type GiteaWebhookPayload struct { + Action string `json:"action"` + Number int `json:"number"` + PullRequest gitea.PullRequest `json:"pull_request"` +} + +type GiteaIssueCommentPayload struct { + Action string `json:"action"` + Comment gitea.Comment `json:"comment"` + Repository gitea.Repository `json:"repository"` + Issue gitea.Issue `json:"issue"` +} diff --git a/server/events/vcs/github_client.go b/server/events/vcs/github_client.go index 3ceef6c5e7..8ee5c34385 100644 --- a/server/events/vcs/github_client.go +++ b/server/events/vcs/github_client.go @@ -76,7 +76,7 @@ type GithubPRReviewSummary struct { } // NewGithubClient returns a valid GitHub client. -func NewGithubClient(hostname string, credentials GithubCredentials, config GithubConfig, logger logging.SimpleLogging) (*GithubClient, error) { //nolint:staticcheck +func NewGithubClient(hostname string, credentials GithubCredentials, config GithubConfig, logger logging.SimpleLogging) (*GithubClient, error) { logger.Debug("Creating new GitHub client for host: %s", hostname) transport, err := credentials.Client() if err != nil { @@ -90,7 +90,8 @@ func NewGithubClient(hostname string, credentials GithubCredentials, config Gith graphqlURL = "https://api.github.com/graphql" } else { apiURL := resolveGithubAPIURL(hostname) - client, err = github.NewEnterpriseClient(apiURL.String(), apiURL.String(), transport) // nolint: staticcheck + // TODO: Deprecated: Use NewClient(httpClient).WithEnterpriseURLs(baseURL, uploadURL) instead + client, err = github.NewEnterpriseClient(apiURL.String(), apiURL.String(), transport) //nolint:staticcheck if err != nil { return nil, err } diff --git a/server/events/vcs/github_credentials.go b/server/events/vcs/github_credentials.go index 116b5beade..d1d59bffaa 100644 --- a/server/events/vcs/github_credentials.go +++ b/server/events/vcs/github_credentials.go @@ -71,7 +71,7 @@ type GithubAppCredentials struct { Key []byte Hostname string apiURL *url.URL - installationID int64 + InstallationID int64 tr *ghinstallation.Transport AppSlug string } @@ -122,8 +122,8 @@ func (c *GithubAppCredentials) GetToken() (string, error) { } func (c *GithubAppCredentials) getInstallationID() (int64, error) { - if c.installationID != 0 { - return c.installationID, nil + if c.InstallationID != 0 { + return c.InstallationID, nil } tr := http.DefaultTransport @@ -148,8 +148,8 @@ func (c *GithubAppCredentials) getInstallationID() (int64, error) { return 0, fmt.Errorf("wrong number of installations, expected 1, found %d", len(installations)) } - c.installationID = installations[0].GetID() - return c.installationID, nil + c.InstallationID = installations[0].GetID() + return c.InstallationID, nil } func (c *GithubAppCredentials) transport() (*ghinstallation.Transport, error) { diff --git a/server/events/vcs/github_credentials_test.go b/server/events/vcs/github_credentials_test.go index 
f6604adfa0..f5a4d33f0a 100644 --- a/server/events/vcs/github_credentials_test.go +++ b/server/events/vcs/github_credentials_test.go @@ -69,3 +69,40 @@ func TestGithubClient_AppAuthentication(t *testing.T) { t.Errorf("app token was not cached: %q != %q", token, newToken) } } + +func TestGithubClient_MultipleAppAuthentication(t *testing.T) { + logger := logging.NewNoopLogger(t) + defer disableSSLVerification()() + testServer, err := testdata.GithubMultipleAppTestServer(t) + Ok(t, err) + + anonCreds := &vcs.GithubAnonymousCredentials{} + anonClient, err := vcs.NewGithubClient(testServer, anonCreds, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + Ok(t, err) + tempSecrets, err := anonClient.ExchangeCode(logger, "good-code") + Ok(t, err) + + appCreds := &vcs.GithubAppCredentials{ + AppID: tempSecrets.ID, + InstallationID: 1, + Key: []byte(testdata.GithubPrivateKey), + Hostname: testServer, + } + _, err = vcs.NewGithubClient(testServer, appCreds, vcs.GithubConfig{}, logging.NewNoopLogger(t)) + Ok(t, err) + + token, err := appCreds.GetToken() + Ok(t, err) + + newToken, err := appCreds.GetToken() + Ok(t, err) + + user, err := appCreds.GetUser() + Ok(t, err) + + Assert(t, user == "", "user should be empty") + + if token != newToken { + t.Errorf("app token was not cached: %q != %q", token, newToken) + } +} diff --git a/server/events/vcs/gitlab_client.go b/server/events/vcs/gitlab_client.go index c4cb837a4e..14d63069ad 100644 --- a/server/events/vcs/gitlab_client.go +++ b/server/events/vcs/gitlab_client.go @@ -22,14 +22,13 @@ import ( "strings" "time" - "github.com/runatlantis/atlantis/server/events/command" - "github.com/runatlantis/atlantis/server/events/vcs/common" - version "github.com/hashicorp/go-version" + "github.com/jpillora/backoff" "github.com/pkg/errors" - "github.com/runatlantis/atlantis/server/logging" - + "github.com/runatlantis/atlantis/server/events/command" "github.com/runatlantis/atlantis/server/events/models" + "github.com/runatlantis/atlantis/server/events/vcs/common" + "github.com/runatlantis/atlantis/server/logging" gitlab "github.com/xanzy/go-gitlab" ) @@ -355,18 +354,28 @@ func (g *GitlabClient) PullIsMergeable(logger logging.SimpleLogging, repo models return false, err } + if supportsDetailedMergeStatus { + logger.Debug("Detailed merge status: '%s'", mr.DetailedMergeStatus) + } else { + logger.Debug("Merge status: '%s'", mr.MergeStatus) //nolint:staticcheck // Need to reference deprecated field for backwards compatibility + } + if ((supportsDetailedMergeStatus && (mr.DetailedMergeStatus == "mergeable" || mr.DetailedMergeStatus == "ci_still_running" || - mr.DetailedMergeStatus == "ci_must_pass")) || + mr.DetailedMergeStatus == "ci_must_pass" || + mr.DetailedMergeStatus == "need_rebase")) || (!supportsDetailedMergeStatus && mr.MergeStatus == "can_be_merged")) && //nolint:staticcheck // Need to reference deprecated field for backwards compatibility mr.ApprovalsBeforeMerge <= 0 && mr.BlockingDiscussionsResolved && !mr.WorkInProgress && (allowSkippedPipeline || !isPipelineSkipped) { + + logger.Debug("Merge request is mergeable") return true, nil } + logger.Debug("Merge request is not mergeable") return false, nil } @@ -429,17 +438,66 @@ func (g *GitlabClient) UpdateStatus(logger logging.SimpleLogging, repo models.Re } } - _, resp, err := g.Client.Commits.SetCommitStatus(repo.FullName, pull.HeadCommit, &gitlab.SetCommitStatusOptions{ - State: gitlabState, - Context: gitlab.Ptr(src), - Description: gitlab.Ptr(description), - TargetURL: &url, - Ref: gitlab.Ptr(refTarget), - }) - if 
resp != nil { - logger.Debug("POST /projects/%s/statuses/%s returned: %d", repo.FullName, pull.HeadCommit, resp.StatusCode) + var ( + resp *gitlab.Response + maxAttempts = 10 + retryer = &backoff.Backoff{ + Jitter: true, + Max: g.PollingInterval, + } + ) + + for i := 0; i < maxAttempts; i++ { + logger := logger.With( + "attempt", i+1, + "max_attempts", maxAttempts, + "repo", repo.FullName, + "commit", pull.HeadCommit, + "state", state.String(), + ) + + _, resp, err = g.Client.Commits.SetCommitStatus(repo.FullName, pull.HeadCommit, &gitlab.SetCommitStatusOptions{ + State: gitlabState, + Context: gitlab.Ptr(src), + Description: gitlab.Ptr(description), + TargetURL: &url, + Ref: gitlab.Ptr(refTarget), + }) + + if resp != nil { + logger.Debug("POST /projects/%s/statuses/%s returned: %d", repo.FullName, pull.HeadCommit, resp.StatusCode) + + // GitLab returns a `409 Conflict` status when the commit pipeline status is being changed/locked by another request, + // which is likely to happen if you use [`--parallel-pool-size > 1`] and [`parallel-plan|apply`]. + // + // The likelihood of this happening is increased when the number of parallel apply jobs is increased. + // + // Returning the [err] without retrying will permanently leave the GitLab commit status in a "running" state, + // which would prevent Atlantis from merging the merge request on [apply]. + // + // GitLab does not allow merge requests to be merged when the pipeline status is "running." + + if resp.StatusCode == http.StatusConflict { + sleep := retryer.ForAttempt(float64(i)) + + logger.With("retry_in", sleep).Warn("GitLab returned HTTP [409 Conflict] when updating commit status") + time.Sleep(sleep) + + continue + } + } + + // Log we got a 200 OK response from GitLab after at least one retry to help with debugging/understanding delays/errors. 
+ if err == nil && i > 0 { + logger.Info("GitLab returned HTTP [200 OK] after updating commit status") + } + + // Return the err, which might be nil if everything worked out + return err } - return err + + // If we got here, we've exhausted all attempts to update the commit status and still failed, so return the error upstream + return errors.Wrap(err, fmt.Sprintf("failed to update commit status for '%s' @ '%s' to '%s' after %d attempts", repo.FullName, pull.HeadCommit, src, maxAttempts)) } func (g *GitlabClient) GetMergeRequest(logger logging.SimpleLogging, repoFullName string, pullNum int) (*gitlab.MergeRequest, error) { @@ -461,7 +519,7 @@ func (g *GitlabClient) WaitForSuccessPipeline(logger logging.SimpleLogging, ctx case <-ctx.Done(): // validation check time out cancel() - return //ctx.Err() + return // ctx.Err() default: mr, _ := g.GetMergeRequest(logger, pull.BaseRepo.FullName, pull.Num) diff --git a/server/events/vcs/gitlab_client_test.go b/server/events/vcs/gitlab_client_test.go index 5c463e85cf..a32a75d74a 100644 --- a/server/events/vcs/gitlab_client_test.go +++ b/server/events/vcs/gitlab_client_test.go @@ -177,7 +177,6 @@ func TestGitlabClient_GetModifiedFiles(t *testing.T) { Equals(t, []string{"somefile.yaml"}, filenames) }) } - } func TestGitlabClient_MergePull(t *testing.T) { @@ -346,6 +345,113 @@ func TestGitlabClient_UpdateStatus(t *testing.T) { } } +func TestGitlabClient_UpdateStatusRetryable(t *testing.T) { + logger := logging.NewNoopLogger(t) + pipelineSuccess, err := os.ReadFile("testdata/gitlab-pipeline-success.json") + Ok(t, err) + + cases := []struct { + status models.CommitStatus + numberOfConflicts int + expNumberOfRequests int + expState string + expError bool + }{ + // Ensure that 0 x 409 Conflict succeeds + { + status: models.PendingCommitStatus, + numberOfConflicts: 0, + expNumberOfRequests: 1, + expState: "running", + }, + // Ensure that 5 x 409 Conflict still succeeds + { + status: models.PendingCommitStatus, + numberOfConflicts: 5, + expNumberOfRequests: 6, + expState: "running", + }, + // Ensure that 10 x 409 Conflict still fail due to running out of retries + { + status: models.FailedCommitStatus, + numberOfConflicts: 100, // anything larger than 10 is fine + expNumberOfRequests: 10, + expState: "failed", + expError: true, + }, + } + for _, c := range cases { + t.Run(c.expState, func(t *testing.T) { + handledNumberOfRequests := 0 + + testServer := httptest.NewServer( + http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v4/projects/runatlantis%2Fatlantis/statuses/sha": + handledNumberOfRequests++ + shouldSendConflict := handledNumberOfRequests <= c.numberOfConflicts + + body, err := io.ReadAll(r.Body) + Ok(t, err) + exp := fmt.Sprintf(`{"state":"%s","ref":"patch-1-merger","context":"src","target_url":"https://google.com","description":"description"}`, c.expState) + Equals(t, exp, string(body)) + defer r.Body.Close() // nolint: errcheck + + if shouldSendConflict { + w.WriteHeader(http.StatusConflict) + } + + w.Write([]byte("{}")) // nolint: errcheck + + case "/api/v4/projects/runatlantis%2Fatlantis/merge_requests/1": + w.WriteHeader(http.StatusOK) + w.Write(pipelineSuccess) // nolint: errcheck + + case "/api/v4/": + // Rate limiter requests. 
+ w.WriteHeader(http.StatusOK) + + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + } + })) + + internalClient, err := gitlab.NewClient("token", gitlab.WithBaseURL(testServer.URL)) + Ok(t, err) + client := &GitlabClient{ + Client: internalClient, + Version: nil, + PollingInterval: 10 * time.Millisecond, + } + + repo := models.Repo{ + FullName: "runatlantis/atlantis", + Owner: "runatlantis", + Name: "atlantis", + } + err = client.UpdateStatus( + logger, + repo, + models.PullRequest{ + Num: 1, + BaseRepo: repo, + HeadCommit: "sha", + HeadBranch: "test", + }, c.status, "src", "description", "https://google.com") + + if c.expError { + ErrContains(t, "failed to update commit status for 'runatlantis/atlantis' @ 'sha' to 'src' after 10 attempts", err) + ErrContains(t, "409", err) + } else { + Ok(t, err) + } + + Assert(t, c.expNumberOfRequests == handledNumberOfRequests, fmt.Sprintf("expected %d number of requests, but processed %d", c.expNumberOfRequests, handledNumberOfRequests)) + }) + } +} + func TestGitlabClient_PullIsMergeable(t *testing.T) { logger := logging.NewNoopLogger(t) gitlabClientUnderTest = true @@ -358,6 +464,7 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { noHeadPipelineMR := 2 ciMustPassSuccessMR := 3 ciMustPassFailureMR := 4 + needRebaseMR := 5 pipelineSuccess, err := os.ReadFile("testdata/gitlab-pipeline-success.json") Ok(t, err) @@ -368,6 +475,9 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { detailedMergeStatusCiMustPass, err := os.ReadFile("testdata/gitlab-detailed-merge-status-ci-must-pass.json") Ok(t, err) + detailedMergeStatusNeedRebase, err := os.ReadFile("testdata/gitlab-detailed-merge-status-need-rebase.json") + Ok(t, err) + headPipelineNotAvailable, err := os.ReadFile("testdata/gitlab-head-pipeline-not-available.json") Ok(t, err) @@ -427,6 +537,13 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { ciMustPassFailureMR, false, }, + { + fmt.Sprintf("%s/apply", vcsStatusName), + models.FailedCommitStatus, + gitlabServerVersions, + needRebaseMR, + true, + }, { fmt.Sprintf("%s/apply: resource/default", vcsStatusName), models.FailedCommitStatus, @@ -491,6 +608,9 @@ func TestGitlabClient_PullIsMergeable(t *testing.T) { case fmt.Sprintf("/api/v4/projects/runatlantis%%2Fatlantis/merge_requests/%v", ciMustPassFailureMR): w.WriteHeader(http.StatusOK) w.Write(detailedMergeStatusCiMustPass) // nolint: errcheck + case fmt.Sprintf("/api/v4/projects/runatlantis%%2Fatlantis/merge_requests/%v", needRebaseMR): + w.WriteHeader(http.StatusOK) + w.Write(detailedMergeStatusNeedRebase) // nolint: errcheck case fmt.Sprintf("/api/v4/projects/%v", projectID): w.WriteHeader(http.StatusOK) w.Write(projectSuccess) // nolint: errcheck diff --git a/server/events/vcs/proxy.go b/server/events/vcs/proxy.go index d3d60b03fb..cd67b84c90 100644 --- a/server/events/vcs/proxy.go +++ b/server/events/vcs/proxy.go @@ -26,7 +26,7 @@ type ClientProxy struct { clients map[models.VCSHostType]Client } -func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClient Client, bitbucketServerClient Client, azuredevopsClient Client) *ClientProxy { +func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClient Client, bitbucketServerClient Client, azuredevopsClient Client, giteaClient Client) *ClientProxy { if githubClient == nil { githubClient = &NotConfiguredVCSClient{} } @@ -42,6 +42,9 @@ func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClie if 
azuredevopsClient == nil { azuredevopsClient = &NotConfiguredVCSClient{} } + if giteaClient == nil { + giteaClient = &NotConfiguredVCSClient{} + } return &ClientProxy{ clients: map[models.VCSHostType]Client{ models.Github: githubClient, @@ -49,6 +52,7 @@ func NewClientProxy(githubClient Client, gitlabClient Client, bitbucketCloudClie models.BitbucketCloud: bitbucketCloudClient, models.BitbucketServer: bitbucketServerClient, models.AzureDevops: azuredevopsClient, + models.Gitea: giteaClient, }, } } diff --git a/server/events/vcs/testdata/fixtures.go b/server/events/vcs/testdata/fixtures.go index 94926edba9..2872892f20 100644 --- a/server/events/vcs/testdata/fixtures.go +++ b/server/events/vcs/testdata/fixtures.go @@ -496,6 +496,79 @@ var githubAppInstallationJSON = `[ } ]` +var githubAppMultipleInstallationJSON = `[ + { + "id": 1, + "account": { + "login": "github", + "id": 1, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=", + "url": "https://api.github.com/orgs/github", + "repos_url": "https://api.github.com/orgs/github/repos", + "events_url": "https://api.github.com/orgs/github/events", + "hooks_url": "https://api.github.com/orgs/github/hooks", + "issues_url": "https://api.github.com/orgs/github/issues", + "members_url": "https://api.github.com/orgs/github/members{/member}", + "public_members_url": "https://api.github.com/orgs/github/public_members{/member}", + "avatar_url": "https://github.com/images/error/octocat_happy.gif", + "description": "A great organization" + }, + "access_tokens_url": "https://api.github.com/installations/1/access_tokens", + "repositories_url": "https://api.github.com/installation/repositories", + "html_url": "https://github.com/organizations/github/settings/installations/1", + "app_id": 1, + "target_id": 1, + "target_type": "Organization", + "permissions": { + "metadata": "read", + "contents": "read", + "issues": "write", + "single_file": "write" + }, + "events": [ + "push", + "pull_request" + ], + "single_file_name": "config.yml", + "repository_selection": "selected" + }, + { + "id": 2, + "account": { + "login": "github", + "id": 1, + "node_id": "MDEyOk9yZ2FuaXphdGlvbjE=", + "url": "https://api.github.com/orgs/github", + "repos_url": "https://api.github.com/orgs/github/repos", + "events_url": "https://api.github.com/orgs/github/events", + "hooks_url": "https://api.github.com/orgs/github/hooks", + "issues_url": "https://api.github.com/orgs/github/issues", + "members_url": "https://api.github.com/orgs/github/members{/member}", + "public_members_url": "https://api.github.com/orgs/github/public_members{/member}", + "avatar_url": "https://github.com/images/error/octocat_happy.gif", + "description": "A great organization" + }, + "access_tokens_url": "https://api.github.com/installations/1/access_tokens", + "repositories_url": "https://api.github.com/installation/repositories", + "html_url": "https://github.com/organizations/github/settings/installations/1", + "app_id": 1, + "target_id": 1, + "target_type": "Organization", + "permissions": { + "metadata": "read", + "contents": "read", + "issues": "write", + "single_file": "write" + }, + "events": [ + "push", + "pull_request" + ], + "single_file_name": "config.yml", + "repository_selection": "selected" + } +]` + // nolint: gosec var githubAppTokenJSON = `{ "token": "some-token", @@ -741,3 +814,58 @@ func GithubAppTestServer(t *testing.T) (string, error) { return testServerURL.Host, err } + +func GithubMultipleAppTestServer(t *testing.T) (string, error) { + counter := 0 + testServer := httptest.NewTLSServer( + 
http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) { + switch r.RequestURI { + case "/api/v3/app-manifests/good-code/conversions": + encodedKey := strings.Join(strings.Split(GithubPrivateKey, "\n"), "\\n") + appInfo := fmt.Sprintf(githubConversionJSON, encodedKey) + w.Write([]byte(appInfo)) // nolint: errcheck + // https://developer.github.com/v3/apps/#list-installations + case "/api/v3/app/installations": + token := strings.Replace(r.Header.Get("Authorization"), "Bearer ", "", 1) + if err := validateGithubToken(token); err != nil { + w.WriteHeader(403) + w.Write([]byte("Invalid token")) // nolint: errcheck + return + } + + w.Write([]byte(githubAppMultipleInstallationJSON)) // nolint: errcheck + return + case "/api/v3/apps/some-app": + token := strings.Replace(r.Header.Get("Authorization"), "token ", "", 1) + + // token is taken from githubAppTokenJSON + if token != "some-token" { + w.WriteHeader(403) + w.Write([]byte("Invalid installation token")) // nolint: errcheck + return + } + w.Write([]byte(githubAppJSON)) // nolint: errcheck + return + case "/api/v3/app/installations/1/access_tokens": + token := strings.Replace(r.Header.Get("Authorization"), "Bearer ", "", 1) + if err := validateGithubToken(token); err != nil { + w.WriteHeader(403) + w.Write([]byte("Invalid token")) // nolint: errcheck + return + } + + appToken := fmt.Sprintf(githubAppTokenJSON, counter) + counter++ + w.Write([]byte(appToken)) // nolint: errcheck + return + default: + t.Errorf("got unexpected request at %q", r.RequestURI) + http.Error(w, "not found", http.StatusNotFound) + return + } + })) + + testServerURL, err := url.Parse(testServer.URL) + + return testServerURL.Host, err +} diff --git a/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json b/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json new file mode 100644 index 0000000000..a37f0e8577 --- /dev/null +++ b/server/events/vcs/testdata/gitlab-detailed-merge-status-need-rebase.json @@ -0,0 +1,124 @@ +{ + "id": 22461274, + "iid": 13, + "project_id": 4580910, + "title": "Update main.tf", + "description": "", + "state": "opened", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "merged_by": null, + "merged_at": null, + "closed_by": null, + "closed_at": null, + "target_branch": "patch-1", + "source_branch": "patch-1-merger", + "user_notes_count": 0, + "upvotes": 0, + "downvotes": 0, + "author": { + "id": 1755902, + "name": "Luke Kysow", + "username": "lkysow", + "state": "active", + "avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80&d=identicon", + "web_url": "https://gitlab.com/lkysow" + }, + "assignee": null, + "reviewers": [], + "source_project_id": 4580910, + "target_project_id": 4580910, + "labels": [], + "work_in_progress": false, + "milestone": null, + "merge_when_pipeline_succeeds": false, + "merge_status": "can_be_merged", + "detailed_merge_status": "need_rebase", + "sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0", + "merge_commit_sha": null, + "squash_commit_sha": null, + "discussion_locked": null, + "should_remove_source_branch": null, + "force_remove_source_branch": true, + "reference": "!13", + "references": { + "short": "!13", + "relative": "!13", + "full": "lkysow/atlantis-example!13" + }, + "web_url": "https://gitlab.com/lkysow/atlantis-example/merge_requests/13", + "time_stats": { + "time_estimate": 0, + "total_time_spent": 0, + "human_time_estimate": null, + "human_total_time_spent": null + }, + "squash": true, + 
"task_completion_status": { + "count": 0, + "completed_count": 0 + }, + "has_conflicts": false, + "blocking_discussions_resolved": true, + "approvals_before_merge": null, + "subscribed": false, + "changes_count": "1", + "latest_build_started_at": "2019-01-15T18:27:29.375Z", + "latest_build_finished_at": "2019-01-25T17:28:01.437Z", + "first_deployed_to_production_at": null, + "pipeline": { + "id": 488598, + "sha": "67cb91d3f6198189f433c045154a885784ba6977", + "ref": "patch-1-merger", + "status": "success", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598" + }, + "head_pipeline": { + "id": 488598, + "sha": "67cb91d3f6198189f433c045154a885784ba6977", + "ref": "patch-1-merger", + "status": "success", + "created_at": "2019-01-15T18:27:29.375Z", + "updated_at": "2019-01-25T17:28:01.437Z", + "web_url": "https://gitlab.com/lkysow/atlantis-example/-/pipelines/488598", + "before_sha": "0000000000000000000000000000000000000000", + "tag": false, + "yaml_errors": null, + "user": { + "id": 1755902, + "name": "Luke Kysow", + "username": "lkysow", + "state": "active", + "avatar_url": "https://secure.gravatar.com/avatar/25fd57e71590fe28736624ff24d41c5f?s=80&d=identicon", + "web_url": "https://gitlab.com/lkysow" + }, + "started_at": "2019-01-15T18:27:29.375Z", + "finished_at": "2019-01-25T17:28:01.437Z", + "committed_at": null, + "duration": 31, + "coverage": null, + "detailed_status": { + "icon": "status_success", + "text": "passed", + "label": "passed", + "group": "success", + "tooltip": "passed", + "has_details": true, + "details_path": "/lkysow/atlantis-example/-/pipelines/488598", + "illustration": null, + "favicon": "/assets/ci_favicons/favicon_status_success-8451333011eee8ce9f2ab25dc487fe24a8758c694827a582f17f42b0a90446a2.png" + } + }, + "diff_refs": { + "base_sha": "67cb91d3f6198189f433c045154a885784ba6977", + "head_sha": "cb86d70f464632bdfbe1bb9bc0f2f9d847a774a0", + "start_sha": "67cb91d3f6198189f433c045154a885784ba6977" + }, + "merge_error": null, + "first_contribution": false, + "user": { + "can_merge": true + } +} diff --git a/server/events/working_dir.go b/server/events/working_dir.go index 886b3c4b40..c2e56d8dc7 100644 --- a/server/events/working_dir.go +++ b/server/events/working_dir.go @@ -26,6 +26,7 @@ import ( "github.com/runatlantis/atlantis/server/core/runtime" "github.com/runatlantis/atlantis/server/events/models" "github.com/runatlantis/atlantis/server/logging" + "github.com/runatlantis/atlantis/server/utils" ) const workingDirPrefix = "repos" @@ -41,23 +42,23 @@ type WorkingDir interface { // absolute path to the root of the cloned repo. It also returns // a boolean indicating if we should warn users that the branch we're // merging into has been updated since we cloned it. - Clone(headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) + Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) // GetWorkingDir returns the path to the workspace for this repo and pull. // If workspace does not exist on disk, error will be of type os.IsNotExist. GetWorkingDir(r models.Repo, p models.PullRequest, workspace string) (string, error) - HasDiverged(cloneDir string) bool + HasDiverged(logger logging.SimpleLogging, cloneDir string) bool GetPullDir(r models.Repo, p models.PullRequest) (string, error) // Delete deletes the workspace for this repo and pull. 
- Delete(r models.Repo, p models.PullRequest) error - DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error + Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error + DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error // Set a flag in the workingdir so Clone() can know that it is safe to re-clone the workingdir if // the upstream branch has been modified. This is only safe after grabbing the project lock // and before running any plans SetCheckForUpstreamChanges() // DeletePlan deletes the plan for this repo, pull, workspace path and project name - DeletePlan(r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error + DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, path string, projectName string) error // GetGitUntrackedFiles returns a list of Git untracked files in the working dir. - GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) + GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) } // FileWorkspace implements WorkingDir with the file system. @@ -86,7 +87,6 @@ type FileWorkspace struct { GpgNoSigningEnabled bool // flag indicating if we have to merge with potential new changes upstream (directly after grabbing project lock) CheckForUpstreamChanges bool - Logger logging.SimpleLogging } // Clone git clones headRepo, checks out the branch and then returns the absolute @@ -95,10 +95,7 @@ type FileWorkspace struct { // If the repo already exists and is at // the right commit it does nothing. This is to support running commands in // multiple dirs of the same repo without deleting existing plans. -func (w *FileWorkspace) Clone( - headRepo models.Repo, - p models.PullRequest, - workspace string) (string, bool, error) { +func (w *FileWorkspace) Clone(logger logging.SimpleLogging, headRepo models.Repo, p models.PullRequest, workspace string) (string, bool, error) { cloneDir := w.cloneDir(p.BaseRepo, p, workspace) defer func() { w.CheckForUpstreamChanges = false }() @@ -106,7 +103,7 @@ func (w *FileWorkspace) Clone( // If the directory already exists, check if it's at the right commit. // If so, then we do nothing. if _, err := os.Stat(cloneDir); err == nil { - w.Logger.Debug("clone directory %q already exists, checking if it's at the right commit", cloneDir) + logger.Debug("clone directory '%s' already exists, checking if it's at the right commit", cloneDir) // We use git rev-parse to see if our repo is at the right commit. // If just checking out the pull request branch, we can use HEAD. @@ -121,28 +118,28 @@ func (w *FileWorkspace) Clone( revParseCmd.Dir = cloneDir outputRevParseCmd, err := revParseCmd.CombinedOutput() if err != nil { - w.Logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) - return cloneDir, false, w.forceClone(c) + logger.Warn("will re-clone repo, could not determine if was at correct commit: %s: %s: %s", strings.Join(revParseCmd.Args, " "), err, string(outputRevParseCmd)) + return cloneDir, false, w.forceClone(logger, c) } currCommit := strings.Trim(string(outputRevParseCmd), "\n") // We're prefix matching here because BitBucket doesn't give us the full // commit, only a 12 character prefix. 
if strings.HasPrefix(currCommit, p.HeadCommit) { - if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(p, headRepo, cloneDir) { - w.Logger.Info("base branch has been updated, using merge strategy and will clone again") - return cloneDir, true, w.mergeAgain(c) + if w.CheckForUpstreamChanges && w.CheckoutMerge && w.recheckDiverged(logger, p, headRepo, cloneDir) { + logger.Info("base branch has been updated, using merge strategy and will clone again") + return cloneDir, true, w.mergeAgain(logger, c) } - w.Logger.Debug("repo is at correct commit %q so will not re-clone", p.HeadCommit) + logger.Debug("repo is at correct commit '%s' so will not re-clone", p.HeadCommit) return cloneDir, false, nil } else { - w.Logger.Debug("repo was already cloned but is not at correct commit, wanted %q got %q", p.HeadCommit, currCommit) + logger.Debug("repo was already cloned but is not at correct commit, wanted '%s' got '%s'", p.HeadCommit, currCommit) } // We'll fall through to re-clone. } // Otherwise we clone the repo. - return cloneDir, false, w.forceClone(c) + return cloneDir, false, w.forceClone(logger, c) } // recheckDiverged returns true if the branch we're merging into has diverged @@ -152,7 +149,7 @@ func (w *FileWorkspace) Clone( // and we have to perform a new merge. // If there are any errors we return false since we prefer things to succeed // vs. stopping the plan/apply. -func (w *FileWorkspace) recheckDiverged(p models.PullRequest, headRepo models.Repo, cloneDir string) bool { +func (w *FileWorkspace) recheckDiverged(logger logging.SimpleLogging, p models.PullRequest, headRepo models.Repo, cloneDir string) bool { if !w.CheckoutMerge { // It only makes sense to warn that main has diverged if we're using // the checkout merge strategy. If we're just checking out the branch, @@ -183,17 +180,16 @@ func (w *FileWorkspace) recheckDiverged(p models.PullRequest, headRepo models.Re cmd.Dir = cloneDir output, err := cmd.CombinedOutput() - if err != nil { - w.Logger.Warn("getting remote update failed: %s", string(output)) + logger.Warn("getting remote update failed: %s", string(output)) return false } } - return w.HasDiverged(cloneDir) + return w.HasDiverged(logger, cloneDir) } -func (w *FileWorkspace) HasDiverged(cloneDir string) bool { +func (w *FileWorkspace) HasDiverged(logger logging.SimpleLogging, cloneDir string) bool { if !w.CheckoutMerge { // Both the diverged warning and the UnDiverged apply requirement only apply to merge checkout strategy so // we assume false here for 'branch' strategy. 
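Looping back to the GitLab UpdateStatus change earlier in this diff: it retries `409 Conflict` responses with jittered backoff from github.com/jpillora/backoff. A compact, runnable sketch of that retry shape, where postStatus is a hypothetical stand-in for SetCommitStatus and the Max duration stands in for the client's PollingInterval:

```go
package main

import (
	"fmt"
	"net/http"
	"time"

	"github.com/jpillora/backoff"
)

// postStatus is a hypothetical stand-in for SetCommitStatus; it returns the
// HTTP status code so the caller can decide whether to retry.
func postStatus(attempt int) (int, error) {
	if attempt < 3 {
		return http.StatusConflict, fmt.Errorf("409 Conflict")
	}
	return http.StatusOK, nil
}

func main() {
	const maxAttempts = 10
	retryer := &backoff.Backoff{
		Jitter: true,
		Max:    500 * time.Millisecond, // stands in for the client's PollingInterval
	}

	var err error
	for i := 0; i < maxAttempts; i++ {
		var code int
		code, err = postStatus(i)
		if code == http.StatusConflict {
			// Jittered, exponentially growing (capped) delay for attempt i.
			time.Sleep(retryer.ForAttempt(float64(i)))
			continue
		}
		break // success, or an error that retrying won't fix
	}
	fmt.Println("final error:", err)
}
```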
@@ -204,7 +200,7 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool { statusFetchCmd.Dir = cloneDir outputStatusFetch, err := statusFetchCmd.CombinedOutput() if err != nil { - w.Logger.Warn("fetching repo has failed: %s", string(outputStatusFetch)) + logger.Warn("fetching repo has failed: %s", string(outputStatusFetch)) return false } @@ -213,14 +209,14 @@ func (w *FileWorkspace) HasDiverged(cloneDir string) bool { statusUnoCmd.Dir = cloneDir outputStatusUno, err := statusUnoCmd.CombinedOutput() if err != nil { - w.Logger.Warn("getting repo status has failed: %s", string(outputStatusUno)) + logger.Warn("getting repo status has failed: %s", string(outputStatusUno)) return false } hasDiverged := strings.Contains(string(outputStatusUno), "have diverged") return hasDiverged } -func (w *FileWorkspace) forceClone(c wrappedGitContext) error { +func (w *FileWorkspace) forceClone(logger logging.SimpleLogging, c wrappedGitContext) error { value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) mutex := value.(*sync.Mutex) @@ -232,11 +228,11 @@ func (w *FileWorkspace) forceClone(c wrappedGitContext) error { err := os.RemoveAll(c.dir) if err != nil { - return errors.Wrapf(err, "deleting dir %q before cloning", c.dir) + return errors.Wrapf(err, "deleting dir '%s' before cloning", c.dir) } // Create the directory and parents if necessary. - w.Logger.Info("creating dir %q", c.dir) + logger.Info("creating dir '%s'", c.dir) if err := os.MkdirAll(c.dir, 0700); err != nil { return errors.Wrap(err, "creating new workspace") } @@ -253,37 +249,37 @@ func (w *FileWorkspace) forceClone(c wrappedGitContext) error { // if branch strategy, use depth=1 if !w.CheckoutMerge { - return w.wrappedGit(c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir) + return w.wrappedGit(logger, c, "clone", "--depth=1", "--branch", c.pr.HeadBranch, "--single-branch", headCloneURL, c.dir) } // if merge strategy... 
// if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := w.wrappedGit(c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { + if err := w.wrappedGit(logger, c, "clone", "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } else { - if err := w.wrappedGit(c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { + if err := w.wrappedGit(logger, c, "clone", "--depth", fmt.Sprint(w.CheckoutDepth), "--branch", c.pr.BaseBranch, "--single-branch", baseCloneURL, c.dir); err != nil { return err } } - if err := w.wrappedGit(c, "remote", "add", "head", headCloneURL); err != nil { + if err := w.wrappedGit(logger, c, "remote", "add", "head", headCloneURL); err != nil { return err } if w.GpgNoSigningEnabled { - if err := w.wrappedGit(c, "config", "--local", "commit.gpgsign", "false"); err != nil { + if err := w.wrappedGit(logger, c, "config", "--local", "commit.gpgsign", "false"); err != nil { return err } } - return w.mergeToBaseBranch(c) + return w.mergeToBaseBranch(logger, c) } // There is a new upstream update that we need, and we want to update to it // without deleting any existing plans -func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error { +func (w *FileWorkspace) mergeAgain(logger logging.SimpleLogging, c wrappedGitContext) error { value, _ := cloneLocks.LoadOrStore(c.dir, new(sync.Mutex)) mutex := value.(*sync.Mutex) @@ -294,11 +290,11 @@ func (w *FileWorkspace) mergeAgain(c wrappedGitContext) error { } // Reset branch as if it was cloned again - if err := w.wrappedGit(c, "reset", "--hard", fmt.Sprintf("refs/remotes/origin/%s", c.pr.BaseBranch)); err != nil { + if err := w.wrappedGit(logger, c, "reset", "--hard", fmt.Sprintf("refs/remotes/origin/%s", c.pr.BaseBranch)); err != nil { return err } - return w.mergeToBaseBranch(c) + return w.mergeToBaseBranch(logger, c) } // wrappedGitContext is the configuration for wrappedGit that is typically unchanged @@ -311,7 +307,7 @@ type wrappedGitContext struct { // wrappedGit runs git with additional environment settings required for git merge, // and with sanitized error logging to avoid leaking git credentials -func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) error { +func (w *FileWorkspace) wrappedGit(logger logging.SimpleLogging, c wrappedGitContext, args ...string) error { cmd := exec.Command("git", args...) // nolint: gosec cmd.Dir = c.dir // The git merge command requires these env vars are set. @@ -327,12 +323,12 @@ func (w *FileWorkspace) wrappedGit(c wrappedGitContext, args ...string) error { sanitizedErrMsg := w.sanitizeGitCredentials(err.Error(), c.pr.BaseRepo, c.head) return fmt.Errorf("running %s: %s: %s", cmdStr, sanitizedOutput, sanitizedErrMsg) } - w.Logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) + logger.Debug("ran: %s. Output: %s", cmdStr, strings.TrimSuffix(sanitizedOutput, "\n")) return nil } // Merge the PR into the base branch. 
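For reference, the merge-checkout flow that forceClone and mergeToBaseBranch drive through wrappedGit boils down to a short sequence of git commands. A self-contained sketch under stated assumptions: URLs, branch names, and the target directory are illustrative, and error handling is condensed relative to the real code.

```go
package example

import (
	"fmt"
	"os/exec"
)

// run executes git with the given args; dir may be empty for the initial clone.
func run(dir string, args ...string) error {
	cmd := exec.Command("git", args...)
	cmd.Dir = dir
	if out, err := cmd.CombinedOutput(); err != nil {
		return fmt.Errorf("git %v: %s: %w", args, out, err)
	}
	return nil
}

// cloneAndMerge mirrors the merge-checkout flow above as plain git commands:
// clone the base branch, add the head repo as a remote, fetch the PR branch,
// and merge it with --no-ff so HEAD^2 always resolves to the PR head.
func cloneAndMerge(baseURL, headURL, baseBranch, headBranch, dir string) error {
	if err := run("", "clone", "--branch", baseBranch, "--single-branch", baseURL, dir); err != nil {
		return err
	}
	if err := run(dir, "remote", "add", "head", headURL); err != nil {
		return err
	}
	if err := run(dir, "fetch", "head", fmt.Sprintf("+refs/heads/%s:", headBranch)); err != nil {
		return err
	}
	return run(dir, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD")
}
```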
-func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { +func (w *FileWorkspace) mergeToBaseBranch(logger logging.SimpleLogging, c wrappedGitContext) error { fetchRef := fmt.Sprintf("+refs/heads/%s:", c.pr.HeadBranch) fetchRemote := "head" if w.GithubAppEnabled { @@ -342,19 +338,19 @@ func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { // if no checkout depth, omit depth arg if w.CheckoutDepth == 0 { - if err := w.wrappedGit(c, "fetch", fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(logger, c, "fetch", fetchRemote, fetchRef); err != nil { return err } } else { - if err := w.wrappedGit(c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { + if err := w.wrappedGit(logger, c, "fetch", "--depth", fmt.Sprint(w.CheckoutDepth), fetchRemote, fetchRef); err != nil { return err } } - if err := w.wrappedGit(c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil { + if err := w.wrappedGit(logger, c, "merge-base", c.pr.BaseBranch, "FETCH_HEAD"); err != nil { // git merge-base returning error means that we did not receive enough commits in shallow clone. // Fall back to retrieving full repo history. - if err := w.wrappedGit(c, "fetch", "--unshallow"); err != nil { + if err := w.wrappedGit(logger, c, "fetch", "--unshallow"); err != nil { return err } } @@ -365,7 +361,7 @@ func (w *FileWorkspace) mergeToBaseBranch(c wrappedGitContext) error { // git rev-parse HEAD^2 to get the head commit because it will // always succeed whereas without --no-ff, if the merge was fast // forwarded then git rev-parse HEAD^2 would fail. - return w.wrappedGit(c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") + return w.wrappedGit(logger, c, "merge", "-q", "--no-ff", "-m", "atlantis-merge", "FETCH_HEAD") } // GetWorkingDir returns the path to the workspace for this repo and pull. @@ -388,16 +384,16 @@ func (w *FileWorkspace) GetPullDir(r models.Repo, p models.PullRequest) (string, } // Delete deletes the workspace for this repo and pull. -func (w *FileWorkspace) Delete(r models.Repo, p models.PullRequest) error { +func (w *FileWorkspace) Delete(logger logging.SimpleLogging, r models.Repo, p models.PullRequest) error { repoPullDir := w.repoPullDir(r, p) - w.Logger.Info("Deleting repo pull directory: " + repoPullDir) + logger.Info("Deleting repo pull directory: " + repoPullDir) return os.RemoveAll(repoPullDir) } // DeleteForWorkspace deletes the working dir for this workspace. 
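DeletePlan below swaps os.Remove for utils.RemoveIgnoreNonExistent, so deleting an already-absent plan file is no longer treated as an error. The helper itself is not shown in this diff; a plausible implementation, offered purely as an assumption, would be:

```go
// Package utils: a plausible sketch of the helper (the diff only shows the
// call site). This is an assumption, not the repository's actual code.
package utils

import (
	"errors"
	"io/fs"
	"os"
)

// RemoveIgnoreNonExistent removes a file but treats "already gone" as success,
// which makes plan deletion idempotent.
func RemoveIgnoreNonExistent(path string) error {
	if err := os.Remove(path); err != nil && !errors.Is(err, fs.ErrNotExist) {
		return err
	}
	return nil
}
```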
-func (w *FileWorkspace) DeleteForWorkspace(r models.Repo, p models.PullRequest, workspace string) error {
+func (w *FileWorkspace) DeleteForWorkspace(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) error {
 	workspaceDir := w.cloneDir(r, p, workspace)
-	w.Logger.Info("Deleting workspace directory: " + workspaceDir)
+	logger.Info("Deleting workspace directory: " + workspaceDir)
 	return os.RemoveAll(workspaceDir)
 }

@@ -421,20 +417,20 @@ func (w *FileWorkspace) SetCheckForUpstreamChanges() {
 	w.CheckForUpstreamChanges = true
 }

-func (w *FileWorkspace) DeletePlan(r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error {
+func (w *FileWorkspace) DeletePlan(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string, projectPath string, projectName string) error {
 	planPath := filepath.Join(w.cloneDir(r, p, workspace), projectPath, runtime.GetPlanFilename(workspace, projectName))
-	w.Logger.Info("Deleting plan: " + planPath)
-	return os.Remove(planPath)
+	logger.Info("Deleting plan: " + planPath)
+	return utils.RemoveIgnoreNonExistent(planPath)
 }

 // getGitUntrackedFiles returns a list of Git untracked files in the working dir.
-func (w *FileWorkspace) GetGitUntrackedFiles(r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
+func (w *FileWorkspace) GetGitUntrackedFiles(logger logging.SimpleLogging, r models.Repo, p models.PullRequest, workspace string) ([]string, error) {
 	workingDir, err := w.GetWorkingDir(r, p, workspace)
 	if err != nil {
 		return nil, err
 	}

-	w.Logger.Debug("Checking for Git untracked files in directory: '%s'", workingDir)
+	logger.Debug("Checking for Git untracked files in directory: '%s'", workingDir)
 	cmd := exec.Command("git", "ls-files", "--others", "--exclude-standard")
 	cmd.Dir = workingDir
@@ -444,6 +440,6 @@ func (w *FileWorkspace) GetGitUntrackedFiles(r models.Repo, p models.PullRequest
 	}

 	untrackedFiles := strings.Split(string(output), "\n")[:]
-	w.Logger.Debug("Untracked files: '%s'", strings.Join(untrackedFiles, ","))
+	logger.Debug("Untracked files: '%s'", strings.Join(untrackedFiles, ","))
 	return untrackedFiles, nil
 }
diff --git a/server/events/working_dir_test.go b/server/events/working_dir_test.go
index f277c12e6b..e25c420100 100644
--- a/server/events/working_dir_test.go
+++ b/server/events/working_dir_test.go
@@ -43,10 +43,9 @@ func TestClone_NoneExisting(t *testing.T) {
 		CheckoutMerge:               false,
 		TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}

-	cloneDir, _, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 	}, "default")
@@ -96,10 +95,9 @@ func TestClone_CheckoutMergeNoneExisting(t *testing.T) {
 		TestingOverrideHeadCloneURL: overrideURL,
 		TestingOverrideBaseCloneURL: overrideURL,
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}

-	cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -148,10 +146,9 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) {
 		TestingOverrideHeadCloneURL: overrideURL,
 		TestingOverrideBaseCloneURL: overrideURL,
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}

-	_, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	_, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -163,7 +160,7 @@ func TestClone_CheckoutMergeNoReclone(t *testing.T) {
 	runCmd(t, dataDir, "touch", "repos/0/default/proof")

 	// Now run the clone again.
-	cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -201,10 +198,9 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {
 		TestingOverrideHeadCloneURL: overrideURL,
 		TestingOverrideBaseCloneURL: overrideURL,
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}

-	_, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	_, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -216,7 +212,7 @@ func TestClone_CheckoutMergeNoRecloneFastForward(t *testing.T) {
 	runCmd(t, dataDir, "touch", "repos/0/default/proof")

 	// Now run the clone again.
-	cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -259,10 +255,9 @@ func TestClone_CheckoutMergeConflict(t *testing.T) {
 		TestingOverrideHeadCloneURL: overrideURL,
 		TestingOverrideBaseCloneURL: overrideURL,
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}

-	_, _, err := wd.Clone(models.Repo{}, models.PullRequest{
+	_, _, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -319,10 +314,9 @@ func TestClone_CheckoutMergeShallow(t *testing.T) {
 		TestingOverrideHeadCloneURL: overrideURL,
 		TestingOverrideBaseCloneURL: overrideURL,
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}

-	cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -350,10 +344,9 @@ func TestClone_CheckoutMergeShallow(t *testing.T) {
 		TestingOverrideHeadCloneURL: overrideURL,
 		TestingOverrideBaseCloneURL: overrideURL,
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}

-	cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		BaseBranch: "main",
@@ -387,9 +380,8 @@ func TestClone_NoReclone(t *testing.T) {
 		CheckoutMerge:               false,
 		TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}
-	cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 	}, "default")
@@ -432,9 +424,8 @@ func TestClone_RecloneWrongCommit(t *testing.T) {
 		CheckoutMerge:               false,
 		TestingOverrideHeadCloneURL: fmt.Sprintf("file://%s", repoDir),
 		GpgNoSigningEnabled:         true,
-		Logger:                      logger,
 	}
-	cloneDir, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	cloneDir, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "branch",
 		HeadCommit: expCommit,
@@ -506,7 +497,6 @@ func TestClone_MasterHasDiverged(t *testing.T) {
 		CheckoutMerge:       false,
 		CheckoutDepth:       50,
 		GpgNoSigningEnabled: true,
-		Logger:              logger,
 	}

 	// Pretend terraform has created a plan file, we'll check for it later
@@ -518,7 +508,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {

 	// Run the clone without the checkout merge strategy. It should return
 	// false for mergedAgain
-	_, mergedAgain, err := wd.Clone(models.Repo{}, models.PullRequest{
+	_, mergedAgain, err := wd.Clone(logger, models.Repo{}, models.PullRequest{
 		BaseRepo:   models.Repo{},
 		HeadBranch: "second-pr",
 		BaseBranch: "main",
@@ -532,7 +522,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {
 	// Run the clone twice with the merge strategy, the first run should
 	// return true for mergedAgain, subsequent runs should
 	// return false since the first call is supposed to merge.
-	_, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{
+	_, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{
 		BaseRepo:   models.Repo{CloneURL: repoDir},
 		HeadBranch: "second-pr",
 		BaseBranch: "main",
@@ -542,7 +532,7 @@ func TestClone_MasterHasDiverged(t *testing.T) {
 	Assert(t, mergedAgain == true, "First clone with CheckoutMerge=true with diverged base should have merged")

 	wd.SetCheckForUpstreamChanges()
-	_, mergedAgain, err = wd.Clone(models.Repo{CloneURL: repoDir}, models.PullRequest{
+	_, mergedAgain, err = wd.Clone(logger, models.Repo{CloneURL: repoDir}, models.PullRequest{
 		BaseRepo:   models.Repo{CloneURL: repoDir},
 		HeadBranch: "second-pr",
 		BaseBranch: "main",
@@ -610,15 +600,14 @@ func TestHasDiverged_MasterHasDiverged(t *testing.T) {
 		CheckoutMerge:       true,
 		CheckoutDepth:       50,
 		GpgNoSigningEnabled: true,
-		Logger:              logger,
 	}

-	hasDiverged := wd.HasDiverged(repoDir + "/repos/0/default")
+	hasDiverged := wd.HasDiverged(logger, repoDir+"/repos/0/default")
 	Equals(t, hasDiverged, true)

 	// Run it again but without the checkout merge strategy. It should return
 	// false.
 	wd.CheckoutMerge = false
-	hasDiverged = wd.HasDiverged(repoDir + "/repos/0/default")
+	hasDiverged = wd.HasDiverged(logger, repoDir+"/repos/0/default")
 	Equals(t, hasDiverged, false)
 }
diff --git a/server/logging/simple_logger.go b/server/logging/simple_logger.go
index e7d18e5654..5003a1fda0 100644
--- a/server/logging/simple_logger.go
+++ b/server/logging/simple_logger.go
@@ -19,7 +19,6 @@ package logging
 import (
 	"bytes"
 	"fmt"
-	"testing"

 	"github.com/pkg/errors"
 	"go.uber.org/zap"
@@ -184,7 +183,7 @@ func (l *StructuredLogger) saveToHistory(lvl LogLevel, format string, a ...inter

 // NewNoopLogger creates a logger instance that discards all logs and never
 // writes them. Used for testing.
-func NewNoopLogger(t *testing.T) SimpleLogging {
+func NewNoopLogger(t zaptest.TestingT) SimpleLogging {
 	level := zap.DebugLevel
 	return &StructuredLogger{
 		z: zaptest.NewLogger(t, zaptest.Level(level)).Sugar(),
diff --git a/server/router_test.go b/server/router_test.go
index 4b683e07cc..02cb51668b 100644
--- a/server/router_test.go
+++ b/server/router_test.go
@@ -11,7 +11,7 @@ import (
 	"github.com/runatlantis/atlantis/server/events/command"
 	"github.com/runatlantis/atlantis/server/events/models"
 	. "github.com/runatlantis/atlantis/testing"
-	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )

 func TestRouter_GenerateLockURL(t *testing.T) {
@@ -107,6 +107,6 @@ func TestGenerateProjectJobURL_ShouldReturnErrorWhenJobIDNotSpecified(t *testing
 	}
 	expectedErrString := "no job id in ctx"
 	gotURL, err := router.GenerateProjectJobURL(ctx)
-	assert.EqualError(t, err, expectedErrString)
+	require.EqualError(t, err, expectedErrString)
 	Equals(t, "", gotURL)
 }
diff --git a/server/server.go b/server/server.go
index eeab9d732e..521643df40 100644
--- a/server/server.go
+++ b/server/server.go
@@ -50,7 +50,7 @@ import (
 	"github.com/pkg/errors"
 	"github.com/runatlantis/atlantis/server/controllers"
 	events_controllers "github.com/runatlantis/atlantis/server/controllers/events"
-	"github.com/runatlantis/atlantis/server/controllers/templates"
+	"github.com/runatlantis/atlantis/server/controllers/web_templates"
 	"github.com/runatlantis/atlantis/server/controllers/websocket"
 	"github.com/runatlantis/atlantis/server/core/locking"
 	"github.com/runatlantis/atlantis/server/core/runtime"
@@ -62,6 +62,7 @@ import (
 	"github.com/runatlantis/atlantis/server/events/vcs"
 	"github.com/runatlantis/atlantis/server/events/vcs/bitbucketcloud"
 	"github.com/runatlantis/atlantis/server/events/vcs/bitbucketserver"
+	"github.com/runatlantis/atlantis/server/events/vcs/gitea"
 	"github.com/runatlantis/atlantis/server/events/webhooks"
 	"github.com/runatlantis/atlantis/server/logging"
 )
@@ -106,10 +107,10 @@ type Server struct {
 	StatusController               *controllers.StatusController
 	JobsController                 *controllers.JobsController
 	APIController                  *controllers.APIController
-	IndexTemplate                  templates.TemplateWriter
-	LockDetailTemplate             templates.TemplateWriter
-	ProjectJobsTemplate            templates.TemplateWriter
-	ProjectJobsErrorTemplate       templates.TemplateWriter
+	IndexTemplate                  web_templates.TemplateWriter
+	LockDetailTemplate             web_templates.TemplateWriter
+	ProjectJobsTemplate            web_templates.TemplateWriter
+	ProjectJobsErrorTemplate       web_templates.TemplateWriter
 	SSLCertFile                    string
 	SSLKeyFile                     string
 	CertLastRefreshTime            time.Time
@@ -176,6 +177,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 	var bitbucketCloudClient *bitbucketcloud.Client
 	var bitbucketServerClient *bitbucketserver.Client
 	var azuredevopsClient *vcs.AzureDevopsClient
+	var giteaClient *gitea.GiteaClient

 	policyChecksEnabled := false
 	if userConfig.EnablePolicyChecksFlag {
@@ -237,18 +239,20 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 			return nil, err
 		}
 		githubCredentials = &vcs.GithubAppCredentials{
-			AppID:    userConfig.GithubAppID,
-			Key:      privateKey,
-			Hostname: userConfig.GithubHostname,
-			AppSlug:  userConfig.GithubAppSlug,
+			AppID:          userConfig.GithubAppID,
+			InstallationID: userConfig.GithubAppInstallationID,
+			Key:            privateKey,
+			Hostname:       userConfig.GithubHostname,
+			AppSlug:        userConfig.GithubAppSlug,
 		}
 		githubAppEnabled = true
 	} else if userConfig.GithubAppID != 0 && userConfig.GithubAppKey != "" {
 		githubCredentials = &vcs.GithubAppCredentials{
-			AppID:    userConfig.GithubAppID,
-			Key:      []byte(userConfig.GithubAppKey),
-			Hostname: userConfig.GithubHostname,
-			AppSlug:  userConfig.GithubAppSlug,
+			AppID:          userConfig.GithubAppID,
+			InstallationID: userConfig.GithubAppInstallationID,
+			Key:            []byte(userConfig.GithubAppKey),
+			Hostname:       userConfig.GithubHostname,
+			AppSlug:        userConfig.GithubAppSlug,
 		}
 		githubAppEnabled = true
 	}
@@ -300,6 +304,19 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 			return nil, err
 		}
 	}
+	if userConfig.GiteaToken != "" {
+		supportedVCSHosts = append(supportedVCSHosts, models.Gitea)
+
+		giteaClient, err = gitea.NewClient(userConfig.GiteaBaseURL, userConfig.GiteaUser, userConfig.GiteaToken, userConfig.GiteaPageSize, logger)
+		if err != nil {
+			fmt.Println("error setting up gitea client", "error", err)
+			return nil, errors.Wrapf(err, "setting up Gitea client")
+		} else {
+			logger.Info("gitea client configured successfully")
+		}
+	}
+
+	logger.Info("Supported VCS Hosts", "hosts", supportedVCSHosts)

 	home, err := homedir.Dir()
 	if err != nil {
@@ -333,6 +350,11 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 				return nil, err
 			}
 		}
+		if userConfig.GiteaUser != "" {
+			if err := vcs.WriteGitCreds(userConfig.GiteaUser, userConfig.GiteaToken, userConfig.GiteaBaseURL, home, logger, false); err != nil {
+				return nil, err
+			}
+		}
 	}

 	// default the project files used to generate the module index to the autoplan-file-list if autoplan-modules is true
@@ -356,7 +378,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 	if err != nil {
 		return nil, errors.Wrap(err, "initializing webhooks")
 	}
-	vcsClient := vcs.NewClientProxy(githubClient, gitlabClient, bitbucketCloudClient, bitbucketServerClient, azuredevopsClient)
+	vcsClient := vcs.NewClientProxy(githubClient, gitlabClient, bitbucketCloudClient, bitbucketServerClient, azuredevopsClient, giteaClient)
 	commitStatusUpdater := &events.DefaultCommitStatusUpdater{Client: vcsClient, StatusName: userConfig.VCSStatusName}

 	binDir, err := mkSubDir(userConfig.DataDir, BinDirName)
@@ -469,7 +491,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		CheckoutMerge:       userConfig.CheckoutStrategy == "merge",
 		CheckoutDepth:       userConfig.CheckoutDepth,
 		GithubAppEnabled:    githubAppEnabled,
-		Logger:              logger,
 	}

 	scheduledExecutorService := scheduled.NewExecutorService(
@@ -503,7 +524,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 	}
 	deleteLockCommand := &events.DefaultDeleteLockCommand{
 		Locker:           lockingClient,
-		Logger:           logger,
 		WorkingDir:       workingDir,
 		WorkingDirLocker: workingDirLocker,
 		Backend:          backend,
@@ -515,7 +535,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		&events.PullClosedExecutor{
 			Locker:                   lockingClient,
 			WorkingDir:               workingDir,
-			Logger:                   logger,
 			Backend:                  backend,
 			PullClosedTemplate:       &events.PullClosedEventTemplate{},
 			LogStreamResourceCleaner: projectCmdOutputHandler,
@@ -528,6 +547,8 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		GithubToken:    userConfig.GithubToken,
 		GitlabUser:     userConfig.GitlabUser,
 		GitlabToken:    userConfig.GitlabToken,
+		GiteaUser:      userConfig.GiteaUser,
+		GiteaToken:     userConfig.GiteaToken,
 		AllowDraftPRs:  userConfig.PlanDrafts,
 		BitbucketUser:  userConfig.BitbucketUser,
 		BitbucketToken: userConfig.BitbucketToken,
@@ -538,6 +559,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 	commentParser := events.NewCommentParser(
 		userConfig.GithubUser,
 		userConfig.GitlabUser,
+		userConfig.GiteaUser,
 		userConfig.BitbucketUser,
 		userConfig.AzureDevopsUser,
 		userConfig.ExecutableName,
@@ -580,6 +602,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		Router: router,
 	}
 	projectCommandBuilder := events.NewInstrumentedProjectCommandBuilder(
+		logger,
 		policyChecksEnabled,
 		validator,
 		&events.DefaultProjectFinder{},
@@ -601,7 +624,6 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		userConfig.IncludeGitUntrackedFiles,
 		userConfig.AutoDiscoverModeFlag,
 		statsScope,
-		logger,
 		terraformClient,
 	)

@@ -798,6 +820,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		GithubPullGetter:               githubClient,
 		GitlabMergeRequestGetter:       gitlabClient,
 		AzureDevopsPullGetter:          azuredevopsClient,
+		GiteaPullGetter:                giteaClient,
 		CommentCommandRunnerByCmd:      commentCommandRunnerByCmd,
 		EventParser:                    eventParser,
 		FailOnPreWorkflowHookError:     userConfig.FailOnPreWorkflowHookError,
@@ -829,7 +852,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		ApplyLocker:        applyLockingClient,
 		Logger:             logger,
 		VCSClient:          vcsClient,
-		LockDetailTemplate: templates.LockTemplate,
+		LockDetailTemplate: web_templates.LockTemplate,
 		WorkingDir:         workingDir,
 		WorkingDirLocker:   workingDirLocker,
 		Backend:            backend,
@@ -847,24 +870,27 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		AtlantisVersion:          config.AtlantisVersion,
 		AtlantisURL:              parsedURL,
 		Logger:                   logger,
-		ProjectJobsTemplate:      templates.ProjectJobsTemplate,
-		ProjectJobsErrorTemplate: templates.ProjectJobsErrorTemplate,
+		ProjectJobsTemplate:      web_templates.ProjectJobsTemplate,
+		ProjectJobsErrorTemplate: web_templates.ProjectJobsErrorTemplate,
 		Backend:                  backend,
 		WsMux:                    wsMux,
 		KeyGenerator:             controllers.JobIDKeyGenerator{},
 		StatsScope:               statsScope.SubScope("api"),
 	}
 	apiController := &controllers.APIController{
-		APISecret:                 []byte(userConfig.APISecret),
-		Locker:                    lockingClient,
-		Logger:                    logger,
-		Parser:                    eventParser,
-		ProjectCommandBuilder:     projectCommandBuilder,
-		ProjectPlanCommandRunner:  instrumentedProjectCmdRunner,
-		ProjectApplyCommandRunner: instrumentedProjectCmdRunner,
-		RepoAllowlistChecker:      repoAllowlist,
-		Scope:                     statsScope.SubScope("api"),
-		VCSClient:                 vcsClient,
+		APISecret:                      []byte(userConfig.APISecret),
+		Locker:                         lockingClient,
+		Logger:                         logger,
+		Parser:                         eventParser,
+		ProjectCommandBuilder:          projectCommandBuilder,
+		ProjectPlanCommandRunner:       instrumentedProjectCmdRunner,
+		ProjectApplyCommandRunner:      instrumentedProjectCmdRunner,
+		FailOnPreWorkflowHookError:     userConfig.FailOnPreWorkflowHookError,
+		PreWorkflowHooksCommandRunner:  preWorkflowHooksCommandRunner,
+		PostWorkflowHooksCommandRunner: postWorkflowHooksCommandRunner,
+		RepoAllowlistChecker:           repoAllowlist,
+		Scope:                          statsScope.SubScope("api"),
+		VCSClient:                      vcsClient,
 	}

 	eventsController := &events_controllers.VCSEventsController{
@@ -889,6 +915,7 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		AzureDevopsWebhookBasicUser:     []byte(userConfig.AzureDevopsWebhookUser),
 		AzureDevopsWebhookBasicPassword: []byte(userConfig.AzureDevopsWebhookPassword),
 		AzureDevopsRequestValidator:     &events_controllers.DefaultAzureDevopsRequestValidator{},
+		GiteaWebhookSecret:              []byte(userConfig.GiteaWebhookSecret),
 	}
 	githubAppController := &controllers.GithubAppController{
 		AtlantisURL:         parsedURL,
@@ -918,10 +945,10 @@ func NewServer(userConfig UserConfig, config Config) (*Server, error) {
 		JobsController:                 jobsController,
 		StatusController:               statusController,
 		APIController:                  apiController,
-		IndexTemplate:                  templates.IndexTemplate,
-		LockDetailTemplate:             templates.LockTemplate,
-		ProjectJobsTemplate:            templates.ProjectJobsTemplate,
-		ProjectJobsErrorTemplate:       templates.ProjectJobsErrorTemplate,
+		IndexTemplate:                  web_templates.IndexTemplate,
+		LockDetailTemplate:             web_templates.LockTemplate,
+		ProjectJobsTemplate:            web_templates.ProjectJobsTemplate,
+		ProjectJobsErrorTemplate:       web_templates.ProjectJobsErrorTemplate,
 		SSLKeyFile:                     userConfig.SSLKeyFile,
 		SSLCertFile:                    userConfig.SSLCertFile,
 		DisableGlobalApplyLock:         userConfig.DisableGlobalApplyLock,
@@ -1046,10 +1073,10 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) {
 		return
 	}

-	var lockResults []templates.LockIndexData
+	var lockResults []web_templates.LockIndexData
 	for id, v := range locks {
 		lockURL, _ := s.Router.Get(LockViewRouteName).URL("id", url.QueryEscape(id))
-		lockResults = append(lockResults, templates.LockIndexData{
+		lockResults = append(lockResults, web_templates.LockIndexData{
 			// NOTE: must use .String() instead of .Path because we need the
 			// query params as part of the lock URL.
 			LockPath:      lockURL.String(),
@@ -1059,7 +1086,7 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) {
 			Path:          v.Project.Path,
 			Workspace:     v.Workspace,
 			Time:          v.Time,
-			TimeFormatted: v.Time.Format("02-01-2006 15:04:05"),
+			TimeFormatted: v.Time.Format("2006-01-02 15:04:05"),
 		})
 	}

@@ -1071,16 +1098,16 @@ func (s *Server) Index(w http.ResponseWriter, _ *http.Request) {
 		return
 	}

-	applyLockData := templates.ApplyLockData{
+	applyLockData := web_templates.ApplyLockData{
 		Time:                   applyCmdLock.Time,
 		Locked:                 applyCmdLock.Locked,
 		GlobalApplyLockEnabled: applyCmdLock.GlobalApplyLockEnabled,
-		TimeFormatted:          applyCmdLock.Time.Format("02-01-2006 15:04:05"),
+		TimeFormatted:          applyCmdLock.Time.Format("2006-01-02 15:04:05"),
 	}
 	//Sort by date - newest to oldest.
 	sort.SliceStable(lockResults, func(i, j int) bool { return lockResults[i].Time.After(lockResults[j].Time) })

-	err = s.IndexTemplate.Execute(w, templates.IndexData{
+	err = s.IndexTemplate.Execute(w, web_templates.IndexData{
 		Locks:            lockResults,
 		PullToJobMapping: preparePullToJobMappings(s),
 		ApplyLock:        applyLockData,
@@ -1100,7 +1127,7 @@ func preparePullToJobMappings(s *Server) []jobs.PullInfoWithJobIDs {
 		for j := range pullToJobMappings[i].JobIDInfos {
 			jobUrl, _ := s.Router.Get(ProjectJobsViewRouteName).URL("job-id", pullToJobMappings[i].JobIDInfos[j].JobID)
 			pullToJobMappings[i].JobIDInfos[j].JobIDUrl = jobUrl.String()
-			pullToJobMappings[i].JobIDInfos[j].TimeFormatted = pullToJobMappings[i].JobIDInfos[j].Time.Format("02-01-2006 15:04:05")
+			pullToJobMappings[i].JobIDInfos[j].TimeFormatted = pullToJobMappings[i].JobIDInfos[j].Time.Format("2006-01-02 15:04:05")
 		}

 		//Sort by date - newest to oldest.
diff --git a/server/server_test.go b/server/server_test.go
index e9151443e2..4da3583013 100644
--- a/server/server_test.go
+++ b/server/server_test.go
@@ -27,8 +27,8 @@ import (
 	"github.com/gorilla/mux"
 	. "github.com/petergtz/pegomock/v4"
 	"github.com/runatlantis/atlantis/server"
-	"github.com/runatlantis/atlantis/server/controllers/templates"
-	tMocks "github.com/runatlantis/atlantis/server/controllers/templates/mocks"
+	"github.com/runatlantis/atlantis/server/controllers/web_templates"
+	tMocks "github.com/runatlantis/atlantis/server/controllers/web_templates/mocks"
 	"github.com/runatlantis/atlantis/server/core/locking/mocks"
 	"github.com/runatlantis/atlantis/server/events/models"
 	"github.com/runatlantis/atlantis/server/jobs"
@@ -113,19 +113,19 @@ func TestIndex_Success(t *testing.T) {
 	req, _ := http.NewRequest("GET", "", bytes.NewBuffer(nil))
 	w := httptest.NewRecorder()
 	s.Index(w, req)
-	it.VerifyWasCalledOnce().Execute(w, templates.IndexData{
-		ApplyLock: templates.ApplyLockData{
+	it.VerifyWasCalledOnce().Execute(w, web_templates.IndexData{
+		ApplyLock: web_templates.ApplyLockData{
 			Locked:        false,
 			Time:          time.Time{},
-			TimeFormatted: "01-01-0001 00:00:00",
+			TimeFormatted: "0001-01-01 00:00:00",
 		},
-		Locks: []templates.LockIndexData{
+		Locks: []web_templates.LockIndexData{
 			{
 				LockPath:      "/lock?id=lkysow%252Fatlantis-example%252F.%252Fdefault",
 				RepoFullName:  "lkysow/atlantis-example",
 				PullNum:       9,
 				Time:          now,
-				TimeFormatted: now.Format("02-01-2006 15:04:05"),
+				TimeFormatted: now.Format("2006-01-02 15:04:05"),
 			},
 		},
 		PullToJobMapping: []jobs.PullInfoWithJobIDs{},
@@ -139,9 +139,12 @@ func TestHealthz(t *testing.T) {
 	req, _ := http.NewRequest("GET", "/healthz", bytes.NewBuffer(nil))
 	w := httptest.NewRecorder()
 	s.Healthz(w, req)
-	Equals(t, http.StatusOK, w.Result().StatusCode)
-	body, _ := io.ReadAll(w.Result().Body)
-	Equals(t, "application/json", w.Result().Header["Content-Type"][0])
+
+	resp := w.Result()
+	defer resp.Body.Close()
+	Equals(t, http.StatusOK, resp.StatusCode)
+	body, _ := io.ReadAll(resp.Body)
+	Equals(t, "application/json", resp.Header["Content-Type"][0])
 	Equals(t, `{
   "status": "ok"
diff --git a/server/user_config.go b/server/user_config.go
index 977b008610..31b8271efa 100644
--- a/server/user_config.go
+++ b/server/user_config.go
@@ -57,7 +57,13 @@ type UserConfig struct {
 	GithubAppKey                   string `mapstructure:"gh-app-key"`
 	GithubAppKeyFile               string `mapstructure:"gh-app-key-file"`
 	GithubAppSlug                  string `mapstructure:"gh-app-slug"`
+	GithubAppInstallationID        int64  `mapstructure:"gh-app-installation-id"`
 	GithubTeamAllowlist            string `mapstructure:"gh-team-allowlist"`
+	GiteaBaseURL                   string `mapstructure:"gitea-base-url"`
+	GiteaToken                     string `mapstructure:"gitea-token"`
+	GiteaUser                      string `mapstructure:"gitea-user"`
+	GiteaWebhookSecret             string `mapstructure:"gitea-webhook-secret"`
+	GiteaPageSize                  int    `mapstructure:"gitea-page-size"`
 	GitlabHostname                 string `mapstructure:"gitlab-hostname"`
 	GitlabToken                    string `mapstructure:"gitlab-token"`
 	GitlabUser                     string `mapstructure:"gitlab-user"`
diff --git a/server/user_config_test.go b/server/user_config_test.go
index 490abe7c52..225049f335 100644
--- a/server/user_config_test.go
+++ b/server/user_config_test.go
@@ -8,6 +8,7 @@ import (
 	"github.com/runatlantis/atlantis/server/logging"
 	. "github.com/runatlantis/atlantis/testing"
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )

 func TestUserConfig_ToAllowCommandNames(t *testing.T) {
@@ -61,7 +62,7 @@ func TestUserConfig_ToAllowCommandNames(t *testing.T) {
 			}
 			got, err := u.ToAllowCommandNames()
 			if err != nil {
-				assert.ErrorContains(t, err, tt.wantErr, "ToAllowCommandNames()")
+				require.ErrorContains(t, err, tt.wantErr, "ToAllowCommandNames()")
 			}
 			assert.Equalf(t, tt.want, got, "ToAllowCommandNames()")
 		})
diff --git a/server/utils/os.go b/server/utils/os.go
new file mode 100644
index 0000000000..2a06d8486e
--- /dev/null
+++ b/server/utils/os.go
@@ -0,0 +1,13 @@
+package utils
+
+import "os"
+
+// RemoveIgnoreNonExistent removes a file, ignoring if it doesn't exist.
+func RemoveIgnoreNonExistent(file string) error {
+	err := os.Remove(file)
+	if err == nil || os.IsNotExist(err) {
+		return nil
+	}
+
+	return err
+}
diff --git a/testdrive/utils.go b/testdrive/utils.go
index cbf706d587..b29dc5ad3a 100644
--- a/testdrive/utils.go
+++ b/testdrive/utils.go
@@ -35,7 +35,7 @@ import (
 )

 const hashicorpReleasesURL = "https://releases.hashicorp.com"
-const terraformVersion = "1.7.4" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
+const terraformVersion = "1.8.5" // renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
 const ngrokDownloadURL = "https://bin.equinox.io/c/4VmDzA7iaHb"
 const ngrokAPIURL = "localhost:41414" // We hope this isn't used.
 const atlantisPort = 4141
diff --git a/testing/Dockerfile b/testing/Dockerfile
index aafcb3cbe8..17b0f6a167 100644
--- a/testing/Dockerfile
+++ b/testing/Dockerfile
@@ -1,4 +1,4 @@
-FROM golang:1.22.1
+FROM golang:1.22.4@sha256:c2010b9c2342431a24a2e64e33d9eb2e484af49e72c820e200d332d214d5e61f

 RUN apt-get update && apt-get --no-install-recommends -y install unzip \
     && apt-get clean \
@@ -6,7 +6,7 @@ RUN apt-get update && apt-get --no-install-recommends -y install unzip \

 # Install Terraform
 # renovate: datasource=github-releases depName=hashicorp/terraform versioning=hashicorp
-ENV TERRAFORM_VERSION=1.7.4
+ENV TERRAFORM_VERSION=1.8.5
 RUN case $(uname -m) in x86_64|amd64) ARCH="amd64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \
     wget -nv -O terraform.zip https://releases.hashicorp.com/terraform/${TERRAFORM_VERSION}/terraform_${TERRAFORM_VERSION}_linux_${ARCH}.zip && \
     mkdir -p /usr/local/bin/tf/versions/${TERRAFORM_VERSION} && \
@@ -16,7 +16,7 @@ RUN case $(uname -m) in x86_64|amd64) ARCH="x86_64" ;; aarch64|arm64|armv7l) ARCH

 # Install conftest
 # renovate: datasource=github-releases depName=open-policy-agent/conftest
-ENV CONFTEST_VERSION=0.50.0
+ENV CONFTEST_VERSION=0.52.0
 SHELL ["/bin/bash", "-o", "pipefail", "-c"]
 RUN case $(uname -m) in x86_64|amd64) ARCH="x86_64" ;; aarch64|arm64|armv7l) ARCH="arm64" ;; esac && \
     curl -LOs https://github.com/open-policy-agent/conftest/releases/download/v${CONFTEST_VERSION}/conftest_${CONFTEST_VERSION}_Linux_${ARCH}.tar.gz && \