From 0f8ddcc15a8410087f7dd75169478a523f455ae2 Mon Sep 17 00:00:00 2001
From: Violet Hynes
Date: Wed, 20 Nov 2024 21:26:58 +0000
Subject: [PATCH] backport of commit 1a15c4b73e717131e55e9fb60dcf770db2946010
---
.github/actions/changed-files/action.yml | 3 -
.../actions/install-external-tools/action.yml | 5 +-
.../actions/run-apupgrade-tests/action.yml | 123 -
.github/actions/set-up-sqlc/action.yml | 62 -
.github/pull_request_template.md | 5 +-
.github/scripts/changed-files.sh | 13 -
.github/scripts/install-cob.sh | 409 --
.github/workflows/ci.yml | 81 +-
.github/workflows/hack-week-benchmark.yml | 26 -
.go-version | 2 +-
.release/versions.hcl | 8 +-
CHANGELOG.md | 471 +-
CODEOWNERS | 4 +-
Makefile | 6 -
api/auth/approle/LICENSE | 365 --
api/auth/aws/LICENSE | 365 --
api/auth/azure/LICENSE | 365 --
api/auth/gcp/LICENSE | 365 --
api/auth/kubernetes/LICENSE | 365 --
api/auth/ldap/LICENSE | 365 --
api/auth/userpass/LICENSE | 365 --
api/client.go | 6 -
api/output_string.go | 6 +-
api/sys_seal.go | 37 +-
audit/headers.go | 1 -
audit/headers_test.go | 30 +-
builtin/logical/aws/client.go | 3 -
builtin/logical/aws/path_config_root.go | 8 -
builtin/logical/aws/path_config_root_test.go | 1 -
builtin/logical/aws/path_static_roles.go | 6 -
builtin/logical/pki/acme_challenge_engine.go | 2 +-
builtin/logical/pki/acme_challenges.go | 26 +-
builtin/logical/pki/acme_errors.go | 3 +-
builtin/logical/pki/backend_test.go | 129 +-
builtin/logical/pki/cert_util.go | 2 +-
builtin/logical/pki/crl_test.go | 28 +-
builtin/logical/pki/crl_util.go | 14 -
builtin/logical/pki/issuing/issuers.go | 1 -
builtin/logical/pki/issuing/issuers_oss.go | 8 -
builtin/logical/pki/metadata.pb.go | 26 +-
builtin/logical/pki/path_config_crl.go | 263 +-
builtin/logical/pki/path_fetch_issuers.go | 183 +-
.../pki/path_fetch_issuers_stubs_oss.go | 21 -
builtin/logical/pki/path_manage_issuers.go | 79 +-
builtin/logical/pki/path_tidy.go | 522 +-
builtin/logical/pki/pki_backend/crl_config.go | 2 -
builtin/logical/transit/api_utils.go | 29 -
builtin/logical/transit/api_utils_test.go | 52 -
builtin/logical/transit/backend_test.go | 145 +-
builtin/logical/transit/path_datakey.go | 22 +-
builtin/logical/transit/path_datakey_test.go | 125 -
builtin/logical/transit/path_decrypt.go | 28 +-
builtin/logical/transit/path_encrypt.go | 48 +-
builtin/logical/transit/path_rewrap.go | 80 +-
builtin/logical/transit/path_rewrap_test.go | 113 -
builtin/logical/transit/path_sign_verify.go | 539 +-
.../logical/transit/path_sign_verify_ce.go | 81 -
.../logical/transit/path_sign_verify_test.go | 99 -
changelog/18615.txt | 3 -
changelog/22726.txt | 3 -
changelog/25486.txt | 3 -
changelog/27033.txt | 3 -
changelog/27920.txt | 3 -
changelog/27927.txt | 6 -
changelog/28126.txt | 6 -
changelog/28330.txt | 3 -
changelog/28456.txt | 3 -
changelog/28596.txt | 4 -
changelog/28654.txt | 3 -
changelog/28678.txt | 3 -
changelog/28798.txt | 3 -
changelog/28808.txt | 6 -
changelog/28822.txt | 3 -
changelog/28867.txt | 4 -
changelog/28875.txt | 3 -
changelog/28938.txt | 3 -
changelog/_go-ver-1190.txt | 2 +-
command/agent/config/config.go | 2 +
command/agentproxyshared/auth/cert/cert.go | 159 +-
.../agentproxyshared/auth/cert/cert_test.go | 151 +-
.../auth/cert/test-fixtures/keys/cert1.pem | 17 -
.../auth/cert/test-fixtures/keys/key1.pem | 28 -
.../cache/cacheboltdb/bolt.go | 2 +-
.../cache/cacheboltdb/bolt_test.go | 2 +-
command/command_test.go | 19 -
.../operator_usage_testonly_test.go | 2 +-
command/format.go | 4 -
command/main.go | 5 -
command/proxy/config/config.go | 2 +
command/server.go | 60 +-
command/status_test.go | 40 -
go.mod | 26 +-
go.sum | 52 +-
helper/dhutil/dhutil.go | 11 +-
helper/forwarding/types.pb.go | 92 +-
helper/identity/mfa/types.pb.go | 180 +-
helper/identity/types.pb.go | 136 +-
helper/storagepacker/types.pb.go | 48 +-
helper/testhelpers/mssql/mssqlhelper.go | 71 +-
http/handler.go | 9 +-
http/sys_seal.go | 8 -
http/sys_seal_test.go | 29 -
physical/dynamodb/dynamodb.go | 18 +-
physical/raft/config.go | 39 +-
physical/raft/fsm.go | 2 +-
physical/raft/raft.go | 87 +-
physical/raft/raft_test.go | 38 +-
physical/raft/snapshot.go | 2 +-
physical/raft/types.pb.go | 136 +-
physical/s3/s3.go | 8 +-
sdk/database/dbplugin/database.pb.go | 378 +-
sdk/database/dbplugin/database_grpc.pb.go | 25 +-
sdk/database/dbplugin/v5/proto/database.pb.go | 334 +-
.../dbplugin/v5/proto/database_grpc.pb.go | 25 +-
sdk/go.mod | 40 +-
sdk/go.sum | 85 +-
sdk/helper/clientcountutil/clientcountutil.go | 34 +-
.../clientcountutil/clientcountutil_test.go | 6 +-
.../generation/generate_data.pb.go | 136 +-
sdk/helper/docker/testhelpers.go | 3 +-
sdk/helper/keysutil/policy.go | 119 +-
sdk/helper/keysutil/policy_test.go | 80 +-
sdk/helper/pluginutil/multiplexing.pb.go | 48 +-
sdk/helper/pluginutil/multiplexing_grpc.pb.go | 25 +-
sdk/logical/event.pb.go | 70 +-
sdk/logical/identity.pb.go | 136 +-
sdk/logical/plugin.pb.go | 26 +-
sdk/logical/version.pb.go | 48 +-
sdk/logical/version_grpc.pb.go | 25 +-
sdk/physical/physical.go | 19 -
sdk/plugin/pb/backend.pb.go | 1192 +++-
sdk/plugin/pb/backend_grpc.pb.go | 82 +-
tools/tools.sh | 4 +-
ui/README.md | 32 +-
ui/app/adapters/auth-method.js | 34 +-
ui/app/adapters/generated-item-list.js | 126 +-
ui/app/adapters/kmip/config.js | 11 +-
ui/app/adapters/kmip/role.js | 33 +-
ui/app/adapters/named-path.js | 2 +-
ui/app/app.js | 37 +-
ui/app/components/alphabet-edit.hbs | 4 +-
ui/app/components/console/ui-panel.js | 4 +-
.../components/database-role-setting-form.js | 2 +-
ui/app/components/generated-item-list.js | 4 +-
ui/app/components/keymgmt/distribute.js | 3 +-
.../mfa/mfa-login-enforcement-form.js | 10 +-
ui/app/components/mount-backend-form.hbs | 6 +-
ui/app/components/pgp-file.js | 3 -
ui/app/components/secret-create-or-update.js | 2 +-
ui/app/components/sidebar/frame.hbs | 6 +-
ui/app/components/sidebar/user-menu.hbs | 2 +-
ui/app/components/transformation-edit.js | 74 +-
ui/app/components/transit-key-actions.hbs | 5 -
ui/app/components/transit-key-actions.js | 22 +-
.../cluster/access/mfa/methods/create.js | 5 +-
.../cluster/access/oidc/clients/client.js | 15 +
.../vault/cluster/access/oidc/keys/key.js | 14 +
.../cluster/access/oidc/providers/provider.js | 14 +
ui/app/controllers/vault/cluster/init.js | 3 +-
ui/app/decorators/model-validations.js | 2 +-
ui/app/helpers/mountable-secret-engines.js | 4 +-
ui/app/initializers/deprecation-filter.js | 3 +-
ui/app/mixins/unload-model-route.js | 2 -
ui/app/models/auth-config.js | 7 +-
ui/app/models/auth-method.js | 2 +-
ui/app/models/database/connection.js | 2 +-
ui/app/models/database/role.js | 2 +-
ui/app/models/generated-item.js | 44 -
ui/app/models/keymgmt/provider.js | 4 +-
ui/app/models/kmip/role.js | 79 +-
ui/app/models/kv/data.js | 2 +-
ui/app/models/secret-engine.js | 10 +-
ui/app/models/transit-key.js | 16 +-
.../cluster/access/identity/aliases/index.js | 8 +-
.../vault/cluster/access/identity/index.js | 8 +-
.../vault/cluster/access/leases/list.js | 5 +-
ui/app/routes/vault/cluster/access/method.js | 40 +-
.../vault/cluster/access/method/item/list.js | 8 +-
ui/app/routes/vault/cluster/access/methods.js | 2 +-
.../vault/cluster/access/namespaces/index.js | 7 +-
ui/app/routes/vault/cluster/policies/index.js | 8 +-
ui/app/routes/vault/cluster/policy/show.js | 1 +
.../vault/cluster/secrets/backend/list.js | 9 +-
.../settings/auth/configure/section.js | 2 +-
ui/app/serializers/database/connection.js | 2 +-
ui/app/services/path-help.js | 273 +-
ui/app/services/{pagination.js => store.js} | 92 +-
ui/app/styles/components/kmip-role-edit.scss | 5 +-
ui/app/styles/components/popup-menu.scss | 3 +-
.../components/replication-mode-summary.scss | 16 +
ui/app/styles/core.scss | 1 +
ui/app/styles/core/element-styling.scss | 5 +
.../templates/components/console/log-json.hbs | 3 +-
.../components/control-group-success.hbs | 3 +-
.../components/generated-item-list.hbs | 15 +-
.../components/identity/popup-alias.hbs | 9 +-
.../components/identity/popup-members.hbs | 2 +-
.../components/identity/popup-metadata.hbs | 2 +-
.../components/identity/popup-policy.hbs | 13 +-
.../mfa/login-enforcement-list-item.hbs | 6 +-
.../components/mfa/method-list-item.hbs | 6 +-
.../mfa/mfa-login-enforcement-form.hbs | 2 +-
.../templates/components/oidc/client-list.hbs | 6 +-
.../components/oidc/provider-list.hbs | 6 +-
ui/app/templates/components/pgp-file.hbs | 2 +-
.../components/raft-storage-overview.hbs | 7 +-
.../components/secret-list/aws-role-item.hbs | 12 +-
.../secret-list/database-list-item.hbs | 19 +-
.../templates/components/secret-list/item.hbs | 12 +-
.../components/secret-list/ssh-role-item.hbs | 22 +-
.../secret-list/transform-list-item.hbs | 4 +-
.../transform-transformation-item.hbs | 4 +-
.../components/transform-role-edit.hbs | 4 +-
.../components/transform-template-edit.hbs | 4 +-
.../components/transformation-edit.hbs | 4 +-
ui/app/templates/components/transit-edit.hbs | 3 +-
.../components/transit-form-edit.hbs | 4 +-
.../components/transit-form-show.hbs | 2 +-
.../components/transit-key-action/datakey.hbs | 26 +-
.../components/transit-key-action/decrypt.hbs | 26 +-
.../components/transit-key-action/encrypt.hbs | 29 +-
.../components/transit-key-action/rewrap.hbs | 51 +-
ui/app/templates/docs.hbs | 2 +-
.../vault/cluster/access/identity/index.hbs | 18 +-
.../vault/cluster/access/methods.hbs | 21 +-
.../mfa/enforcements/enforcement/index.hbs | 5 +-
.../vault/cluster/access/namespaces/index.hbs | 2 +-
.../cluster/access/oidc/assignments/index.hbs | 6 +-
.../cluster/access/oidc/clients/client.hbs | 2 +-
.../vault/cluster/access/oidc/keys/index.hbs | 8 +-
.../vault/cluster/access/oidc/keys/key.hbs | 2 +-
.../access/oidc/providers/provider.hbs | 2 +-
.../cluster/access/oidc/scopes/index.hbs | 6 +-
.../vault/cluster/policies/index.hbs | 9 +-
.../templates/vault/cluster/policy/edit.hbs | 4 +-
.../templates/vault/cluster/policy/show.hbs | 2 +-
.../vault/cluster/secrets/backends.hbs | 12 +-
.../{model-helpers => }/database-helpers.js | 0
.../utils/model-helpers/kmip-role-fields.js | 27 -
ui/app/utils/openapi-helpers.ts | 202 +-
ui/app/utils/openapi-to-attrs.js | 121 +-
.../utils/{model-helpers => }/validators.js | 0
ui/config/deprecation-workflow.js | 19 +-
ui/docs/client-pagination.md | 14 +-
ui/docs/ember-engines.md | 6 +-
ui/docs/model-validations.md | 55 +-
ui/docs/models.md | 301 +-
.../messages/page/create-and-edit.js | 5 +-
.../addon/components/messages/page/details.js | 6 +-
.../addon/components/messages/page/list.hbs | 7 +-
.../addon/components/messages/page/list.js | 8 +-
ui/lib/config-ui/addon/engine.js | 11 +-
.../config-ui/addon/routes/messages/index.js | 4 +-
.../core/addon/components/confirm-action.hbs | 5 +-
ui/lib/core/addon/components/json-editor.hbs | 2 +-
ui/lib/core/addon/components/json-editor.js | 2 +-
.../core/addon/components/kv-object-editor.js | 7 +-
ui/lib/core/addon/components/linked-block.js | 11 +-
.../components/replication-mode-summary.js | 68 +
.../core/addon/components/replication-page.js | 8 +-
ui/lib/core/addon/helpers/transition-to.js | 31 -
ui/lib/core/addon/mixins/list-route.js | 4 +-
.../core/addon/mixins/replication-actions.js | 1 +
ui/lib/core/addon/modifiers/code-mirror.js | 2 +-
.../components/replication-mode-summary.hbs | 122 +
.../components/replication-mode-summary.js | 6 +
.../addon/components/edit-form-kmip-role.js | 55 +
.../kmip/addon/components/kmip/role-form.hbs | 82 -
.../kmip/addon/components/kmip/role-form.js | 107 -
.../addon/controllers/credentials/show.js | 2 +-
ui/lib/kmip/addon/controllers/role.js | 2 +-
ui/lib/kmip/addon/engine.js | 22 +-
.../addon/resolver.js} | 4 +-
ui/lib/kmip/addon/routes/configure.js | 2 +-
ui/lib/kmip/addon/routes/credentials/index.js | 4 +-
ui/lib/kmip/addon/routes/scope/roles.js | 4 +-
.../kmip/addon/routes/scope/roles/create.js | 1 -
ui/lib/kmip/addon/routes/scopes/index.js | 8 +-
.../components/edit-form-kmip-role.hbs | 107 +
.../addon/templates/credentials/index.hbs | 9 +-
ui/lib/kmip/addon/templates/role/edit.hbs | 5 +-
ui/lib/kmip/addon/templates/scope/roles.hbs | 9 +-
.../addon/templates/scope/roles/create.hbs | 6 +-
ui/lib/kmip/addon/templates/scopes/index.hbs | 5 +-
ui/lib/kmip/index.js | 4 -
.../addon/components/page/configure.js | 2 +-
.../addon/components/page/credentials.js | 2 +-
.../addon/components/page/overview.js | 2 +-
.../components/page/role/create-and-edit.js | 2 +-
.../addon/components/page/role/details.js | 2 +-
.../addon/components/page/roles.hbs | 7 +-
.../kubernetes/addon/components/page/roles.js | 2 +-
ui/lib/kubernetes/addon/engine.js | 2 +-
ui/lib/kubernetes/addon/routes/index.js | 2 +-
.../addon/routes/roles/role/index.js | 2 +-
ui/lib/kv/addon/components/kv-data-fields.hbs | 27 +-
ui/lib/kv/addon/components/kv-data-fields.js | 9 -
ui/lib/kv/addon/components/kv-list-filter.js | 2 +-
.../addon/components/kv-patch/editor/form.js | 7 +-
.../components/kv-patch/subkeys-reveal.hbs | 2 +-
ui/lib/kv/addon/components/kv-paths-card.hbs | 4 +-
ui/lib/kv/addon/components/page/list.hbs | 24 +-
ui/lib/kv/addon/components/page/list.js | 6 +-
.../addon/components/page/secret/details.js | 2 +-
.../kv/addon/components/page/secret/edit.js | 2 +-
.../page/secret/metadata/details.js | 5 +-
.../page/secret/metadata/version-history.hbs | 6 +-
.../kv/addon/components/page/secret/patch.js | 2 +-
.../addon/components/page/secrets/create.js | 6 +-
ui/lib/kv/addon/engine.js | 3 +-
ui/lib/kv/addon/routes/index.js | 2 +-
ui/lib/kv/addon/routes/list-directory.js | 6 +-
ui/lib/kv/addon/routes/secret/index.js | 2 +-
ui/lib/kv/addon/routes/secret/patch.js | 2 +-
.../ldap/addon/components/page/configure.ts | 2 +-
.../ldap/addon/components/page/libraries.hbs | 11 +-
.../page/library/create-and-edit.ts | 2 +-
.../addon/components/page/library/details.ts | 2 +-
.../page/library/details/accounts.hbs | 2 +-
.../page/library/details/accounts.ts | 4 +-
ui/lib/ldap/addon/components/page/overview.ts | 2 +-
.../components/page/role/create-and-edit.ts | 8 +-
.../addon/components/page/role/details.ts | 8 +-
ui/lib/ldap/addon/components/page/roles.hbs | 27 +-
ui/lib/ldap/addon/components/page/roles.ts | 9 +-
ui/lib/ldap/addon/engine.js | 2 +-
.../routes/libraries/library/check-out.ts | 2 +-
.../routes/libraries/library/details/index.ts | 2 +-
.../addon/routes/libraries/library/index.ts | 2 +-
ui/lib/ldap/addon/routes/roles.ts | 6 +-
ui/lib/ldap/addon/routes/roles/role/index.ts | 2 +-
ui/lib/open-api-explorer/addon/engine.js | 2 +-
.../page/pki-configuration-details.ts | 2 +-
.../components/page/pki-configuration-edit.ts | 2 +-
.../components/page/pki-configure-create.ts | 6 +-
.../addon/components/page/pki-issuer-edit.ts | 2 +-
.../addon/components/page/pki-issuer-list.hbs | 5 +-
.../components/page/pki-issuer-rotate-root.ts | 6 +-
.../addon/components/page/pki-key-details.ts | 2 +-
.../addon/components/page/pki-key-list.hbs | 6 +-
.../pki/addon/components/page/pki-overview.ts | 2 +-
.../addon/components/page/pki-role-details.ts | 2 +-
.../addon/components/page/pki-tidy-status.ts | 2 +-
.../pki/addon/components/pki-generate-root.ts | 2 +-
ui/lib/pki/addon/components/pki-key-form.hbs | 125 +-
ui/lib/pki/addon/components/pki-key-form.ts | 10 +-
.../pki/addon/components/pki-role-generate.ts | 8 +-
ui/lib/pki/addon/components/pki-tidy-form.ts | 2 +-
ui/lib/pki/addon/engine.js | 3 +-
ui/lib/pki/addon/routes/certificates/index.js | 5 +-
ui/lib/pki/addon/routes/index.js | 2 +-
ui/lib/pki/addon/routes/issuers/index.js | 4 +-
ui/lib/pki/addon/routes/keys/index.js | 5 +-
ui/lib/pki/addon/routes/roles/create.js | 6 -
ui/lib/pki/addon/routes/roles/index.js | 5 +-
.../addon/templates/certificates/index.hbs | 5 +-
ui/lib/pki/addon/templates/roles/index.hbs | 6 +-
.../components/replication-overview-mode.hbs | 49 -
.../components/replication-overview-mode.js | 39 -
.../addon/components/replication-summary.js | 8 +-
.../addon/controllers/application.js | 2 +-
.../mode/secondaries/config-edit.js | 2 +-
.../addon/controllers/replication-mode.js | 2 +-
ui/lib/replication/addon/engine.js | 2 +-
.../replication/addon/routes/application.js | 2 +-
ui/lib/replication/addon/routes/mode.js | 2 +-
ui/lib/replication/addon/routes/mode/index.js | 2 +-
.../replication/addon/routes/mode/manage.js | 2 +-
.../addon/routes/mode/secondaries.js | 2 +-
.../routes/mode/secondaries/config-create.js | 2 +-
.../addon/routes/replication-base.js | 2 +-
.../components/replication-summary.hbs | 113 +-
ui/lib/replication/addon/templates/index.hbs | 31 +-
.../templates/mode/secondaries/index.hbs | 6 +-
.../components/secrets/destination-header.ts | 8 +-
.../components/secrets/page/destinations.hbs | 9 +-
.../components/secrets/page/destinations.ts | 8 +-
.../page/destinations/create-and-edit.ts | 8 +-
.../page/destinations/destination/secrets.hbs | 13 +-
.../page/destinations/destination/secrets.ts | 8 +-
.../page/destinations/destination/sync.ts | 6 +-
.../components/secrets/page/overview.hbs | 6 +-
.../secrets/sync-activation-modal.ts | 2 +-
ui/lib/sync/addon/engine.js | 2 +-
ui/lib/sync/addon/routes/index.ts | 2 +-
ui/lib/sync/addon/routes/secrets.ts | 2 +-
.../secrets/destinations/destination.ts | 2 +-
.../secrets/destinations/destination/index.ts | 2 +-
.../destinations/destination/secrets.ts | 6 +-
.../routes/secrets/destinations/index.ts | 8 +-
ui/lib/sync/addon/routes/secrets/overview.ts | 2 +-
ui/mirage/handlers/ldap.js | 71 +-
ui/mirage/models/ldap-account-status.js | 13 -
ui/mirage/scenarios/ldap.js | 13 +-
ui/package.json | 15 +-
.../access/identity/_shared-tests.js | 8 +-
ui/tests/acceptance/access/methods-test.js | 72 +-
ui/tests/acceptance/auth-list-test.js | 16 +-
ui/tests/acceptance/auth/test-helper.js | 15 +-
ui/tests/acceptance/enterprise-kmip-test.js | 65 +-
ui/tests/acceptance/enterprise-kmse-test.js | 5 +-
.../acceptance/enterprise-transform-test.js | 39 +-
ui/tests/acceptance/mfa-method-test.js | 8 +-
.../acceptance/oidc-config/clients-test.js | 4 +-
.../pki/pki-engine-workflow-test.js | 8 +-
ui/tests/acceptance/raft-storage-test.js | 5 +-
.../secrets/backend/alicloud/secret-test.js | 6 +-
.../backend/aws/aws-configuration-test.js | 5 +-
.../secrets/backend/engines-test.js | 77 +-
.../secrets/backend/gcpkms/secrets-test.js | 6 +-
.../backend/kv/kv-v2-workflow-create-test.js | 18 +-
.../kv/kv-v2-workflow-edge-cases-test.js | 30 +-
.../kv/kv-v2-workflow-navigation-test.js | 16 +-
.../secrets/backend/kv/secret-test.js | 31 +-
.../secrets/backend/ldap/libraries-test.js | 39 +-
.../secrets/backend/ldap/overview-test.js | 67 +-
.../secrets/backend/ldap/roles-test.js | 30 +-
.../secrets/backend/ssh/configuration-test.js | 7 +-
.../secrets/backend/ssh/roles-test.js | 25 +-
ui/tests/acceptance/settings-test.js | 35 +-
.../acceptance/settings/auth/enable-test.js | 36 +-
.../settings/mount-secret-backend-test.js | 254 +-
ui/tests/helpers/auth/auth-form-selectors.ts | 1 -
ui/tests/helpers/auth/auth-helpers.ts | 11 +-
.../components/mount-backend-form-helpers.js | 19 -
.../mount-backend-form-selectors.ts | 10 -
ui/tests/helpers/general-selectors.ts | 2 -
ui/tests/helpers/kv/kv-selectors.js | 4 +-
.../helpers/secret-engine/policy-generator.ts | 2 +-
.../secret-engine/secret-engine-helpers.js | 46 -
.../secret-engine/secret-engine-selectors.ts | 8 +-
.../integration/components/auth/page-test.js | 2 +-
.../components/json-editor-test.js | 31 -
.../components/kv/kv-data-fields-test.js | 32 +-
.../kv/kv-patch/editor/form-test.js | 8 +-
.../components/kv/kv-patch/json-form-test.js | 6 +-
.../components/kv/page/kv-page-list-test.js | 1 -
.../kv/page/kv-page-secret-details-test.js | 16 +-
.../page/library/details/accounts-test.js | 8 +-
.../components/ldap/page/roles-test.js | 4 +-
.../components/mount-backend-form-test.js | 147 +-
.../mount-backend/type-form-test.js | 31 +-
.../components/okta-number-challenge-test.js | 3 +-
.../components/page/mode-index-test.js | 2 +-
.../replication-overview-mode-test.js | 124 -
.../sync/secrets/destination-header-test.js | 2 +-
.../sync/secrets/page/destinations-test.js | 2 +-
.../page/destinations/create-and-edit-test.js | 2 +-
.../destinations/destination/sync-test.js | 2 +-
.../components/transit-edit-test.js | 6 -
.../components/transit-key-actions-test.js | 44 -
.../integration/helpers/transition-to-test.js | 77 -
ui/tests/pages/access/methods.js | 24 +
.../pages/components/mount-backend-form.js | 33 +
ui/tests/pages/secrets/backend/kmip/roles.js | 2 +-
.../pages/secrets/backend/ssh/edit-role.js | 9 +
.../pages/secrets/backend/ssh/generate-otp.js | 8 +-
ui/tests/pages/settings/auth/enable.js | 5 +-
.../pages/settings/mount-secret-backend.js | 5 +-
ui/tests/unit/adapters/auth-method-test.js | 4 +-
ui/tests/unit/adapters/kmip/role-test.js | 10 +-
ui/tests/unit/adapters/kv/metadata-test.js | 2 +-
.../unit/adapters/sync/associations-test.js | 3 +-
.../model-expanded-attributes-test.js | 108 +-
.../unit/decorators/model-form-fields-test.js | 140 +-
.../unit/decorators/model-validations-test.js | 2 +-
ui/tests/unit/models/generated-item-test.js | 18 -
ui/tests/unit/services/pagination-test.js | 288 -
ui/tests/unit/services/store-test.js | 257 +
ui/tests/unit/utils/kmip-role-fields-test.js | 52 -
ui/tests/unit/utils/openapi-helpers-test.js | 247 +-
ui/tests/unit/utils/openapi-to-attrs-test.js | 219 +-
.../{model-helpers => }/validators-test.js | 2 +-
ui/types/vault/services/pagination.d.ts | 16 -
ui/types/vault/services/store.d.ts | 11 +-
ui/yarn.lock | 4773 +++++++++--------
vault/activity/activity_log.pb.go | 181 +-
vault/activity/activity_log.proto | 4 -
vault/activity_log.go | 803 +--
vault/activity_log_stubs_oss.go | 15 -
vault/activity_log_test.go | 1100 +---
vault/activity_log_testing_util.go | 149 +-
vault/activity_log_util_common.go | 42 +-
vault/activity_log_util_common_test.go | 70 +-
vault/bench/smoke_bench_test.go | 136 -
vault/core.go | 13 -
vault/core_test.go | 69 +-
.../acme_regeneration_test.go | 6 +-
.../activity_testonly_oss_test.go | 2 +-
.../activity_testonly_test.go | 26 +-
vault/external_tests/audit/audit_test.go | 300 --
vault/external_tests/raft/raft_test.go | 30 -
vault/external_tests/raftha/raft_ha_test.go | 104 -
vault/ha.go | 13 -
.../proto/link_control/link_control.pb.go | 48 +-
.../link_control/link_control_grpc.pb.go | 25 +-
vault/hcp_link/proto/meta/meta.pb.go | 378 +-
vault/hcp_link/proto/meta/meta_grpc.pb.go | 25 +-
vault/hcp_link/proto/node_status/status.pb.go | 48 +-
vault/identity_store_test.go | 232 -
vault/identity_store_test_stubs_oss.go | 21 -
vault/identity_store_util.go | 100 +-
vault/logical_system.go | 88 +-
.../logical_system_activity_write_testonly.go | 158 +-
...cal_system_activity_write_testonly_test.go | 29 +-
vault/logical_system_test.go | 45 -
vault/raft.go | 7 +-
vault/request_forwarding.go | 114 -
vault/request_forwarding_service.pb.go | 136 +-
vault/request_forwarding_service_grpc.pb.go | 69 +-
vault/request_forwarding_test.go | 131 -
vault/seal/multi_wrap_value.pb.go | 26 +-
vault/testing.go | 96 +-
vault/tokens/token.pb.go | 48 +-
version/VERSION | 2 +-
website/content/api-docs/auth/cert.mdx | 2 +-
website/content/api-docs/secret/pki/index.mdx | 12 +-
website/content/api-docs/secret/transit.mdx | 70 +-
.../content/api-docs/system/seal-status.mdx | 42 +-
.../api-docs/system/storage/raftautopilot.mdx | 112 +-
.../docs/agent-and-proxy/autoauth/index.mdx | 4 -
.../agent-and-proxy/autoauth/methods/cert.mdx | 13 +-
website/content/docs/audit/index.mdx | 4 -
website/content/docs/commands/auth/move.mdx | 14 -
.../content/docs/commands/operator/raft.mdx | 165 +-
.../content/docs/commands/secrets/move.mdx | 15 -
website/content/docs/commands/server.mdx | 5 -
website/content/docs/commands/status.mdx | 3 +-
website/content/docs/concepts/events.mdx | 8 -
.../concepts/integrated-storage/autopilot.mdx | 43 +-
.../concepts/integrated-storage/index.mdx | 18 +-
.../docs/enterprise/entropy-augmentation.mdx | 35 +-
website/content/docs/enterprise/lts.mdx | 18 +-
.../docs/enterprise/redundancy-zones.mdx | 4 -
website/content/docs/enterprise/sealwrap.mdx | 19 -
.../docs/internals/integrated-storage.mdx | 22 -
.../docs/platform/aws/lambda-extension.mdx | 3 +-
.../content/docs/platform/k8s/helm/index.mdx | 13 +-
.../content/docs/platform/k8s/helm/run.mdx | 20 +-
website/content/docs/secrets/aws.mdx | 3 +-
.../docs/secrets/databases/mongodbatlas.mdx | 4 +-
.../secrets/kv/kv-v2/cookbook/patch-data.mdx | 7 +-
.../content/docs/secrets/transform/index.mdx | 3 +-
.../docs/secrets/transform/tokenization.mdx | 4 +-
.../content/docs/secrets/transit/index.mdx | 4 +-
.../docs/upgrading/upgrade-to-1.16.x.mdx | 2 +-
.../docs/upgrading/upgrade-to-1.17.x.mdx | 6 +-
.../docs/upgrading/upgrade-to-1.19.x.mdx | 44 -
website/content/partials/autopilot/config.mdx | 53 -
.../content/partials/autopilot/node-types.mdx | 6 -
.../partials/autopilot/redundancy-zones.mdx | 25 -
website/content/partials/cli/README.md | 98 +-
.../partials/cli/agent/args/file_path.mdx | 4 -
.../partials/cli/agent/flags/config.mdx | 5 -
.../content/partials/cli/agent/flags/exec.mdx | 4 -
.../cli/agent/flags/exit-after-auth.mdx | 4 -
.../content/partials/cli/agent/flags/path.mdx | 4 -
.../content/partials/cli/agent/flags/type.mdx | 4 -
.../partials/cli/audit/args/device_path.mdx | 6 +-
.../partials/cli/audit/args/device_type.mdx | 8 +-
.../cli/audit/args/file/file_path.mdx | 6 +-
.../partials/cli/audit/args/file/mode.mdx | 6 +-
.../cli/audit/args/socket/address.mdx | 6 +-
.../cli/audit/args/socket/socket_type.mdx | 6 +-
.../cli/audit/args/socket/write_timeout.mdx | 6 +-
.../cli/audit/args/syslog/facility.mdx | 6 +-
.../partials/cli/audit/args/syslog/tag.mdx | 6 +-
.../partials/cli/audit/flags/description.mdx | 4 -
.../partials/cli/audit/flags/detailed.mdx | 4 -
.../partials/cli/audit/flags/local.mdx | 4 -
.../content/partials/cli/audit/flags/path.mdx | 4 -
.../audit/options/elide_list_responses.mdx | 6 +-
.../partials/cli/audit/options/exclude.mdx | 14 +-
.../partials/cli/audit/options/fallback.mdx | 6 +-
.../partials/cli/audit/options/filter.mdx | 6 +-
.../partials/cli/audit/options/format.mdx | 6 +-
.../cli/audit/options/hmac_accessor.mdx | 6 +-
.../partials/cli/audit/options/log_raw.mdx | 6 +-
.../partials/cli/audit/options/prefix.mdx | 6 +-
.../partials/cli/shared/flags/log-file.mdx | 4 -
.../cli/shared/flags/log-rotate-bytes.mdx | 4 -
.../cli/shared/flags/log-rotate-duration.mdx | 4 -
.../cli/shared/flags/log-rotate-max-files.mdx | 6 +-
.../partials/global-settings/both/address.mdx | 4 -
.../global-settings/both/agent-address.mdx | 4 -
.../partials/global-settings/both/ca-cert.mdx | 4 -
.../partials/global-settings/both/ca-path.mdx | 4 -
.../global-settings/both/client-cert.mdx | 4 -
.../global-settings/both/client-key.mdx | 4 -
.../both/disable-redirects.mdx | 4 -
.../partials/global-settings/both/format.mdx | 10 +-
.../global-settings/both/log-format.mdx | 4 -
.../global-settings/both/log-level.mdx | 4 -
.../partials/global-settings/both/mfa.mdx | 4 -
.../global-settings/both/namespace.mdx | 4 -
.../global-settings/both/tls-server-name.mdx | 4 -
.../global-settings/both/tls-skip-verify.mdx | 4 -
.../global-settings/both/wrap-ttl.mdx | 4 -
.../global-settings/env/cli_no_color.mdx | 4 -
.../global-settings/env/client_timeout.mdx | 4 -
.../global-settings/env/cluster_addr.mdx | 4 -
.../global-settings/env/http_proxy.mdx | 4 -
.../partials/global-settings/env/license.mdx | 4 -
.../global-settings/env/license_path.mdx | 4 -
.../global-settings/env/max_retries.mdx | 4 -
.../global-settings/env/proxy_addr.mdx | 4 -
.../global-settings/env/rate_limit.mdx | 4 -
.../global-settings/env/redirect_addr.mdx | 4 -
.../global-settings/env/skip_verify.mdx | 4 -
.../global-settings/env/srv_lookup.mdx | 4 -
.../partials/global-settings/env/token.mdx | 4 -
.../partials/global-settings/flags/header.mdx | 4 -
.../global-settings/flags/non-interactive.mdx | 4 -
.../flags/output-curl-string.mdx | 4 -
.../global-settings/flags/output-policy.mdx | 4 -
.../global-settings/flags/policy-override.mdx | 4 -
.../global-settings/flags/unlock-key.mdx | 4 -
.../gui-page-instructions/select-kv-data.mdx | 14 -
.../known-issues/aws-auth-external-id.mdx | 19 -
...ync-activation-flags-cache-not-updated.mdx | 23 -
.../kubernetes-supported-versions.mdx | 2 +-
website/data/docs-nav-data.json | 4 -
website/public/img/gui/kv/patch-data.png | Bin 53104 -> 35270 bytes
website/public/img/gui/kv/random-string.png | Bin 26691 -> 23378 bytes
website/public/img/gui/kv/read-data.png | Bin 28571 -> 26248 bytes
website/public/img/gui/kv/undelete-data.png | Bin 38935 -> 32274 bytes
website/templates/cli-page.mdx | 104 -
627 files changed, 10909 insertions(+), 18220 deletions(-)
delete mode 100644 .github/actions/run-apupgrade-tests/action.yml
delete mode 100644 .github/actions/set-up-sqlc/action.yml
delete mode 100644 .github/scripts/install-cob.sh
delete mode 100644 .github/workflows/hack-week-benchmark.yml
delete mode 100644 api/auth/approle/LICENSE
delete mode 100644 api/auth/aws/LICENSE
delete mode 100644 api/auth/azure/LICENSE
delete mode 100644 api/auth/gcp/LICENSE
delete mode 100644 api/auth/kubernetes/LICENSE
delete mode 100644 api/auth/ldap/LICENSE
delete mode 100644 api/auth/userpass/LICENSE
delete mode 100644 builtin/logical/pki/issuing/issuers_oss.go
delete mode 100644 builtin/logical/pki/path_fetch_issuers_stubs_oss.go
delete mode 100644 builtin/logical/transit/api_utils.go
delete mode 100644 builtin/logical/transit/api_utils_test.go
delete mode 100644 builtin/logical/transit/path_datakey_test.go
delete mode 100644 builtin/logical/transit/path_sign_verify_ce.go
delete mode 100644 changelog/18615.txt
delete mode 100644 changelog/22726.txt
delete mode 100644 changelog/25486.txt
delete mode 100644 changelog/27033.txt
delete mode 100644 changelog/27920.txt
delete mode 100644 changelog/27927.txt
delete mode 100644 changelog/28126.txt
delete mode 100644 changelog/28330.txt
delete mode 100644 changelog/28456.txt
delete mode 100644 changelog/28596.txt
delete mode 100644 changelog/28654.txt
delete mode 100644 changelog/28678.txt
delete mode 100644 changelog/28798.txt
delete mode 100644 changelog/28808.txt
delete mode 100644 changelog/28822.txt
delete mode 100644 changelog/28867.txt
delete mode 100644 changelog/28875.txt
delete mode 100644 changelog/28938.txt
delete mode 100644 command/agentproxyshared/auth/cert/test-fixtures/keys/cert1.pem
delete mode 100644 command/agentproxyshared/auth/cert/test-fixtures/keys/key1.pem
delete mode 100644 ui/app/models/generated-item.js
rename ui/app/services/{pagination.js => store.js} (60%)
create mode 100644 ui/app/styles/components/replication-mode-summary.scss
rename ui/app/utils/{model-helpers => }/database-helpers.js (100%)
delete mode 100644 ui/app/utils/model-helpers/kmip-role-fields.js
rename ui/app/utils/{model-helpers => }/validators.js (100%)
create mode 100644 ui/lib/core/addon/components/replication-mode-summary.js
delete mode 100644 ui/lib/core/addon/helpers/transition-to.js
create mode 100644 ui/lib/core/addon/templates/components/replication-mode-summary.hbs
create mode 100644 ui/lib/core/app/components/replication-mode-summary.js
create mode 100644 ui/lib/kmip/addon/components/edit-form-kmip-role.js
delete mode 100644 ui/lib/kmip/addon/components/kmip/role-form.hbs
delete mode 100644 ui/lib/kmip/addon/components/kmip/role-form.js
rename ui/lib/{core/app/helpers/transition-to.js => kmip/addon/resolver.js} (54%)
create mode 100644 ui/lib/kmip/addon/templates/components/edit-form-kmip-role.hbs
delete mode 100644 ui/lib/replication/addon/components/replication-overview-mode.hbs
delete mode 100644 ui/lib/replication/addon/components/replication-overview-mode.js
delete mode 100644 ui/mirage/models/ldap-account-status.js
delete mode 100644 ui/tests/helpers/components/mount-backend-form-helpers.js
delete mode 100644 ui/tests/helpers/components/mount-backend-form-selectors.ts
delete mode 100644 ui/tests/integration/components/replication-overview-mode-test.js
delete mode 100644 ui/tests/integration/helpers/transition-to-test.js
create mode 100644 ui/tests/pages/access/methods.js
create mode 100644 ui/tests/pages/components/mount-backend-form.js
delete mode 100644 ui/tests/unit/models/generated-item-test.js
delete mode 100644 ui/tests/unit/services/pagination-test.js
create mode 100644 ui/tests/unit/services/store-test.js
delete mode 100644 ui/tests/unit/utils/kmip-role-fields-test.js
rename ui/tests/unit/utils/{model-helpers => }/validators-test.js (98%)
delete mode 100644 ui/types/vault/services/pagination.d.ts
delete mode 100644 vault/activity_log_stubs_oss.go
delete mode 100644 vault/bench/smoke_bench_test.go
delete mode 100644 vault/external_tests/audit/audit_test.go
delete mode 100644 vault/identity_store_test_stubs_oss.go
delete mode 100644 vault/request_forwarding_test.go
delete mode 100644 website/content/docs/upgrading/upgrade-to-1.19.x.mdx
delete mode 100644 website/content/partials/autopilot/config.mdx
delete mode 100644 website/content/partials/autopilot/node-types.mdx
delete mode 100644 website/content/partials/autopilot/redundancy-zones.mdx
delete mode 100644 website/content/partials/gui-page-instructions/select-kv-data.mdx
delete mode 100644 website/content/partials/known-issues/aws-auth-external-id.mdx
delete mode 100644 website/content/partials/known-issues/sync-activation-flags-cache-not-updated.mdx
delete mode 100644 website/templates/cli-page.mdx
diff --git a/.github/actions/changed-files/action.yml b/.github/actions/changed-files/action.yml
index 92e6638dfe28..d36c450877f6 100644
--- a/.github/actions/changed-files/action.yml
+++ b/.github/actions/changed-files/action.yml
@@ -19,9 +19,6 @@ outputs:
ui-changed:
description: Whether or not the web UI was modified.
value: ${{ steps.changed-files.outputs.ui-changed }}
- autopilot-changed:
- description: Whether or not files pertaining to Autopilot were modified.
- value: ${{ steps.changed-files.outputs.autopilot-changed }}
files:
description: All of the file names that changed.
value: ${{ steps.changed-files.outputs.files }}
diff --git a/.github/actions/install-external-tools/action.yml b/.github/actions/install-external-tools/action.yml
index 5dfeaf61fce2..1f649ca74208 100644
--- a/.github/actions/install-external-tools/action.yml
+++ b/.github/actions/install-external-tools/action.yml
@@ -19,13 +19,12 @@ runs:
- uses: ./.github/actions/set-up-gotestsum
- uses: ./.github/actions/set-up-misspell
- uses: ./.github/actions/set-up-shfmt
- - uses: ./.github/actions/set-up-sqlc
- uses: ./.github/actions/set-up-staticcheck
# We assume that the Go toolchain will be managed by the caller workflow so we don't set one
# up here.
- - run: ./.github/scripts/retry-command.sh go install google.golang.org/protobuf/cmd/protoc-gen-go@latest
+ - run: ./.github/scripts/retry-command.sh go install google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
shell: bash
- - run: ./.github/scripts/retry-command.sh go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest
+ - run: ./.github/scripts/retry-command.sh go install google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.4.0
shell: bash
- run: ./.github/scripts/retry-command.sh go install github.com/favadi/protoc-go-inject-tag@latest
shell: bash
diff --git a/.github/actions/run-apupgrade-tests/action.yml b/.github/actions/run-apupgrade-tests/action.yml
deleted file mode 100644
index 960682d6709d..000000000000
--- a/.github/actions/run-apupgrade-tests/action.yml
+++ /dev/null
@@ -1,123 +0,0 @@
-# Copyright (c) HashiCorp, Inc.
-# SPDX-License-Identifier: BUSL-1.1
-
----
-name: Run Autopilot upgrade tests
-description: |
- This action runs the Autopilot upgrade tests on Vault Enterprise.
- It downloads the needed Vault Enterprise source version binaries from the releases page, checks out the specified ref
- from the Vault Enterprise repository, builds the target version binary of Vault for Autopilot upgrade testing,
- and runs the Autopilot upgrade tool with the specified source versions.
-inputs:
- checkout-ref:
- required: true
- type: string
- description: |
- The branch, tag, or SHA to checkout from the Vault Enterprise repository, e.g. 'refs/heads/main'.
- The target version binary of Vault for Autopilot upgrade testing will be built from this checkout.
- github-token:
- required: true
- type: string
- description: |
- The GitHub token to use for checking out the needed repositories.
- source-versions:
- required: true
- type: string
- description: |
- The source versions of Vault for Autopilot upgrade testing as a comma-separated string,
- e.g. '1.16.9+ent,1.17.7+ent,1.18.0+ent'.
-
-runs:
- using: composite
- steps:
- - name: Authenticate to Vault
- id: vault-auth
- shell: bash
- run: vault-auth
- - name: Fetch Secrets
- id: secrets
- uses: hashicorp/vault-action@d1720f055e0635fd932a1d2a48f87a666a57906c # v3.0.0
- with:
- url: ${{ steps.vault-auth.outputs.addr }}
- caCertificate: ${{ steps.vault-auth.outputs.ca_certificate }}
- token: ${{ steps.vault-auth.outputs.token }}
- secrets: |
- kv/data/github/${{ github.repository }}/github-token username-and-token | github-token;
- kv/data/github/${{ github.repository }}/license license_1 | VAULT_LICENSE_CI;
- kv/data/github/${{ github.repository }}/license license_2 | VAULT_LICENSE_2;
- - name: Setup Git configuration (private)
- id: setup-git-private
- if: github.repository == 'hashicorp/vault-enterprise'
- shell: bash
- run: |
- git config --global url."https://${{ steps.secrets.outputs.github-token }}@github.com".insteadOf https://github.com
- - name: Check out the .release/versions.hcl file from Vault Enterprise repository
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- with:
- ref: ${{ inputs.checkout-ref }}
- - uses: ./.github/actions/set-up-go
- with:
- github-token: ${{ inputs.github-token }}
- - name: Build external tools
- uses: ./.github/actions/install-external-tools
- - name: Checkout VCM repository
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- with:
- repository: hashicorp/vcm
- ref: 1fcab6591e3bdc81d2921ca77441bfcf913c6a57
- token: ${{ inputs.github-token }}
- path: vcm
- - name: Checkout Vault tools repository to get the Autopilot upgrade tool
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- with:
- repository: hashicorp/vault-tools
- token: ${{ inputs.github-token }}
- path: vault-tools
- - name: Install needed Vault versions
- env:
- GOPATH: /home/runner/go
- shell: bash
- run: |
- # Split the matched versions into an array
- IFS=',' read -r -a versions <<< "${{ inputs.source-versions }}"
-
- for version in "${versions[@]}"; do
- echo "Installing Vault version $version"
- "${GITHUB_WORKSPACE}/vault-tools/vvm/vvm" install-ent "${version}"
- done
- - name: Build dev binary for binary tests
- env:
- GOPATH: /home/runner/go
- GOPRIVATE: github.com/hashicorp/*
- shell: bash
- run: |
- cd "${GITHUB_WORKSPACE}" || exit 1
- time make prep dev
-
- # Save the binary we just build under the current Vault version number to VVM
- # for apupgrade to use as a target version
- current_version=$(cat version/VERSION)
- "${GITHUB_WORKSPACE}/vault-tools/vvm/vvm" save "${current_version}"
- echo "VAULT_TARGET_VERSION=${current_version}" >> "${GITHUB_ENV}"
- - name: Build VCM
- shell: bash
- run: |
- cd "${GITHUB_WORKSPACE}/vcm" || exit 1
- make
- - name: Build Autopilot upgrade tool
- shell: bash
- run: |
- cd "${GITHUB_WORKSPACE}/vault-tools/apupgrade" || exit 1
- go build -o apupgrade .
- - name: Run Autopilot upgrade tool
- shell: bash
- run: |
- # Write the license to a file for VCM to use
- echo "${{ steps.secrets.outputs.VAULT_LICENSE_CI }}" > "${GITHUB_WORKSPACE}/license.hclic"
- export VAULT_LICENSE_PATH="${GITHUB_WORKSPACE}/license.hclic"
-
- # Unset VAULT_ADDR to avoid any conflicts with VCM and apupgrade
- unset VAULT_ADDR
-
- echo "Running Autopilot upgrade tool with source versions: ${{ inputs.source-versions }} and target version: ${{ env.VAULT_TARGET_VERSION }}"
- "${GITHUB_WORKSPACE}/vault-tools/apupgrade/apupgrade" -versions "${{ inputs.source-versions }},${VAULT_TARGET_VERSION}" -use-vvm
diff --git a/.github/actions/set-up-sqlc/action.yml b/.github/actions/set-up-sqlc/action.yml
deleted file mode 100644
index 8986b3f5e4f1..000000000000
--- a/.github/actions/set-up-sqlc/action.yml
+++ /dev/null
@@ -1,62 +0,0 @@
-# Copyright (c) HashiCorp, Inc.
-# SPDX-License-Identifier: BUSL-1.1
-
----
-name: Set up sqlc from Github releases
-description: Set up sqlc from Github releases
-
-inputs:
- destination:
- description: "Where to install the sqlc binary (default: $HOME/bin/sqlc)"
- type: string
- default: "$HOME/bin/sqlc"
- version:
- description: "The version to install (default: latest)"
- type: string
- default: Latest
-
-outputs:
- destination:
- description: Where the installed sqlc binary is
- value: ${{ steps.install.outputs.destination }}
- destination-dir:
- description: The directory where the installed sqlc binary is
- value: ${{ steps.install.outputs.destination-dir }}
- version:
- description: The installed version of sqlc
- value: ${{ steps.install.outputs.version }}
-
-runs:
- using: composite
- steps:
- - id: install
- shell: bash
- env:
- GH_TOKEN: ${{ github.token }}
- run: |
- VERSION=$(./.github/scripts/retry-command.sh gh release list -R sqlc-dev/sqlc --exclude-drafts --exclude-pre-releases | grep ${{ inputs.version }} | cut -f1)
-
- mkdir -p $(dirname ${{ inputs.destination }})
- DESTINATION="$(readlink -f "${{ inputs.destination }}")"
- DESTINATION_DIR="$(dirname "$DESTINATION")"
- echo "$DESTINATION_DIR" >> "$GITHUB_PATH"
-
- {
- echo "destination=$DESTINATION"
- echo "destination-dir=$DESTINATION_DIR"
- echo "version=$VERSION"
- } | tee -a "$GITHUB_OUTPUT"
-
- ARCH="$(echo "$RUNNER_ARCH" | tr '[:upper:]' '[:lower:]')"
- OS="$(echo "$RUNNER_OS" | tr '[:upper:]' '[:lower:]')"
- if [ "$ARCH" = "x64" ]; then
- export ARCH="amd64"
- fi
- if [ "$OS" = "macos" ]; then
- export OS="darwin"
- fi
-
- ./.github/scripts/retry-command.sh gh release download "$VERSION" --clobber -p "sqlc*_${OS}_${ARCH}.zip" -O sqlc.zip -R sqlc-dev/sqlc
- unzip sqlc.zip
- chmod +x sqlc
- mv sqlc "$DESTINATION"
diff --git a/.github/pull_request_template.md b/.github/pull_request_template.md
index 6f4e36e3f985..4257adb67ed2 100644
--- a/.github/pull_request_template.md
+++ b/.github/pull_request_template.md
@@ -2,8 +2,9 @@
What does this PR do?
### TODO only if you're a HashiCorp employee
-- [ ] **Backport Labels:** If this fix needs to be backported, use the appropriate `backport/` label that matches the desired release branch. Note that in the CE repo, the latest release branch will look like `backport/x.x.x`, but older release branches will be `backport/ent/x.x.x+ent`.
- - [ ] **LTS**: If this fixes a critical security vulnerability or [severity 1](https://www.hashicorp.com/customer-success/enterprise-support) bug, it will also need to be backported to the current [LTS versions](https://developer.hashicorp.com/vault/docs/enterprise/lts#why-is-there-a-risk-to-updating-to-a-non-lts-vault-enterprise-version) of Vault. To ensure this, use **all** available enterprise labels.
+- [ ] **Backport Labels:** If this PR is in the ENT repo and needs to be backported, backport
+ to N, N-1, and N-2, using the `backport/ent/x.x.x+ent` labels. If this PR is in the CE repo, you should only backport to N, using the `backport/x.x.x` label, not the enterprise labels.
+ - [ ] If this fixes a critical security vulnerability or [severity 1](https://www.hashicorp.com/customer-success/enterprise-support) bug, it will also need to be backported to the current [LTS versions](https://developer.hashicorp.com/vault/docs/enterprise/lts#why-is-there-a-risk-to-updating-to-a-non-lts-vault-enterprise-version) of Vault. To ensure this, use **all** available enterprise labels.
- [ ] **ENT Breakage:** If this PR either 1) removes a public function OR 2) changes the signature
of a public function, even if that change is in a CE file, _double check_ that
applying the patch for this PR to the ENT repo and running tests doesn't
diff --git a/.github/scripts/changed-files.sh b/.github/scripts/changed-files.sh
index 1b77fbab104e..f44c6fc26b9d 100755
--- a/.github/scripts/changed-files.sh
+++ b/.github/scripts/changed-files.sh
@@ -28,7 +28,6 @@ fi
docs_changed=false
ui_changed=false
app_changed=false
-autopilot_changed=false
if ! files="$(git diff "${base_commit}...${head_commit}" --name-only)"; then
echo "failed to get changed files from git"
@@ -54,26 +53,14 @@ for file in $(awk -F "/" '{ print $1}' <<< "$files" | uniq); do
app_changed=true
done
-# if the app changed, check to see if anything referencing autopilot specifically was changed
-if [ "$app_changed" = true ]; then
- for file in $files; do
- if grep "raft-autopilot" "$file"; then
- autopilot_changed=true
- break
- fi
- done
-fi
-
echo "app-changed=${app_changed}"
echo "docs-changed=${docs_changed}"
echo "ui-changed=${ui_changed}"
-echo "autopilot_changed=${autopilot_changed}"
echo "files='${files}'"
[ -n "$GITHUB_OUTPUT" ] && {
echo "app-changed=${app_changed}"
echo "docs-changed=${docs_changed}"
echo "ui-changed=${ui_changed}"
- echo "autopilot-changed=${autopilot_changed}"
# Use a random delimiter for multiline strings.
# https://docs.github.com/en/actions/using-workflows/workflow-commands-for-github-actions#multiline-strings
delimiter="$(openssl rand -hex 8)"
diff --git a/.github/scripts/install-cob.sh b/.github/scripts/install-cob.sh
deleted file mode 100644
index 224c2c8b13f7..000000000000
--- a/.github/scripts/install-cob.sh
+++ /dev/null
@@ -1,409 +0,0 @@
-#!/bin/sh
-set -e
-# Code generated by godownloader on 2020-01-12T16:31:14Z. DO NOT EDIT.
-#
-
-usage() {
- this=$1
- cat </dev/null
-}
-echoerr() {
- echo "$@" 1>&2
-}
-log_prefix() {
- echo "$0"
-}
-_logp=6
-log_set_priority() {
- _logp="$1"
-}
-log_priority() {
- if test -z "$1"; then
- echo "$_logp"
- return
- fi
- [ "$1" -le "$_logp" ]
-}
-log_tag() {
- case $1 in
- 0) echo "emerg" ;;
- 1) echo "alert" ;;
- 2) echo "crit" ;;
- 3) echo "err" ;;
- 4) echo "warning" ;;
- 5) echo "notice" ;;
- 6) echo "info" ;;
- 7) echo "debug" ;;
- *) echo "$1" ;;
- esac
-}
-log_debug() {
- log_priority 7 || return 0
- echoerr "$(log_prefix)" "$(log_tag 7)" "$@"
-}
-log_info() {
- log_priority 6 || return 0
- echoerr "$(log_prefix)" "$(log_tag 6)" "$@"
-}
-log_err() {
- log_priority 3 || return 0
- echoerr "$(log_prefix)" "$(log_tag 3)" "$@"
-}
-log_crit() {
- log_priority 2 || return 0
- echoerr "$(log_prefix)" "$(log_tag 2)" "$@"
-}
-uname_os() {
- os=$(uname -s | tr '[:upper:]' '[:lower:]')
- case "$os" in
- cygwin_nt*) os="windows" ;;
- mingw*) os="windows" ;;
- msys_nt*) os="windows" ;;
- esac
- echo "$os"
-}
-uname_arch() {
- arch=$(uname -m)
- case $arch in
- x86_64) arch="amd64" ;;
- x86) arch="386" ;;
- i686) arch="386" ;;
- i386) arch="386" ;;
- aarch64) arch="arm64" ;;
- armv5*) arch="armv5" ;;
- armv6*) arch="armv6" ;;
- armv7*) arch="armv7" ;;
- esac
- echo ${arch}
-}
-uname_os_check() {
- os=$(uname_os)
- case "$os" in
- darwin) return 0 ;;
- dragonfly) return 0 ;;
- freebsd) return 0 ;;
- linux) return 0 ;;
- android) return 0 ;;
- nacl) return 0 ;;
- netbsd) return 0 ;;
- openbsd) return 0 ;;
- plan9) return 0 ;;
- solaris) return 0 ;;
- windows) return 0 ;;
- esac
- log_crit "uname_os_check '$(uname -s)' got converted to '$os' which is not a GOOS value. Please file bug at https://github.com/client9/shlib"
- return 1
-}
-uname_arch_check() {
- arch=$(uname_arch)
- case "$arch" in
- 386) return 0 ;;
- amd64) return 0 ;;
- arm64) return 0 ;;
- armv5) return 0 ;;
- armv6) return 0 ;;
- armv7) return 0 ;;
- ppc64) return 0 ;;
- ppc64le) return 0 ;;
- mips) return 0 ;;
- mipsle) return 0 ;;
- mips64) return 0 ;;
- mips64le) return 0 ;;
- s390x) return 0 ;;
- amd64p32) return 0 ;;
- esac
- log_crit "uname_arch_check '$(uname -m)' got converted to '$arch' which is not a GOARCH value. Please file bug report at https://github.com/client9/shlib"
- return 1
-}
-untar() {
- tarball=$1
- case "${tarball}" in
- *.tar.gz | *.tgz) tar --no-same-owner -xzf "${tarball}" ;;
- *.tar) tar --no-same-owner -xf "${tarball}" ;;
- *.zip) unzip "${tarball}" ;;
- *)
- log_err "untar unknown archive format for ${tarball}"
- return 1
- ;;
- esac
-}
-http_download_curl() {
- local_file=$1
- source_url=$2
- header=$3
- if [ -z "$header" ]; then
- code=$(curl -w '%{http_code}' -sL -o "$local_file" "$source_url")
- else
- code=$(curl -w '%{http_code}' -sL -H "$header" -o "$local_file" "$source_url")
- fi
- if [ "$code" != "200" ]; then
- log_debug "http_download_curl received HTTP status $code"
- return 1
- fi
- return 0
-}
-http_download_wget() {
- local_file=$1
- source_url=$2
- header=$3
- if [ -z "$header" ]; then
- wget -q -O "$local_file" "$source_url"
- else
- wget -q --header "$header" -O "$local_file" "$source_url"
- fi
-}
-http_download() {
- log_debug "http_download $2"
- if is_command curl; then
- http_download_curl "$@"
- return
- elif is_command wget; then
- http_download_wget "$@"
- return
- fi
- log_crit "http_download unable to find wget or curl"
- return 1
-}
-http_copy() {
- tmp=$(mktemp)
- http_download "${tmp}" "$1" "$2" || return 1
- body=$(cat "$tmp")
- rm -f "${tmp}"
- echo "$body"
-}
-github_release() {
- owner_repo=$1
- version=$2
- test -z "$version" && version="latest"
- giturl="https://github.com/${owner_repo}/releases/${version}"
- json=$(http_copy "$giturl" "Accept:application/json")
- test -z "$json" && return 1
- version=$(echo "$json" | tr -s '\n' ' ' | sed 's/.*"tag_name":"//' | sed 's/".*//')
- test -z "$version" && return 1
- echo "$version"
-}
-hash_sha256() {
- TARGET=${1:-/dev/stdin}
- if is_command gsha256sum; then
- hash=$(gsha256sum "$TARGET") || return 1
- echo "$hash" | cut -d ' ' -f 1
- elif is_command sha256sum; then
- hash=$(sha256sum "$TARGET") || return 1
- echo "$hash" | cut -d ' ' -f 1
- elif is_command shasum; then
- hash=$(shasum -a 256 "$TARGET" 2>/dev/null) || return 1
- echo "$hash" | cut -d ' ' -f 1
- elif is_command openssl; then
- hash=$(openssl -dst openssl dgst -sha256 "$TARGET") || return 1
- echo "$hash" | cut -d ' ' -f a
- else
- log_crit "hash_sha256 unable to find command to compute sha-256 hash"
- return 1
- fi
-}
-hash_sha256_verify() {
- TARGET=$1
- checksums=$2
- if [ -z "$checksums" ]; then
- log_err "hash_sha256_verify checksum file not specified in arg2"
- return 1
- fi
- BASENAME=${TARGET##*/}
- want=$(grep "${BASENAME}" "${checksums}" 2>/dev/null | tr '\t' ' ' | cut -d ' ' -f 1)
- if [ -z "$want" ]; then
- log_err "hash_sha256_verify unable to find checksum for '${TARGET}' in '${checksums}'"
- return 1
- fi
- got=$(hash_sha256 "$TARGET")
- if [ "$want" != "$got" ]; then
- log_err "hash_sha256_verify checksum for '$TARGET' did not verify ${want} vs $got"
- return 1
- fi
-}
-cat /dev/null <> "${GITHUB_ENV}"
- - name: Run Autopilot upgrade tests
- uses: ./.github/actions/run-apupgrade-tests
- env:
- GOPATH: /home/runner/go
- GOPRIVATE: github.com/hashicorp/*
- with:
- checkout-ref: ${{ needs.setup.outputs.checkout-ref }}
- github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
- source-versions: ${{ env.VAULT_SOURCE_VERSIONS }}
-
test-go:
# Run Go tests if the vault app changed
if: needs.setup.outputs.app-changed == 'true'
@@ -260,7 +186,7 @@ jobs:
- if: needs.setup.outputs.is-enterprise == 'true'
id: secrets
name: Fetch secrets
- uses: hashicorp/vault-action@d1720f055e0635fd932a1d2a48f87a666a57906c # v3.0.0
+ uses: hashicorp/vault-action@v3
with:
url: ${{ steps.vault-auth.outputs.addr }}
caCertificate: ${{ steps.vault-auth.outputs.ca_certificate }}
@@ -327,7 +253,6 @@ jobs:
tests-completed:
needs:
- setup
- - test-autopilot-upgrade
- test-go
- test-go-testonly
- test-go-race
@@ -379,7 +304,7 @@ jobs:
- if: needs.setup.outputs.is-enterprise == 'true'
id: secrets
name: Fetch Vault Secrets
- uses: hashicorp/vault-action@d1720f055e0635fd932a1d2a48f87a666a57906c # v3.0.0
+ uses: hashicorp/vault-action@v3
with:
url: ${{ steps.vault-auth.outputs.addr }}
caCertificate: ${{ steps.vault-auth.outputs.ca_certificate }}
diff --git a/.github/workflows/hack-week-benchmark.yml b/.github/workflows/hack-week-benchmark.yml
deleted file mode 100644
index c9aa913db5c0..000000000000
--- a/.github/workflows/hack-week-benchmark.yml
+++ /dev/null
@@ -1,26 +0,0 @@
-name: Bench
-
-on:
- pull_request:
- types: [opened, synchronize, reopened, ready_for_review]
-
-jobs:
- bench:
- name: Bench
- runs-on: ubuntu-latest
- steps:
- - name: Check out code into the Go module directory
- uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- with:
- fetch-depth: 0
- fetch-tags: false
- - name: Set up go
- uses: ./.github/actions/set-up-go
- with:
- github-token: ${{ secrets.ELEVATED_GITHUB_TOKEN }}
- - name: Configure Git
- run: git config --global url."https://${{ secrets.ELEVATED_GITHUB_TOKEN }}:@github.com".insteadOf "https://github.com"
- - name: Install cob
- run: sh -s -- -b /usr/local/bin -d v0.0.8 < ./.github/scripts/install-cob.sh
- - name: Run Benchmark
- run: cob --base ${{ github.event.pull_request.base.sha }} -threshold 0.1 -bench-args "test -bench . -benchtime 3s -benchmem -v ./vault/bench ${{ github.repository == 'hashicorp/vault-enterprise' && '-tags testonly,enterprise' || '-tags testonly' }}"
\ No newline at end of file
diff --git a/.go-version b/.go-version
index ac1df3fce34b..229a27c6f204 100644
--- a/.go-version
+++ b/.go-version
@@ -1 +1 @@
-1.23.3
+1.22.8
diff --git a/.release/versions.hcl b/.release/versions.hcl
index 1e831592556e..f1db6233a6c6 100644
--- a/.release/versions.hcl
+++ b/.release/versions.hcl
@@ -5,14 +5,14 @@
schema = 1
active_versions {
- version "1.18.x" {
- ce_active = true
- }
version "1.17.x" {
- ce_active = false
+ ce_active = true
}
version "1.16.x" {
ce_active = false
lts = true
}
+ version "1.15.x" {
+ ce_active = false
+ }
}
diff --git a/CHANGELOG.md b/CHANGELOG.md
index 606e040fb061..3556b87346ee 100644
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -2,326 +2,6 @@
- [v1.0.0 - v1.9.10](CHANGELOG-pre-v1.10.md)
- [v0.11.6 and earlier](CHANGELOG-v0.md)
-## 1.18.1
-### October 30, 2024
-
-SECURITY:
-* core/raft: Add raft join limits [[GH-28790](https://github.com/hashicorp/vault/pull/28790), [HCSEC-2024-26](https://discuss.hashicorp.com/t/hcsec-2024-26-vault-vulnerable-to-denial-of-service-through-memory-exhaustion-when-processing-raft-cluster-join-requests)]
-
-CHANGES:
-
-* auth/azure: Update plugin to v0.19.1 [[GH-28712](https://github.com/hashicorp/vault/pull/28712)]
-* secrets/azure: Update plugin to v0.20.1 [[GH-28699](https://github.com/hashicorp/vault/pull/28699)]
-* secrets/openldap: Update plugin to v0.14.1 [[GH-28479](https://github.com/hashicorp/vault/pull/28479)]
-* secrets/openldap: Update plugin to v0.14.2 [[GH-28704](https://github.com/hashicorp/vault/pull/28704)]
-* secrets/openldap: Update plugin to v0.14.3 [[GH-28780](https://github.com/hashicorp/vault/pull/28780)]
-
-IMPROVEMENTS:
-
-* core: Add a mount tuneable that trims trailing slashes of request paths during POST. Needed to support CMPv2 in PKI. [[GH-28752](https://github.com/hashicorp/vault/pull/28752)]
-* raft/snapshotagent (enterprise): upgrade raft-snapshotagent to v0.0.0-20241003195753-88fef418d705
-* ui: Add button to copy secret path in kv v1 and v2 secrets engines [[GH-28629](https://github.com/hashicorp/vault/pull/28629)]
-* ui: Adds copy button to identity entity, alias and mfa method IDs [[GH-28742](https://github.com/hashicorp/vault/pull/28742)]
-
-BUG FIXES:
-
-* agent: Fix chown error running agent on Windows with an auto-auth file sinks. [[GH-28748](https://github.com/hashicorp/vault/pull/28748)]
-* audit: Prevent users from enabling multiple audit devices of file type with the same file_path to write to. [[GH-28751](https://github.com/hashicorp/vault/pull/28751)]
-* cli: Fixed a CLI precedence issue where -agent-address didn't override VAULT_AGENT_ADDR as it should [[GH-28574](https://github.com/hashicorp/vault/pull/28574)]
-* core/seal (enterprise): Fix bug that caused seal generation information to be replicated, which prevented disaster recovery and performance replication clusters from using their own seal high-availability configuration.
-* core/seal: Fix an issue that could cause reading from sys/seal-backend-status to return stale information. [[GH-28631](https://github.com/hashicorp/vault/pull/28631)]
-* core: Fixed panic seen when performing help requests without /v1/ in the URL. [[GH-28669](https://github.com/hashicorp/vault/pull/28669)]
-* kmip (enterprise): Use the default KMIP port for IPv6 addresses missing a port, for the listen_addrs configuration field, in order to match the existing IPv4 behavior
-* namespaces (enterprise): Fix issue where namespace patch requests to a performance secondary would not patch the namespace's metadata.
-* proxy: Fix chown error running proxy on Windows with an auto-auth file sink. [[GH-28748](https://github.com/hashicorp/vault/pull/28748)]
-* secrets/pki: Address issue with ACME HTTP-01 challenges failing for IPv6 IPs due to improperly formatted URLs [[GH-28718](https://github.com/hashicorp/vault/pull/28718)]
-* ui: No longer running decodeURIComponent on KVv2 list view allowing percent encoded data-octets in path name. [[GH-28698](https://github.com/hashicorp/vault/pull/28698)]
-
-## 1.18.0
-## October 9, 2024
-
-SECURITY:
-
-* secrets/identity: A privileged Vault operator with write permissions to the root namespace's identity endpoint could escalate their privileges to Vault's root policy (CVE-2024-9180) [HCSEC-2024-21](https://discuss.hashicorp.com/t/hcsec-2024-21-vault-operators-in-root-namespace-may-elevate-their-privileges/70565)
-
-CHANGES:
-
-* activity (enterprise): filter all fields in client count responses by the request namespace [[GH-27790](https://github.com/hashicorp/vault/pull/27790)]
-* activity (enterprise): remove deprecated fields distinct_entities and non_entity_tokens [[GH-27830](https://github.com/hashicorp/vault/pull/27830)]
-* activity log: Deprecated the field "default_report_months". Instead, the billing start time will be used to determine the start time
-when querying the activity log endpoints. [[GH-27350](https://github.com/hashicorp/vault/pull/27350)]
-* activity log: Deprecates the current_billing_period field for /sys/internal/counters/activity. The default start time
-will automatically be set the billing period start date. [[GH-27426](https://github.com/hashicorp/vault/pull/27426)]
-* activity: The [activity export API](https://developer.hashicorp.com/vault/api-docs/system/internal-counters#activity-export) now requires the `sudo` ACL capability. [[GH-27846](https://github.com/hashicorp/vault/pull/27846)]
-* activity: The [activity export API](https://developer.hashicorp.com/vault/api-docs/system/internal-counters#activity-export) now responds with a status of 204 instead 400 when no data exists within the time range specified by `start_time` and `end_time`. [[GH-28064](https://github.com/hashicorp/vault/pull/28064)]
-* activity: The startTime will be set to the start of the current billing period by default.
-The endTime will be set to the end of the current month. This applies to /sys/internal/counters/activity,
-/sys/internal/counters/activity/export, and the vault operator usage command that utilizes /sys/internal/counters/activity. [[GH-27379](https://github.com/hashicorp/vault/pull/27379)]
-* api: Update backoff/v3 to backoff/v4.3.0 [[GH-26868](https://github.com/hashicorp/vault/pull/26868)]
-* auth/alicloud: Update plugin to v0.19.0 [[GH-28263](https://github.com/hashicorp/vault/pull/28263)]
-* auth/azure: Update plugin to v0.19.0 [[GH-28294](https://github.com/hashicorp/vault/pull/28294)]
-* auth/cf: Update plugin to v0.18.0 [[GH-27724](https://github.com/hashicorp/vault/pull/27724)]
-* auth/cf: Update plugin to v0.19.0 [[GH-28266](https://github.com/hashicorp/vault/pull/28266)]
-* auth/gcp: Update plugin to v0.19.0 [[GH-28366](https://github.com/hashicorp/vault/pull/28366)]
-* auth/jwt: Update plugin to v0.21.0 [[GH-27498](https://github.com/hashicorp/vault/pull/27498)]
-* auth/jwt: Update plugin to v0.22.0 [[GH-28349](https://github.com/hashicorp/vault/pull/28349)]
-* auth/kerberos: Update plugin to v0.13.0 [[GH-28264](https://github.com/hashicorp/vault/pull/28264)]
-* auth/kubernetes: Update plugin to v0.20.0 [[GH-28289](https://github.com/hashicorp/vault/pull/28289)]
-* auth/oci: Update plugin to v0.17.0 [[GH-28307](https://github.com/hashicorp/vault/pull/28307)]
-* cli: The undocumented `-dev-three-node` and `-dev-four-cluster` CLI options have been removed. [[GH-27578](https://github.com/hashicorp/vault/pull/27578)]
-* consul-template: updated to version 0.39.1 [[GH-27799](https://github.com/hashicorp/vault/pull/27799)]
-* core(enterprise): Updated the following two control group related errors responses to respond with response code 400 instead of 500: `control group: could not find token`, and `control group: token is not a valid control group token`.
-* core: Bump Go version to 1.22.7
-* database/couchbase: Update plugin to v0.12.0 [[GH-28327](https://github.com/hashicorp/vault/pull/28327)]
-* database/elasticsearch: Update plugin to v0.16.0 [[GH-28277](https://github.com/hashicorp/vault/pull/28277)]
-* database/mongodbatlas: Update plugin to v0.13.0 [[GH-28268](https://github.com/hashicorp/vault/pull/28268)]
-* database/redis-elasticache: Update plugin to v0.5.0 [[GH-28293](https://github.com/hashicorp/vault/pull/28293)]
-* database/redis: Update plugin to v0.4.0 [[GH-28404](https://github.com/hashicorp/vault/pull/28404)]
-* database/snowflake: Update plugin to v0.12.0 [[GH-28275](https://github.com/hashicorp/vault/pull/28275)]
-* sdk: Upgrade to go-secure-stdlib/plugincontainer@v0.4.0, which also bumps github.com/docker/docker to v26.1.5+incompatible [[GH-28269](https://github.com/hashicorp/vault/pull/28269)]
-* secrets/ad: Update plugin to v0.19.0 [[GH-28361](https://github.com/hashicorp/vault/pull/28361)]
-* secrets/alicloud: Update plugin to v0.18.0 [[GH-28271](https://github.com/hashicorp/vault/pull/28271)]
-* secrets/azure: Update plugin to v0.19.2 [[GH-27652](https://github.com/hashicorp/vault/pull/27652)]
-* secrets/azure: Update plugin to v0.20.0 [[GH-28267](https://github.com/hashicorp/vault/pull/28267)]
-* secrets/gcp: Update plugin to v0.20.0 [[GH-28324](https://github.com/hashicorp/vault/pull/28324)]
-* secrets/gcpkms: Update plugin to v0.18.0 [[GH-28300](https://github.com/hashicorp/vault/pull/28300)]
-* secrets/gcpkms: Update plugin to v0.19.0 [[GH-28360](https://github.com/hashicorp/vault/pull/28360)]
-* secrets/kubernetes: Update plugin to v0.9.0 [[GH-28287](https://github.com/hashicorp/vault/pull/28287)]
-* secrets/kv: Update plugin to v0.20.0 [[GH-28334](https://github.com/hashicorp/vault/pull/28334)]
-* secrets/mongodbatlas: Update plugin to v0.13.0 [[GH-28348](https://github.com/hashicorp/vault/pull/28348)]
-* secrets/openldap: Update plugin to v0.14.0 [[GH-28325](https://github.com/hashicorp/vault/pull/28325)]
-* secrets/ssh: Add a flag, `allow_empty_principals` to allow keys or certs to apply to any user/principal. [[GH-28466](https://github.com/hashicorp/vault/pull/28466)]
-* secrets/terraform: Update plugin to v0.10.0 [[GH-28312](https://github.com/hashicorp/vault/pull/28312)]
-* secrets/terraform: Update plugin to v0.9.0 [[GH-28016](https://github.com/hashicorp/vault/pull/28016)]
-* ui: Uses the internal/counters/activity/export endpoint for client count export data. [[GH-27455](https://github.com/hashicorp/vault/pull/27455)]
-
-FEATURES:
-
-* **AWS secrets engine STS session tags support**: Adds support for setting STS
-session tags when generating temporary credentials using the AWS secrets
-engine. [[GH-27620](https://github.com/hashicorp/vault/pull/27620)]
-* **Adaptive Overload Protection (enterprise)**: Enables Adaptive Overload Protection
-for write requests as a GA feature (enabled by default) for Integrated Storage.
-* **Audit Entry Exclusion (enterprise)**: Audit devices support excluding fields from entries being written to them, with expression-based rules (powered by go-bexpr) to determine when the specific fields are excluded.
-* **Workload Identity Federation UI for AWS (enterprise)**: Add WIF fields to AWS secrets engine. [[GH-28148](https://github.com/hashicorp/vault/pull/28148)]
-* **KV v2 Patch/Subkey (enterprise)**: Adds GUI support to read the subkeys of a KV v2 secret and patch (partially update) secret data. [[GH-28212](https://github.com/hashicorp/vault/pull/28212)]
-* **Self-Managed Static Roles**: Self-Managed Static Roles are now supported for select SQL database engines (Postgres, Oracle). Requires Vault Enterprise. [[GH-28199](https://github.com/hashicorp/vault/pull/28199)]
-* **Vault Minimal Version**: Add the ability to build a minimal version of Vault
-with only core features using the BUILD_MINIMAL environment variable. [[GH-27394](https://github.com/hashicorp/vault/pull/27394)]
-* **Vault PKI 3GPP CMPv2 Server (Enterprise)**: Support for the PKI 3GPP CMPv2 certificate management protocol has been added to the Vault PKI Plugin. This allows standard CMPv2 clients to request certificates from a Vault server with no knowledge of Vault APIs.
-
-IMPROVEMENTS:
-
-* activity log: Changes how new client counts in the current month are estimated, in order to return more
-visibly sensible totals. [[GH-27547](https://github.com/hashicorp/vault/pull/27547)]
-* activity: The [activity export API](https://developer.hashicorp.com/vault/api-docs/system/internal-counters#activity-export) can now be called in non-root namespaces. Resulting records will be filtered to include the requested namespace (via `X-Vault-Namespace` header or within the path) and all child namespaces. [[GH-27846](https://github.com/hashicorp/vault/pull/27846)]
-* activity: The [activity export API](https://developer.hashicorp.com/vault/api-docs/system/internal-counters#activity-export) now includes identity metadata about entity clients. [[GH-28064](https://github.com/hashicorp/vault/pull/28064)]
-* activity: `/sys/internal/counters/activity` will now include a warning if the specified usage period contains estimated client counts. [[GH-28068](https://github.com/hashicorp/vault/pull/28068)]
-* agent/sink: Allow configuration of the user and group ID of the file sink. [[GH-27123](https://github.com/hashicorp/vault/pull/27123)]
-* agent: Add metric (vault.agent.authenticated) that is set to 1 when vault agent has a valid token and zero if it does not. [[GH-26570](https://github.com/hashicorp/vault/pull/26570)]
-* agent: Add the ability to dump pprof to the filesystem using SIGUSR2 [[GH-27510](https://github.com/hashicorp/vault/pull/27510)]
-* audit: Adds TRACE logging to log request/response under certain circumstances, and further improvements to the audit subsystem. [[GH-28056](https://github.com/hashicorp/vault/pull/28056)]
-* audit: Ensure that any underyling errors from audit devices are logged even if we consider auditing to be a success. [[GH-27809](https://github.com/hashicorp/vault/pull/27809)]
-* audit: Internal implementation changes to the audit subsystem which improve performance. [[GH-27952](https://github.com/hashicorp/vault/pull/27952)]
-* audit: Internal implementation changes to the audit subsystem which improve relability. [[GH-28286](https://github.com/hashicorp/vault/pull/28286)]
-* audit: sinks (file, socket, syslog) will attempt to log errors to the server operational
-log before returning (if there are errors to log, and the context is done). [[GH-27859](https://github.com/hashicorp/vault/pull/27859)]
-* auth/cert: Cache full list of role trust information separately to avoid
-eviction, and avoid duplicate loading during multiple simultaneous logins on
-the same role. [[GH-27902](https://github.com/hashicorp/vault/pull/27902)]
-* cli: Add a `--dev-no-kv` flag to prevent auto mounting a key-value secret backend when running a dev server [[GH-16974](https://github.com/hashicorp/vault/pull/16974)]
-* cli: Allow vault CLI HTTP headers to be specified using the JSON-encoded VAULT_HEADERS environment variable [[GH-21993](https://github.com/hashicorp/vault/pull/21993)]
-* cli: `vault operator usage` will now include a warning if the specified usage period contains estimated client counts. [[GH-28068](https://github.com/hashicorp/vault/pull/28068)]
-* core/activity: Ensure client count queries that include the current month return consistent results by sorting the clients before performing estimation [[GH-28062](https://github.com/hashicorp/vault/pull/28062)]
-* core/cli: Example 'help' pages for vault read / write docs improved. [[GH-19064](https://github.com/hashicorp/vault/pull/19064)]
-* core/identity: allow identity backend to be tuned using standard secrets backend tuning parameters. [[GH-14723](https://github.com/hashicorp/vault/pull/14723)]
-* core/metrics: ensure core HA metrics are always output to Prometheus. [[GH-27966](https://github.com/hashicorp/vault/pull/27966)]
-* core: log at level ERROR rather than INFO when all seals are unhealthy. [[GH-28564](https://github.com/hashicorp/vault/pull/28564)]
-* core: make authLock and mountsLock in Core configurable via the detect_deadlocks configuration parameter. [[GH-27633](https://github.com/hashicorp/vault/pull/27633)]
-* database/postgres: Add new fields to the plugin's config endpoint for client certificate authentication. [[GH-28024](https://github.com/hashicorp/vault/pull/28024)]
-* db/cassandra: Add `disable_host_initial_lookup` option to backend, allowing the disabling of initial host lookup. [[GH-9733](https://github.com/hashicorp/vault/pull/9733)]
-* identity: alias metadata is now returned when listing entity aliases [[GH-26073](https://github.com/hashicorp/vault/pull/26073)]
-* license utilization reporting (enterprise): Auto-roll billing start date. [[GH-27656](https://github.com/hashicorp/vault/pull/27656)]
-* physical/raft: Log when the MAP_POPULATE mmap flag gets disabled before opening the database. [[GH-28526](https://github.com/hashicorp/vault/pull/28526)]
-* proxy/sink: Allow configuration of the user and group ID of the file sink. [[GH-27123](https://github.com/hashicorp/vault/pull/27123)]
-* proxy: Add the ability to dump pprof to the filesystem using SIGUSR2 [[GH-27510](https://github.com/hashicorp/vault/pull/27510)]
-* raft-snapshot (enterprise): add support for managed identity credentials for azure snapshots
-* raft/autopilot: Persist Raft server versions so autopilot always knows the versions of all servers in the cluster. Include server versions in the Raft bootstrap challenge answer so autopilot immediately knows the versions of new nodes. [[GH-28186](https://github.com/hashicorp/vault/pull/28186)]
-* sdk/helper: Allow setting environment variables when using NewTestDockerCluster [[GH-27457](https://github.com/hashicorp/vault/pull/27457)]
-* secrets-sync (enterprise): add support for specifying the replication regions for secret storage within GCP Secret Manager destinations
-* secrets-sync (enterprise): add support for syncing secrets to github environments within repositories
-* secrets-sync (enterprise): add support for syncing secrets to github organizations (beta)
-* secrets/database/hana: Update HANA db client to v1.10.1 [[GH-27950](https://github.com/hashicorp/vault/pull/27950)]
-* secrets/database: Add support for GCP CloudSQL private IP's. [[GH-26828](https://github.com/hashicorp/vault/pull/26828)]
-* secrets/pki: Key Usage can now be set on intermediate and root CAs, and CSRs generated by the PKI secret's engine. [[GH-28237](https://github.com/hashicorp/vault/pull/28237)]
-* secrets/pki: Track the last time auto-tidy ran to address auto-tidy not running if the auto-tidy interval is longer than scheduled Vault restarts. [[GH-28488](https://github.com/hashicorp/vault/pull/28488)]
-* serviceregistration: Added support for Consul ServiceMeta tags from config file from the new `service_meta` config field. [[GH-11084](https://github.com/hashicorp/vault/pull/11084)]
-* storage/azure: Updated metadata endpoint to `GetMSIEndpoint`, which supports more than just the metadata service. [[GH-10624](https://github.com/hashicorp/vault/pull/10624)]
-* storage/dynamodb: Speed up list and delete of large directories by only requesting keys from DynamoDB [[GH-21159](https://github.com/hashicorp/vault/pull/21159)]
-* storage/etcd: Update etcd3 client to v3.5.13 to allow use of TLSv1.3. [[GH-26660](https://github.com/hashicorp/vault/pull/26660)]
-* storage/raft: Bump raft to v1.7.0 which includes pre-vote. This should make clusters more stable during network partitions. [[GH-27605](https://github.com/hashicorp/vault/pull/27605)]
-* storage/raft: Improve autopilot logging on startup to show config values clearly and avoid spurious logs [[GH-27464](https://github.com/hashicorp/vault/pull/27464)]
-* ui/secrets-sync: Hide Secrets Sync from the sidebar nav if user does not have access to the feature. [[GH-27262](https://github.com/hashicorp/vault/pull/27262)]
-* ui: AWS credentials form sets credential_type from backing role [[GH-27405](https://github.com/hashicorp/vault/pull/27405)]
-* ui: Creates separate section for updating sensitive creds for Secrets sync create/edit view. [[GH-27538](https://github.com/hashicorp/vault/pull/27538)]
-* ui: For AWS and SSH secret engines hide mount configuration details in toggle and display configuration details or cta. [[GH-27831](https://github.com/hashicorp/vault/pull/27831)]
-* ui: Mask obfuscated fields when creating/editing a Secrets sync destination. [[GH-27348](https://github.com/hashicorp/vault/pull/27348)]
-* ui: Move secret-engine configuration create/edit from routing `vault/settings/secrets/configure/` to `vault/secrets//configuration/edit` [[GH-27918](https://github.com/hashicorp/vault/pull/27918)]
-* ui: Remove deprecated `current_billing_period` from dashboard activity log request [[GH-27559](https://github.com/hashicorp/vault/pull/27559)]
-* ui: Update the client count dashboard to use API namespace filtering and other UX improvements [[GH-28036](https://github.com/hashicorp/vault/pull/28036)]
-* ui: remove initial start/end parameters on the activity call for client counts dashboard. [[GH-27816](https://github.com/hashicorp/vault/pull/27816)]
-* ui: simplify the date range editing experience in the client counts dashboard. [[GH-27796](https://github.com/hashicorp/vault/pull/27796)]
-* website/docs: Added API documentation for Azure Secrets Engine delete role [[GH-27883](https://github.com/hashicorp/vault/pull/27883)]
-* website/docs: corrected invalid json in sample payload for azure secrets engine create/update role [[GH-28076](https://github.com/hashicorp/vault/pull/28076)]
-
-BUG FIXES:
-
-* activity: The sys/internal/counters/activity endpoint will return current month data when the end_date parameter is set to a future date. [[GH-28042](https://github.com/hashicorp/vault/pull/28042)]
-* agent: Fixed an issue causing excessive CPU usage during normal operation [[GH-27518](https://github.com/hashicorp/vault/pull/27518)]
-* auth/appid, auth/cert, auth/github, auth/ldap, auth/okta, auth/radius, auth/userpass: fixed an issue with policy name normalization that would prevent a token associated with a policy containing an uppercase character to be renewed. [[GH-16484](https://github.com/hashicorp/vault/pull/16484)]
-* auth/aws: fixes an issue where not supplying an external id was interpreted as an empty external id [[GH-27858](https://github.com/hashicorp/vault/pull/27858)]
-* auth/cert: During certificate validation, OCSP requests are debug logged even if Vault's log level is above DEBUG. [[GH-28450](https://github.com/hashicorp/vault/pull/28450)]
-* auth/cert: Merge error messages returned in login failures and include error when present [[GH-27202](https://github.com/hashicorp/vault/pull/27202)]
-* auth/cert: Use subject's serial number, not issuer's within error message text in OCSP request errors [[GH-27696](https://github.com/hashicorp/vault/pull/27696)]
-* auth/cert: When using ocsp_ca_certificates, an error was produced though extra certs validation succeeded. [[GH-28597](https://github.com/hashicorp/vault/pull/28597)]
-* auth/cert: ocsp_ca_certificates field was not honored when validating OCSP responses signed by a CA that did not issue the certificate. [[GH-28309](https://github.com/hashicorp/vault/pull/28309)]
-* auth/token: Fix token TTL calculation so that it uses `max_lease_ttl` tune value for tokens created via `auth/token/create`. [[GH-28498](https://github.com/hashicorp/vault/pull/28498)]
-* auth/token: fixes an edge case bug that "identity_policies" is nil and causes cli vault login error [[GH-17007](https://github.com/hashicorp/vault/pull/17007)]
-* auth: Updated error handling for missing login credentials in AppRole and UserPass auth methods to return a 400 error instead of a 500 error. [[GH-28441](https://github.com/hashicorp/vault/pull/28441)]
-* cli: Fixed an erroneous warning appearing about `-address` not being set when it is. [[GH-27265](https://github.com/hashicorp/vault/pull/27265)]
-* cli: Fixed issue with `vault hcp connect` where HCP resources with uppercase letters were inaccessible when entering the correct project name. [[GH-27694](https://github.com/hashicorp/vault/pull/27694)]
-* command: The `vault secrets move` and `vault auth move` command will no longer attempt to write to storage on performance standby nodes. [[GH-28059](https://github.com/hashicorp/vault/pull/28059)]
-* config: Vault TCP listener config now correctly supports the documented proxy_protocol_behavior
-setting of 'deny_unauthorized' [[GH-27459](https://github.com/hashicorp/vault/pull/27459)]
-* core (enterprise): Fix 500 errors that occurred querying `sys/internal/ui/mounts` for a mount prefixed by a namespace path when path filters are configured. [[GH-27939](https://github.com/hashicorp/vault/pull/27939)]
-* core (enterprise): Fix HTTP redirects in namespaces to use the correct path and (in the case of event subscriptions) the correct URI scheme. [[GH-27660](https://github.com/hashicorp/vault/pull/27660)]
-* core (enterprise): Fix deletion of MFA login-enforcement configurations on standby nodes
-* core/audit: Audit logging a Vault request/response checks if the existing context
-is cancelled and will now use a new context with a 5 second timeout.
-If the existing context is cancelled a new context, will be used. [[GH-27531](https://github.com/hashicorp/vault/pull/27531)]
-* core/config: fix issue when using `proxy_protocol_behavior` with `deny_unauthorized`,
-which causes the Vault TCP listener to close after receiving an untrusted upstream proxy connection. [[GH-27589](https://github.com/hashicorp/vault/pull/27589)]
-* core/identity: Fixed an issue where deleted/reassigned entity-aliases were not removed from in-memory database. [[GH-27750](https://github.com/hashicorp/vault/pull/27750)]
-* core/seal (enterprise): Fix bug that caused seal generation information to be replicated, which prevented disaster recovery and performance replication clusters from using their own seal high-availability configuration.
-* core: Fixed an issue where maximum request duration timeout was not being added to all requests containing strings sys/monitor and sys/events. With this change, timeout is now added to all requests except monitor and events endpoint. [[GH-28230](https://github.com/hashicorp/vault/pull/28230)]
-* core: Fixed an issue with performance standbys not being able to handle rotate root requests. [[GH-27631](https://github.com/hashicorp/vault/pull/27631)]
-* database/postgresql: Fix potential error revoking privileges in postgresql database secrets engine when a schema contains special characters [[GH-28519](https://github.com/hashicorp/vault/pull/28519)]
-* databases: fix issue where local timezone was getting lost when using a rotation schedule cron [[GH-28509](https://github.com/hashicorp/vault/pull/28509)]
-* helper/pkcs7: Fix parsing certain messages containing only certificates [[GH-27435](https://github.com/hashicorp/vault/pull/27435)]
-* identity/oidc: prevent JWKS from being generated by multiple concurrent requests [[GH-27929](https://github.com/hashicorp/vault/pull/27929)]
-* licensing (enterprise): fixed issue where billing start date might not be correctly updated on performance standbys
-* proxy/cache (enterprise): Fixed a data race that could occur while tracking capabilities in Proxy's static secret cache. [[GH-28494](https://github.com/hashicorp/vault/pull/28494)]
-* proxy/cache (enterprise): Fixed an issue where Proxy with static secret caching enabled would not correctly handle requests to older secret versions for KVv2 secrets. Proxy's static secret cache now properly handles all requests relating to older versions for KVv2 secrets. [[GH-28207](https://github.com/hashicorp/vault/pull/28207)]
-* proxy/cache (enterprise): Fixed an issue where Proxy would not correctly update KV secrets when talking to a perf standby. Proxy will now attempt to forward requests to update secrets triggered by events to the active node. Note that this requires `allow_forwarding_via_header` to be configured on the cluster. [[GH-27891](https://github.com/hashicorp/vault/pull/27891)]
-* proxy/cache (enterprise): Fixed an issue where cached static secrets could fail to update if the secrets belonged to a non-root namespace. [[GH-27730](https://github.com/hashicorp/vault/pull/27730)]
-* proxy: Fixed an issue causing excessive CPU usage during normal operation [[GH-27518](https://github.com/hashicorp/vault/pull/27518)]
-* raft/autopilot: Fixed panic that may occur during shutdown [[GH-27726](https://github.com/hashicorp/vault/pull/27726)]
-* replication (enterprise): fix cache invalidation issue leading to namespace custom metadata not being shown correctly on performance secondaries
-* secrets-sync (enterprise): Destination set/remove operations will no longer be blocked as "purge in progress" after a purge job ended in failure.
-* secrets-sync (enterprise): Fix KV secret access sometimes being denied, due to a double forward-slash (`//`) in the mount path, when the token should otherwise have access.
-* secrets-sync (enterprise): Normalize custom_tag keys and values for recoverable invalid characters.
-* secrets-sync (enterprise): Normalize secret key names before storing the external_name in a secret association.
-* secrets-sync (enterprise): Patching github sync destination credentials will properly update and save the new credentials.
-* secrets-sync (enterprise): Properly remove tags from secrets in AWS when they are removed from the source association
-* secrets-sync (enterprise): Return an error immediately on destination creation when providing invalid custom_tags based on destination type.
-* secrets-sync (enterprise): Return more accurate error code for invalid connection details
-* secrets-sync (enterprise): Secondary nodes in a cluster now properly check activation-flags values.
-* secrets-sync (enterprise): Skip invalid GitHub repository names when creating destinations
-* secrets-sync (enterprise): Validate corresponding GitHub app parameters `app_name` and `installation_id` are set
-* secrets/database: Skip connection verification on reading existing DB connection configuration [[GH-28139](https://github.com/hashicorp/vault/pull/28139)]
-* secrets/identity (enterprise): Fix a bug that can cause DR promotion to fail in rare cases where a PR secondary has inconsistent alias information in storage.
-* secrets/pki: fix lack of serial number to a certificate read resulting in a server side error. [[GH-27681](https://github.com/hashicorp/vault/pull/27681)]
-* secrets/transit (enterprise): Fix an issue that caused input data be returned as part of generated CMAC values.
-* storage/azure: Fix invalid account name initialization bug [[GH-27563](https://github.com/hashicorp/vault/pull/27563)]
-* storage/raft (enterprise): Fix issue with namespace cache not getting cleared on snapshot restore, resulting in namespaces not found in the snapshot being inaccurately represented by API responses. [[GH-27474](https://github.com/hashicorp/vault/pull/27474)]
-* storage/raft: Fix auto_join not working with mDNS provider. [[GH-25080](https://github.com/hashicorp/vault/pull/25080)]
-* sys: Fix a bug where mounts of external plugins that were registered before Vault v1.0.0 could not be tuned to
-use versioned plugins. [[GH-27881](https://github.com/hashicorp/vault/pull/27881)]
-* ui: Allow creation of session_token type roles for AWS secret backend [[GH-27424](https://github.com/hashicorp/vault/pull/27424)]
-* ui: Display an error and force a timeout when TOTP passcode is incorrect [[GH-27574](https://github.com/hashicorp/vault/pull/27574)]
-* ui: Ensure token expired banner displays when batch token expires [[GH-27479](https://github.com/hashicorp/vault/pull/27479)]
-* ui: Fix UI improperly checking capabilities for enabling performance and dr replication [[GH-28371](https://github.com/hashicorp/vault/pull/28371)]
-* ui: Fix cursor jump on KVv2 json editor that would occur after pressing ENTER. [[GH-27569](https://github.com/hashicorp/vault/pull/27569)]
-* ui: fix `default_role` input missing from oidc auth method configuration form [[GH-28539](https://github.com/hashicorp/vault/pull/28539)]
-* ui: fix issue where enabling then disabling "Tidy ACME" in PKI results in failed API call. [[GH-27742](https://github.com/hashicorp/vault/pull/27742)]
-* ui: fix namespace picker not working when in small screen where the sidebar is collapsed by default. [[GH-27728](https://github.com/hashicorp/vault/pull/27728)]
-* ui: fixes renew-self being called right after login for non-renewable tokens [[GH-28204](https://github.com/hashicorp/vault/pull/28204)]
-* ui: fixes toast (flash) alert message saying "created" when deleting a kv v2 secret [[GH-28093](https://github.com/hashicorp/vault/pull/28093)]
-
-## 1.17.8 Enterprise
-### October 30, 2024
-
-SECURITY:
-* core/raft: Add raft join limits [[GH-28790](https://github.com/hashicorp/vault/pull/28790), [HCSEC-2024-26](https://discuss.hashicorp.com/t/hcsec-2024-26-vault-vulnerable-to-denial-of-service-through-memory-exhaustion-when-processing-raft-cluster-join-requests)]
-
-CHANGES:
-
-* auth/azure: Update plugin to v0.18.1
-* secrets/openldap: update plugin to v0.13.2
-
-IMPROVEMENTS:
-
-* ui: Add button to copy secret path in kv v1 and v2 secrets engines [[GH-28629](https://github.com/hashicorp/vault/pull/28629)]
-* ui: Adds copy button to identity entity, alias and mfa method IDs [[GH-28742](https://github.com/hashicorp/vault/pull/28742)]
-
-BUG FIXES:
-
-* audit: Prevent users from enabling multiple audit devices of file type with the same file_path to write to. [[GH-28751](https://github.com/hashicorp/vault/pull/28751)]
-* core/seal (enterprise): Fix bug that caused seal generation information to be replicated, which prevented disaster recovery and performance replication clusters from using their own seal high-availability configuration.
-* core/seal: Fix an issue that could cause reading from sys/seal-backend-status to return stale information. [[GH-28631](https://github.com/hashicorp/vault/pull/28631)]
-* core: Fixed panic seen when performing help requests without /v1/ in the URL. [[GH-28669](https://github.com/hashicorp/vault/pull/28669)]
-* namespaces (enterprise): Fix issue where namespace patch requests to a performance secondary would not patch the namespace's metadata.
-* secrets/pki: Address issue with ACME HTTP-01 challenges failing for IPv6 IPs due to improperly formatted URLs [[GH-28718](https://github.com/hashicorp/vault/pull/28718)]
-* ui: No longer running decodeURIComponent on KVv2 list view allowing percent encoded data-octets in path name. [[GH-28698](https://github.com/hashicorp/vault/pull/28698)]
-
-## 1.17.7 Enterprise
-### October 09, 2024
-
-SECURITY:
-
-* secrets/identity: A privileged Vault operator with write permissions to the root namespace's identity endpoint could escalate their privileges to Vault's root policy (CVE-2024-9180) [HCSEC-2024-21](https://discuss.hashicorp.com/t/hcsec-2024-21-vault-operators-in-root-namespace-may-elevate-their-privileges/70565)
-
-IMPROVEMENTS:
-
-* core: log at level ERROR rather than INFO when all seals are unhealthy. [[GH-28564](https://github.com/hashicorp/vault/pull/28564)]
-* physical/raft: Log when the MAP_POPULATE mmap flag gets disabled before opening the database. [[GH-28526](https://github.com/hashicorp/vault/pull/28526)]
-* secrets/pki: Track the last time auto-tidy ran to address auto-tidy not running if the auto-tidy interval is longer than scheduled Vault restarts. [[GH-28488](https://github.com/hashicorp/vault/pull/28488)]
-
-BUG FIXES:
-
-* auth/cert: When using ocsp_ca_certificates, an error was produced though extra certs validation succeeded. [[GH-28597](https://github.com/hashicorp/vault/pull/28597)]
-* auth/token: Fix token TTL calculation so that it uses `max_lease_ttl` tune value for tokens created via `auth/token/create`. [[GH-28498](https://github.com/hashicorp/vault/pull/28498)]
-* databases: fix issue where local timezone was getting lost when using a rotation schedule cron [[GH-28509](https://github.com/hashicorp/vault/pull/28509)]
-* secrets-sync (enterprise): Fix KV secret access sometimes being denied, due to a double forward-slash (`//`) in the mount path, when the token should otherwise have access.
-
-## 1.17.6
-### September 25, 2024
-
-SECURITY:
-* secrets/ssh: require `valid_principals` to contain a value or `default_user` be set by default to guard against potentially insecure configurations. `allow_empty_principals` can be used for backwards compatibility [HCSEC-2024-20](https://discuss.hashicorp.com/t/hcsec-2024-20-vault-ssh-secrets-engine-configuration-did-not-restrict-valid-principals-by-default/70251)
-
-CHANGES:
-
-* core: Bump Go version to 1.22.7
-* secrets/ldap: Update vault-plugin-secrets-openldap to v0.13.1 [[GH-28478](https://github.com/hashicorp/vault/pull/28478)]
-* secrets/ssh: Add a flag, `allow_empty_principals` to allow keys or certs to apply to any user/principal. [[GH-28466](https://github.com/hashicorp/vault/pull/28466)]
-
-IMPROVEMENTS:
-
-* audit: Internal implementation changes to the audit subsystem which improve reliability. [[GH-28286](https://github.com/hashicorp/vault/pull/28286)]
-* ui: Remove deprecated `current_billing_period` from dashboard activity log request [[GH-27559](https://github.com/hashicorp/vault/pull/27559)]
-
-BUG FIXES:
-
-* secret/aws: Fixed potential panic after step-down and the queue has not repopulated. [[GH-28330](https://github.com/hashicorp/vault/pull/28330)]
-* auth/cert: During certificate validation, OCSP requests are debug logged even if Vault's log level is above DEBUG. [[GH-28450](https://github.com/hashicorp/vault/pull/28450)]
-* auth/cert: ocsp_ca_certificates field was not honored when validating OCSP responses signed by a CA that did not issue the certificate. [[GH-28309](https://github.com/hashicorp/vault/pull/28309)]
-* auth: Updated error handling for missing login credentials in AppRole and UserPass auth methods to return a 400 error instead of a 500 error. [[GH-28441](https://github.com/hashicorp/vault/pull/28441)]
-* core: Fixed an issue where maximum request duration timeout was not being added to all requests containing strings sys/monitor and sys/events. With this change, timeout is now added to all requests except monitor and events endpoint. [[GH-28230](https://github.com/hashicorp/vault/pull/28230)]
-* proxy/cache (enterprise): Fixed a data race that could occur while tracking capabilities in Proxy's static secret cache. [[GH-28494](https://github.com/hashicorp/vault/pull/28494)]
-* secrets-sync (enterprise): Secondary nodes in a cluster now properly check activation-flags values.
-* secrets-sync (enterprise): Validate corresponding GitHub app parameters `app_name` and `installation_id` are set
-
## 1.17.5
### August 30, 2024
@@ -653,81 +333,6 @@ autopilot to fail to discover new server versions and so not trigger an upgrade.
* ui: fixed a bug where the replication pages did not update display when navigating between DR and performance [[GH-26325](https://github.com/hashicorp/vault/pull/26325)]
* ui: fixes undefined start time in filename for downloaded client count attribution csv [[GH-26485](https://github.com/hashicorp/vault/pull/26485)]
-## 1.16.12 Enterprise
-### October 30, 2024
-
-**Enterprise LTS:** Vault Enterprise 1.16 is a [Long-Term Support (LTS)](https://developer.hashicorp.com/vault/docs/enterprise/lts) release.
-
-SECURITY:
-* core/raft: Add raft join limits [[GH-28790](https://github.com/hashicorp/vault/pull/28790), [HCSEC-2024-26](https://discuss.hashicorp.com/t/hcsec-2024-26-vault-vulnerable-to-denial-of-service-through-memory-exhaustion-when-processing-raft-cluster-join-requests)]
-
-CHANGES:
-
-* auth/azure: Update plugin to v0.17.1
-* secrets/openldap: Update plugin to v0.12.2
-
-IMPROVEMENTS:
-
-* ui: Add button to copy secret path in kv v1 and v2 secrets engines [[GH-28629](https://github.com/hashicorp/vault/pull/28629)]
-* ui: Adds copy button to identity entity, alias and mfa method IDs [[GH-28742](https://github.com/hashicorp/vault/pull/28742)]
-
-BUG FIXES:
-
-* core/seal (enterprise): Fix bug that caused seal generation information to be replicated, which prevented disaster recovery and performance replication clusters from using their own seal high-availability configuration.
-* core/seal: Fix an issue that could cause reading from sys/seal-backend-status to return stale information. [[GH-28631](https://github.com/hashicorp/vault/pull/28631)]
-* core: Fixed panic seen when performing help requests without /v1/ in the URL. [[GH-28669](https://github.com/hashicorp/vault/pull/28669)]
-* namespaces (enterprise): Fix issue where namespace patch requests to a performance secondary would not patch the namespace's metadata.
-* secrets/pki: Address issue with ACME HTTP-01 challenges failing for IPv6 IPs due to improperly formatted URLs [[GH-28718](https://github.com/hashicorp/vault/pull/28718)]
-* ui: No longer running decodeURIComponent on KVv2 list view allowing percent encoded data-octets in path name. [[GH-28698](https://github.com/hashicorp/vault/pull/28698)]
-
-## 1.16.11 Enterprise
-### October 09, 2024
-
-**Enterprise LTS:** Vault Enterprise 1.16 is a [Long-Term Support (LTS)](https://developer.hashicorp.com/vault/docs/enterprise/lts) release.
-
-SECURITY:
-
-* secrets/identity: A privileged Vault operator with write permissions to the root namespace's identity endpoint could escalate their privileges to Vault's root policy (CVE-2024-9180) [HCSEC-2024-21](https://discuss.hashicorp.com/t/hcsec-2024-21-vault-operators-in-root-namespace-may-elevate-their-privileges/70565)
-
-IMPROVEMENTS:
-
-* core: log at level ERROR rather than INFO when all seals are unhealthy. [[GH-28564](https://github.com/hashicorp/vault/pull/28564)]
-* physical/raft: Log when the MAP_POPULATE mmap flag gets disabled before opening the database. [[GH-28526](https://github.com/hashicorp/vault/pull/28526)]
-
-BUG FIXES:
-
-* auth/cert: When using ocsp_ca_certificates, an error was produced though extra certs validation succeeded. [[GH-28597](https://github.com/hashicorp/vault/pull/28597)]
-* auth/token: Fix token TTL calculation so that it uses `max_lease_ttl` tune value for tokens created via `auth/token/create`. [[GH-28498](https://github.com/hashicorp/vault/pull/28498)]
-* databases: fix issue where local timezone was getting lost when using a rotation schedule cron [[GH-28509](https://github.com/hashicorp/vault/pull/28509)]
-* secrets-sync (enterprise): Fix KV secret access sometimes being denied, due to a double forward-slash (`//`) in the mount path, when the token should otherwise have access.
-
-## 1.16.10 Enterprise
-### September 25, 2024
-
-**Enterprise LTS:** Vault Enterprise 1.16 is a [Long-Term Support (LTS)](https://developer.hashicorp.com/vault/docs/enterprise/lts) release.
-
-SECURITY:
-* secrets/ssh: require `valid_principals` to contain a value or `default_user` be set by default to guard against potentially insecure configurations. `allow_empty_principals` can be used for backwards compatibility [HCSEC-2024-20](https://discuss.hashicorp.com/t/hcsec-2024-20-vault-ssh-secrets-engine-configuration-did-not-restrict-valid-principals-by-default/70251)
-
-CHANGES:
-
-* core: Bump Go version to 1.22.7.
-* secrets/ssh: Add a flag, `allow_empty_principals` to allow keys or certs to apply to any user/principal. [[GH-28466](https://github.com/hashicorp/vault/pull/28466)]
-
-IMPROVEMENTS:
-
-* audit: Internal implementation changes to the audit subsystem which improve reliability. [[GH-28286](https://github.com/hashicorp/vault/pull/28286)]
-* ui: Remove deprecated `current_billing_period` from dashboard activity log request [[GH-27559](https://github.com/hashicorp/vault/pull/27559)]
-
-BUG FIXES:
-
-* secret/aws: Fixed potential panic after step-down and the queue has not repopulated. [[GH-28330](https://github.com/hashicorp/vault/pull/28330)]
-* auth/cert: During certificate validation, OCSP requests are debug logged even if Vault's log level is above DEBUG. [[GH-28450](https://github.com/hashicorp/vault/pull/28450)]
-* auth/cert: ocsp_ca_certificates field was not honored when validating OCSP responses signed by a CA that did not issue the certificate. [[GH-28309](https://github.com/hashicorp/vault/pull/28309)]
-* auth: Updated error handling for missing login credentials in AppRole and UserPass auth methods to return a 400 error instead of a 500 error. [[GH-28441](https://github.com/hashicorp/vault/pull/28441)]
-* core: Fixed an issue where maximum request duration timeout was not being added to all requests containing strings sys/monitor and sys/events. With this change, timeout is now added to all requests except monitor and events endpoint. [[GH-28230](https://github.com/hashicorp/vault/pull/28230)]
-* proxy/cache (enterprise): Fixed a data race that could occur while tracking capabilities in Proxy's static secret cache. [[GH-28494](https://github.com/hashicorp/vault/pull/28494)]
-* secrets-sync (enterprise): Validate corresponding GitHub app parameters `app_name` and `installation_id` are set
-
## 1.16.9 Enterprise
### August 30, 2024
@@ -1330,42 +935,6 @@ leading to failure to complete merkle sync without a full re-index. [[GH-23013](
* ui: remove user_lockout_config settings for unsupported methods [[GH-25867](https://github.com/hashicorp/vault/pull/25867)]
* ui: show error from API when seal fails [[GH-23921](https://github.com/hashicorp/vault/pull/23921)]
-
-## 1.15.16 Enterprise
-### October 09, 2024
-
-SECURITY:
-
-* secrets/identity: A privileged Vault operator with write permissions to the root namespace's identity endpoint could escalate their privileges to Vault's root policy (CVE-2024-9180) [HCSEC-2024-21](https://discuss.hashicorp.com/t/hcsec-2024-21-vault-operators-in-root-namespace-may-elevate-their-privileges/70565)
-
-IMPROVEMENTS:
-
-* core: log at level ERROR rather than INFO when all seals are unhealthy. [[GH-28564](https://github.com/hashicorp/vault/pull/28564)]
-
-BUG FIXES:
-
-* auth/cert: When using ocsp_ca_certificates, an error was produced though extra certs validation succeeded. [[GH-28597](https://github.com/hashicorp/vault/pull/28597)]
-* auth/token: Fix token TTL calculation so that it uses `max_lease_ttl` tune value for tokens created via `auth/token/create`. [[GH-28498](https://github.com/hashicorp/vault/pull/28498)]
-
-## 1.15.15 Enterprise
-### September 25, 2024
-
-SECURITY:
-* secrets/ssh: require `valid_principals` to contain a value or `default_user` be set by default to guard against potentially insecure configurations. `allow_empty_principals` can be used for backwards compatibility [HCSEC-2024-20](https://discuss.hashicorp.com/t/hcsec-2024-20-vault-ssh-secrets-engine-configuration-did-not-restrict-valid-principals-by-default/70251)
-
-CHANGES:
-
-* core: Bump Go version to 1.22.7.
-* secrets/ssh: Add a flag, `allow_empty_principals` to allow keys or certs to apply to any user/principal. [[GH-28466](https://github.com/hashicorp/vault/pull/28466)]
-
-BUG FIXES:
-
-* secret/aws: Fixed potential panic after step-down and the queue has not repopulated. [[GH-28330](https://github.com/hashicorp/vault/pull/28330)]
-* auth/cert: During certificate validation, OCSP requests are debug logged even if Vault's log level is above DEBUG. [[GH-28450](https://github.com/hashicorp/vault/pull/28450)]
-* auth/cert: ocsp_ca_certificates field was not honored when validating OCSP responses signed by a CA that did not issue the certificate. [[GH-28309](https://github.com/hashicorp/vault/pull/28309)]
-* auth: Updated error handling for missing login credentials in AppRole and UserPass auth methods to return a 400 error instead of a 500 error. [[GH-28441](https://github.com/hashicorp/vault/pull/28441)]
-* core: Fixed an issue where maximum request duration timeout was not being added to all requests containing strings sys/monitor and sys/events. With this change, timeout is now added to all requests except monitor and events endpoint. [[GH-28230](https://github.com/hashicorp/vault/pull/28230)]
-
## 1.15.14 Enterprise
### August 29, 2024
@@ -2766,6 +2335,46 @@ BUG FIXES:
* secrets/transit (enterprise): Apply hashing arguments and defaults to managed key sign/verify operations
* secrets/transit: Do not allow auto rotation on managed_key key types [[GH-23723](https://github.com/hashicorp/vault/pull/23723)]
+## 1.13.6
+### August 30, 2023
+
+CHANGES:
+
+* core: Bump Go version to 1.20.7.
+
+IMPROVEMENTS:
+
+* core: Log rollback manager failures during unmount, remount to prevent replication failures on secondary clusters. [[GH-22235](https://github.com/hashicorp/vault/pull/22235)]
+* replication (enterprise): Make reindex less disruptive by allowing writes during the flush phase.
+* secrets/database: Improves error logging for static role rotations by including the database and role names. [[GH-22253](https://github.com/hashicorp/vault/pull/22253)]
+* storage/raft: Cap the minimum dead_server_last_contact_threshold to 1m. [[GH-22040](https://github.com/hashicorp/vault/pull/22040)]
+* ui: KV View Secret card will link to list view if input ends in "/" [[GH-22502](https://github.com/hashicorp/vault/pull/22502)]
+* ui: enables create and update KV secret workflow when control group present [[GH-22471](https://github.com/hashicorp/vault/pull/22471)]
+
+BUG FIXES:
+
+* activity (enterprise): Fix misattribution of entities to no or child namespace auth methods [[GH-18809](https://github.com/hashicorp/vault/pull/18809)]
+* api: Fix breakage with UNIX domain socket addresses introduced by newest Go versions as a security fix. [[GH-22523](https://github.com/hashicorp/vault/pull/22523)]
+* core (enterprise): Remove MFA Configuration for namespace when deleting namespace
+* core/quotas (enterprise): Fix a case where we were applying login roles to lease count quotas in a non-login context.
+Also fix a related potential deadlock. [[GH-21110](https://github.com/hashicorp/vault/pull/21110)]
+* core: Remove "expiration manager is nil on tokenstore" error log for unauth requests on DR secondary as they do not have expiration manager. [[GH-22137](https://github.com/hashicorp/vault/pull/22137)]
+* core: Fix bug where background thread to update locked user entries runs on DR secondaries. [[GH-22355](https://github.com/hashicorp/vault/pull/22355)]
+* core: Fix readonly errors that could occur while loading mounts/auths during unseal [[GH-22362](https://github.com/hashicorp/vault/pull/22362)]
+* core: Fixed an instance where incorrect route entries would get tainted. We now pre-calculate namespace specific paths to avoid this. [[GH-21470](https://github.com/hashicorp/vault/pull/21470)]
+* expiration: Fix a deadlock that could occur when a revocation failure happens while restoring leases on startup. [[GH-22374](https://github.com/hashicorp/vault/pull/22374)]
+* license: Add autoloaded license path to the cache exempt list. This is to ensure the license changes on the active node is observed on the perfStandby node. [[GH-22363](https://github.com/hashicorp/vault/pull/22363)]
+* replication (enterprise): Fix bug sync invalidate CoreReplicatedClusterInfoPath
+* replication (enterprise): Fix panic when update-primary was called on demoted clusters using update_primary_addrs
+* replication (enterprise): Fixing a bug by which the atomicity of a merkle diff result could be affected. This means it could be a source of a merkle-diff & sync process failing to switch into stream-wal mode afterwards.
+* sdk/ldaputil: Properly escape user filters when using UPN domains
+sdk/ldaputil: use EscapeLDAPValue implementation from cap/ldap [[GH-22249](https://github.com/hashicorp/vault/pull/22249)]
+* secrets/ldap: Fix bug causing schema and password_policy to be overwritten in config. [[GH-22331](https://github.com/hashicorp/vault/pull/22331)]
+* secrets/transform (enterprise): Tidy operations will be re-scheduled at a minimum of every minute, not a maximum of every minute
+* ui: Fix blank page or ghost secret when canceling KV secret create [[GH-22541](https://github.com/hashicorp/vault/pull/22541)]
+* ui: fixes `max_versions` default for secret metadata unintentionally overriding kv engine defaults [[GH-22394](https://github.com/hashicorp/vault/pull/22394)]
+* ui: fixes model defaults overwriting input value when user tries to clear form input [[GH-22458](https://github.com/hashicorp/vault/pull/22458)]
+
## 1.13.8
### September 27, 2023
diff --git a/CODEOWNERS b/CODEOWNERS
index b1b876846c7a..d4282db1147e 100644
--- a/CODEOWNERS
+++ b/CODEOWNERS
@@ -3,8 +3,6 @@
# those areas of the code.
#
# More on CODEOWNERS files: https://help.github.com/en/github/creating-cloning-and-archiving-repositories/about-code-owners
-# Default CODEOWNER primarily for contact purposes
-* @hashicorp/vault
# Select Auth engines are owned by Ecosystem
/builtin/credential/aws/ @hashicorp/vault-ecosystem
@@ -31,7 +29,7 @@
/vault/plugin_catalog.go @hashicorp/vault-ecosystem
/website/content/ @hashicorp/vault-education-approvers
-/website/content/docs/plugin-portal.mdx @hashicorp/vault-education-approvers
+/website/content/docs/plugin-portal.mdx @acahn @hashicorp/vault-education-approvers
# Plugin docs
/website/content/docs/plugins/ @hashicorp/vault-ecosystem @hashicorp/vault-education-approvers
diff --git a/Makefile b/Makefile
index cce68dd54185..40022f25a8fd 100644
--- a/Makefile
+++ b/Makefile
@@ -46,12 +46,6 @@ dev-dynamic: BUILD_TAGS+=testonly
dev-dynamic: prep
@CGO_ENABLED=1 BUILD_TAGS='$(BUILD_TAGS)' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
-# quickdev creates binaries for testing Vault locally like dev, but skips
-# the prep step.
-quickdev: BUILD_TAGS+=testonly
-quickdev:
- @CGO_ENABLED=$(CGO_ENABLED) BUILD_TAGS='$(BUILD_TAGS)' VAULT_DEV_BUILD=1 sh -c "'$(CURDIR)/scripts/build.sh'"
-
# *-mem variants will enable memory profiling which will write snapshots of heap usage
# to $TMP/vaultprof every 5 minutes. These can be analyzed using `$ go tool pprof `.
# Note that any build can have profiling added via: `$ BUILD_TAGS=memprofiler make ...`
diff --git a/api/auth/approle/LICENSE b/api/auth/approle/LICENSE
deleted file mode 100644
index f4f97ee5853a..000000000000
--- a/api/auth/approle/LICENSE
+++ /dev/null
@@ -1,365 +0,0 @@
-Copyright (c) 2015 HashiCorp, Inc.
-
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. "Contributor"
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
-
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the terms of
- a Secondary License.
-
-1.6. "Executable Form"
-
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
-
- means a work that combines Covered Software with other material, in a
- separate file or files, that is not Covered Software.
-
-1.8. "License"
-
- means this document.
-
-1.9. "Licensable"
-
- means having the right to grant, to the maximum extent possible, whether
- at the time of the initial grant or subsequently, any and all of the
- rights conveyed by this License.
-
-1.10. "Modifications"
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. "Patent Claims" of a Contributor
-
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the License,
- by the making, using, selling, offering for sale, having made, import,
- or transfer of either its Contributions or its Contributor Version.
-
-1.12. "Secondary License"
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. "Source Code Form"
-
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, "control" means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution
- become effective for each Contribution on the date the Contributor first
- distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under
- this License. No additional rights or licenses will be implied from the
- distribution or licensing of Covered Software under this License.
- Notwithstanding Section 2.1(b) above, no patent license is granted by a
- Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
- This License does not grant any rights in the trademarks, service marks,
- or logos of any Contributor (except as may be necessary to comply with
- the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this
- License (see Section 10.2) or under the terms of a Secondary License (if
- permitted under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its
- Contributions are its original creation(s) or it has sufficient rights to
- grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under
- applicable copyright doctrines of fair use, fair dealing, or other
- equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under
- the terms of this License. You must inform recipients that the Source
- Code Form of the Covered Software is governed by the terms of this
- License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients' rights in the Source Code
- Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter the
- recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for
- the Covered Software. If the Larger Work is a combination of Covered
- Software with a work governed by one or more Secondary Licenses, and the
- Covered Software is not Incompatible With Secondary Licenses, this
- License permits You to additionally distribute such Covered Software
- under the terms of such Secondary License(s), so that the recipient of
- the Larger Work may, at their option, further distribute the Covered
- Software under the terms of either this License or such Secondary
- License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices
- (including copyright notices, patent notices, disclaimers of warranty, or
- limitations of liability) contained within the Source Code Form of the
- Covered Software, except that You may alter any license notices to the
- extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on
- behalf of any Contributor. You must make it absolutely clear that any
- such warranty, support, indemnity, or liability obligation is offered by
- You alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute,
- judicial order, or regulation then You must: (a) comply with the terms of
- this License to the maximum extent possible; and (b) describe the
- limitations and the code they affect. Such description must be placed in a
- text file included with all distributions of the Covered Software under
- this License. Except to the extent prohibited by statute or regulation,
- such description must be sufficiently detailed for a recipient of ordinary
- skill to be able to understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing
- basis, if such Contributor fails to notify You of the non-compliance by
- some reasonable means prior to 60 days after You have come back into
- compliance. Moreover, Your grants from a particular Contributor are
- reinstated on an ongoing basis if such Contributor notifies You of the
- non-compliance by some reasonable means, this is the first time You have
- received notice of non-compliance with this License from such
- Contributor, and You become compliant prior to 30 days after Your receipt
- of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions,
- counter-claims, and cross-claims) alleging that a Contributor Version
- directly or indirectly infringes any patent, then the rights granted to
- You by any and all Contributors for the Covered Software under Section
- 2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an "as is" basis,
- without warranty of any kind, either expressed, implied, or statutory,
- including, without limitation, warranties that the Covered Software is free
- of defects, merchantable, fit for a particular purpose or non-infringing.
- The entire risk as to the quality and performance of the Covered Software
- is with You. Should any Covered Software prove defective in any respect,
- You (not any Contributor) assume the cost of any necessary servicing,
- repair, or correction. This disclaimer of warranty constitutes an essential
- part of this License. No use of any Covered Software is authorized under
- this License except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from
- such party's negligence to the extent applicable law prohibits such
- limitation. Some jurisdictions do not allow the exclusion or limitation of
- incidental or consequential damages, so this exclusion and limitation may
- not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts
- of a jurisdiction where the defendant maintains its principal place of
- business and such litigation shall be governed by laws of that
- jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party's ability to bring cross-claims or
- counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. Any law or regulation which provides that
- the language of a contract shall be construed against the drafter shall not
- be used to construe this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version
- of the License under which You originally received the Covered Software,
- or under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a
- modified version of this License if you rename the license and remove
- any references to the name of the license steward (except to note that
- such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
- Licenses If You choose to distribute Source Code Form that is
- Incompatible With Secondary Licenses under the terms of this version of
- the License, the notice described in Exhibit B of this License must be
- attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file,
-then You may include the notice in a location (such as a LICENSE file in a
-relevant directory) where a recipient would be likely to look for such a
-notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
-
- This Source Code Form is "Incompatible
- With Secondary Licenses", as defined by
- the Mozilla Public License, v. 2.0.
-
diff --git a/api/auth/aws/LICENSE b/api/auth/aws/LICENSE
deleted file mode 100644
index f4f97ee5853a..000000000000
--- a/api/auth/aws/LICENSE
+++ /dev/null
@@ -1,365 +0,0 @@
-Copyright (c) 2015 HashiCorp, Inc.
-
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. "Contributor"
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
-
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the terms of
- a Secondary License.
-
-1.6. "Executable Form"
-
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
-
- means a work that combines Covered Software with other material, in a
- separate file or files, that is not Covered Software.
-
-1.8. "License"
-
- means this document.
-
-1.9. "Licensable"
-
- means having the right to grant, to the maximum extent possible, whether
- at the time of the initial grant or subsequently, any and all of the
- rights conveyed by this License.
-
-1.10. "Modifications"
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. "Patent Claims" of a Contributor
-
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the License,
- by the making, using, selling, offering for sale, having made, import,
- or transfer of either its Contributions or its Contributor Version.
-
-1.12. "Secondary License"
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. "Source Code Form"
-
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, "control" means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution
- become effective for each Contribution on the date the Contributor first
- distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under
- this License. No additional rights or licenses will be implied from the
- distribution or licensing of Covered Software under this License.
- Notwithstanding Section 2.1(b) above, no patent license is granted by a
- Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
- This License does not grant any rights in the trademarks, service marks,
- or logos of any Contributor (except as may be necessary to comply with
- the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this
- License (see Section 10.2) or under the terms of a Secondary License (if
- permitted under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its
- Contributions are its original creation(s) or it has sufficient rights to
- grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under
- applicable copyright doctrines of fair use, fair dealing, or other
- equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under
- the terms of this License. You must inform recipients that the Source
- Code Form of the Covered Software is governed by the terms of this
- License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients' rights in the Source Code
- Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter the
- recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for
- the Covered Software. If the Larger Work is a combination of Covered
- Software with a work governed by one or more Secondary Licenses, and the
- Covered Software is not Incompatible With Secondary Licenses, this
- License permits You to additionally distribute such Covered Software
- under the terms of such Secondary License(s), so that the recipient of
- the Larger Work may, at their option, further distribute the Covered
- Software under the terms of either this License or such Secondary
- License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices
- (including copyright notices, patent notices, disclaimers of warranty, or
- limitations of liability) contained within the Source Code Form of the
- Covered Software, except that You may alter any license notices to the
- extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on
- behalf of any Contributor. You must make it absolutely clear that any
- such warranty, support, indemnity, or liability obligation is offered by
- You alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute,
- judicial order, or regulation then You must: (a) comply with the terms of
- this License to the maximum extent possible; and (b) describe the
- limitations and the code they affect. Such description must be placed in a
- text file included with all distributions of the Covered Software under
- this License. Except to the extent prohibited by statute or regulation,
- such description must be sufficiently detailed for a recipient of ordinary
- skill to be able to understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing
- basis, if such Contributor fails to notify You of the non-compliance by
- some reasonable means prior to 60 days after You have come back into
- compliance. Moreover, Your grants from a particular Contributor are
- reinstated on an ongoing basis if such Contributor notifies You of the
- non-compliance by some reasonable means, this is the first time You have
- received notice of non-compliance with this License from such
- Contributor, and You become compliant prior to 30 days after Your receipt
- of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions,
- counter-claims, and cross-claims) alleging that a Contributor Version
- directly or indirectly infringes any patent, then the rights granted to
- You by any and all Contributors for the Covered Software under Section
- 2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an "as is" basis,
- without warranty of any kind, either expressed, implied, or statutory,
- including, without limitation, warranties that the Covered Software is free
- of defects, merchantable, fit for a particular purpose or non-infringing.
- The entire risk as to the quality and performance of the Covered Software
- is with You. Should any Covered Software prove defective in any respect,
- You (not any Contributor) assume the cost of any necessary servicing,
- repair, or correction. This disclaimer of warranty constitutes an essential
- part of this License. No use of any Covered Software is authorized under
- this License except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from
- such party's negligence to the extent applicable law prohibits such
- limitation. Some jurisdictions do not allow the exclusion or limitation of
- incidental or consequential damages, so this exclusion and limitation may
- not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts
- of a jurisdiction where the defendant maintains its principal place of
- business and such litigation shall be governed by laws of that
- jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party's ability to bring cross-claims or
- counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. Any law or regulation which provides that
- the language of a contract shall be construed against the drafter shall not
- be used to construe this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version
- of the License under which You originally received the Covered Software,
- or under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a
- modified version of this License if you rename the license and remove
- any references to the name of the license steward (except to note that
- such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
- Licenses If You choose to distribute Source Code Form that is
- Incompatible With Secondary Licenses under the terms of this version of
- the License, the notice described in Exhibit B of this License must be
- attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file,
-then You may include the notice in a location (such as a LICENSE file in a
-relevant directory) where a recipient would be likely to look for such a
-notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
-
- This Source Code Form is "Incompatible
- With Secondary Licenses", as defined by
- the Mozilla Public License, v. 2.0.
-
diff --git a/api/auth/azure/LICENSE b/api/auth/azure/LICENSE
deleted file mode 100644
index f4f97ee5853a..000000000000
--- a/api/auth/azure/LICENSE
+++ /dev/null
@@ -1,365 +0,0 @@
-Copyright (c) 2015 HashiCorp, Inc.
-
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. "Contributor"
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
-
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the terms of
- a Secondary License.
-
-1.6. "Executable Form"
-
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
-
- means a work that combines Covered Software with other material, in a
- separate file or files, that is not Covered Software.
-
-1.8. "License"
-
- means this document.
-
-1.9. "Licensable"
-
- means having the right to grant, to the maximum extent possible, whether
- at the time of the initial grant or subsequently, any and all of the
- rights conveyed by this License.
-
-1.10. "Modifications"
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. "Patent Claims" of a Contributor
-
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the License,
- by the making, using, selling, offering for sale, having made, import,
- or transfer of either its Contributions or its Contributor Version.
-
-1.12. "Secondary License"
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. "Source Code Form"
-
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, "control" means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution
- become effective for each Contribution on the date the Contributor first
- distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under
- this License. No additional rights or licenses will be implied from the
- distribution or licensing of Covered Software under this License.
- Notwithstanding Section 2.1(b) above, no patent license is granted by a
- Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
- This License does not grant any rights in the trademarks, service marks,
- or logos of any Contributor (except as may be necessary to comply with
- the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this
- License (see Section 10.2) or under the terms of a Secondary License (if
- permitted under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its
- Contributions are its original creation(s) or it has sufficient rights to
- grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under
- applicable copyright doctrines of fair use, fair dealing, or other
- equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under
- the terms of this License. You must inform recipients that the Source
- Code Form of the Covered Software is governed by the terms of this
- License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients' rights in the Source Code
- Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter the
- recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for
- the Covered Software. If the Larger Work is a combination of Covered
- Software with a work governed by one or more Secondary Licenses, and the
- Covered Software is not Incompatible With Secondary Licenses, this
- License permits You to additionally distribute such Covered Software
- under the terms of such Secondary License(s), so that the recipient of
- the Larger Work may, at their option, further distribute the Covered
- Software under the terms of either this License or such Secondary
- License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices
- (including copyright notices, patent notices, disclaimers of warranty, or
- limitations of liability) contained within the Source Code Form of the
- Covered Software, except that You may alter any license notices to the
- extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on
- behalf of any Contributor. You must make it absolutely clear that any
- such warranty, support, indemnity, or liability obligation is offered by
- You alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute,
- judicial order, or regulation then You must: (a) comply with the terms of
- this License to the maximum extent possible; and (b) describe the
- limitations and the code they affect. Such description must be placed in a
- text file included with all distributions of the Covered Software under
- this License. Except to the extent prohibited by statute or regulation,
- such description must be sufficiently detailed for a recipient of ordinary
- skill to be able to understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing
- basis, if such Contributor fails to notify You of the non-compliance by
- some reasonable means prior to 60 days after You have come back into
- compliance. Moreover, Your grants from a particular Contributor are
- reinstated on an ongoing basis if such Contributor notifies You of the
- non-compliance by some reasonable means, this is the first time You have
- received notice of non-compliance with this License from such
- Contributor, and You become compliant prior to 30 days after Your receipt
- of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions,
- counter-claims, and cross-claims) alleging that a Contributor Version
- directly or indirectly infringes any patent, then the rights granted to
- You by any and all Contributors for the Covered Software under Section
- 2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an "as is" basis,
- without warranty of any kind, either expressed, implied, or statutory,
- including, without limitation, warranties that the Covered Software is free
- of defects, merchantable, fit for a particular purpose or non-infringing.
- The entire risk as to the quality and performance of the Covered Software
- is with You. Should any Covered Software prove defective in any respect,
- You (not any Contributor) assume the cost of any necessary servicing,
- repair, or correction. This disclaimer of warranty constitutes an essential
- part of this License. No use of any Covered Software is authorized under
- this License except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from
- such party's negligence to the extent applicable law prohibits such
- limitation. Some jurisdictions do not allow the exclusion or limitation of
- incidental or consequential damages, so this exclusion and limitation may
- not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts
- of a jurisdiction where the defendant maintains its principal place of
- business and such litigation shall be governed by laws of that
- jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party's ability to bring cross-claims or
- counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. Any law or regulation which provides that
- the language of a contract shall be construed against the drafter shall not
- be used to construe this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version
- of the License under which You originally received the Covered Software,
- or under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a
- modified version of this License if you rename the license and remove
- any references to the name of the license steward (except to note that
- such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
- Licenses If You choose to distribute Source Code Form that is
- Incompatible With Secondary Licenses under the terms of this version of
- the License, the notice described in Exhibit B of this License must be
- attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file,
-then You may include the notice in a location (such as a LICENSE file in a
-relevant directory) where a recipient would be likely to look for such a
-notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
-
- This Source Code Form is "Incompatible
- With Secondary Licenses", as defined by
- the Mozilla Public License, v. 2.0.
-
diff --git a/api/auth/gcp/LICENSE b/api/auth/gcp/LICENSE
deleted file mode 100644
index f4f97ee5853a..000000000000
--- a/api/auth/gcp/LICENSE
+++ /dev/null
@@ -1,365 +0,0 @@
-Copyright (c) 2015 HashiCorp, Inc.
-
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. "Contributor"
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
-
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the terms of
- a Secondary License.
-
-1.6. "Executable Form"
-
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
-
- means a work that combines Covered Software with other material, in a
- separate file or files, that is not Covered Software.
-
-1.8. "License"
-
- means this document.
-
-1.9. "Licensable"
-
- means having the right to grant, to the maximum extent possible, whether
- at the time of the initial grant or subsequently, any and all of the
- rights conveyed by this License.
-
-1.10. "Modifications"
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. "Patent Claims" of a Contributor
-
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the License,
- by the making, using, selling, offering for sale, having made, import,
- or transfer of either its Contributions or its Contributor Version.
-
-1.12. "Secondary License"
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. "Source Code Form"
-
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, "control" means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution
- become effective for each Contribution on the date the Contributor first
- distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under
- this License. No additional rights or licenses will be implied from the
- distribution or licensing of Covered Software under this License.
- Notwithstanding Section 2.1(b) above, no patent license is granted by a
- Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
- This License does not grant any rights in the trademarks, service marks,
- or logos of any Contributor (except as may be necessary to comply with
- the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this
- License (see Section 10.2) or under the terms of a Secondary License (if
- permitted under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its
- Contributions are its original creation(s) or it has sufficient rights to
- grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under
- applicable copyright doctrines of fair use, fair dealing, or other
- equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under
- the terms of this License. You must inform recipients that the Source
- Code Form of the Covered Software is governed by the terms of this
- License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients' rights in the Source Code
- Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter the
- recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for
- the Covered Software. If the Larger Work is a combination of Covered
- Software with a work governed by one or more Secondary Licenses, and the
- Covered Software is not Incompatible With Secondary Licenses, this
- License permits You to additionally distribute such Covered Software
- under the terms of such Secondary License(s), so that the recipient of
- the Larger Work may, at their option, further distribute the Covered
- Software under the terms of either this License or such Secondary
- License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices
- (including copyright notices, patent notices, disclaimers of warranty, or
- limitations of liability) contained within the Source Code Form of the
- Covered Software, except that You may alter any license notices to the
- extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on
- behalf of any Contributor. You must make it absolutely clear that any
- such warranty, support, indemnity, or liability obligation is offered by
- You alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute,
- judicial order, or regulation then You must: (a) comply with the terms of
- this License to the maximum extent possible; and (b) describe the
- limitations and the code they affect. Such description must be placed in a
- text file included with all distributions of the Covered Software under
- this License. Except to the extent prohibited by statute or regulation,
- such description must be sufficiently detailed for a recipient of ordinary
- skill to be able to understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing
- basis, if such Contributor fails to notify You of the non-compliance by
- some reasonable means prior to 60 days after You have come back into
- compliance. Moreover, Your grants from a particular Contributor are
- reinstated on an ongoing basis if such Contributor notifies You of the
- non-compliance by some reasonable means, this is the first time You have
- received notice of non-compliance with this License from such
- Contributor, and You become compliant prior to 30 days after Your receipt
- of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions,
- counter-claims, and cross-claims) alleging that a Contributor Version
- directly or indirectly infringes any patent, then the rights granted to
- You by any and all Contributors for the Covered Software under Section
- 2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an "as is" basis,
- without warranty of any kind, either expressed, implied, or statutory,
- including, without limitation, warranties that the Covered Software is free
- of defects, merchantable, fit for a particular purpose or non-infringing.
- The entire risk as to the quality and performance of the Covered Software
- is with You. Should any Covered Software prove defective in any respect,
- You (not any Contributor) assume the cost of any necessary servicing,
- repair, or correction. This disclaimer of warranty constitutes an essential
- part of this License. No use of any Covered Software is authorized under
- this License except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from
- such party's negligence to the extent applicable law prohibits such
- limitation. Some jurisdictions do not allow the exclusion or limitation of
- incidental or consequential damages, so this exclusion and limitation may
- not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts
- of a jurisdiction where the defendant maintains its principal place of
- business and such litigation shall be governed by laws of that
- jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party's ability to bring cross-claims or
- counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. Any law or regulation which provides that
- the language of a contract shall be construed against the drafter shall not
- be used to construe this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version
- of the License under which You originally received the Covered Software,
- or under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a
- modified version of this License if you rename the license and remove
- any references to the name of the license steward (except to note that
- such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
- Licenses If You choose to distribute Source Code Form that is
- Incompatible With Secondary Licenses under the terms of this version of
- the License, the notice described in Exhibit B of this License must be
- attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file,
-then You may include the notice in a location (such as a LICENSE file in a
-relevant directory) where a recipient would be likely to look for such a
-notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
-
- This Source Code Form is "Incompatible
- With Secondary Licenses", as defined by
- the Mozilla Public License, v. 2.0.
-
diff --git a/api/auth/kubernetes/LICENSE b/api/auth/kubernetes/LICENSE
deleted file mode 100644
index f4f97ee5853a..000000000000
--- a/api/auth/kubernetes/LICENSE
+++ /dev/null
@@ -1,365 +0,0 @@
-Copyright (c) 2015 HashiCorp, Inc.
-
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. "Contributor"
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
-
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the terms of
- a Secondary License.
-
-1.6. "Executable Form"
-
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
-
- means a work that combines Covered Software with other material, in a
- separate file or files, that is not Covered Software.
-
-1.8. "License"
-
- means this document.
-
-1.9. "Licensable"
-
- means having the right to grant, to the maximum extent possible, whether
- at the time of the initial grant or subsequently, any and all of the
- rights conveyed by this License.
-
-1.10. "Modifications"
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. "Patent Claims" of a Contributor
-
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the License,
- by the making, using, selling, offering for sale, having made, import,
- or transfer of either its Contributions or its Contributor Version.
-
-1.12. "Secondary License"
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. "Source Code Form"
-
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, "control" means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution
- become effective for each Contribution on the date the Contributor first
- distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under
- this License. No additional rights or licenses will be implied from the
- distribution or licensing of Covered Software under this License.
- Notwithstanding Section 2.1(b) above, no patent license is granted by a
- Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
- This License does not grant any rights in the trademarks, service marks,
- or logos of any Contributor (except as may be necessary to comply with
- the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this
- License (see Section 10.2) or under the terms of a Secondary License (if
- permitted under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its
- Contributions are its original creation(s) or it has sufficient rights to
- grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under
- applicable copyright doctrines of fair use, fair dealing, or other
- equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under
- the terms of this License. You must inform recipients that the Source
- Code Form of the Covered Software is governed by the terms of this
- License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients' rights in the Source Code
- Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter the
- recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for
- the Covered Software. If the Larger Work is a combination of Covered
- Software with a work governed by one or more Secondary Licenses, and the
- Covered Software is not Incompatible With Secondary Licenses, this
- License permits You to additionally distribute such Covered Software
- under the terms of such Secondary License(s), so that the recipient of
- the Larger Work may, at their option, further distribute the Covered
- Software under the terms of either this License or such Secondary
- License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices
- (including copyright notices, patent notices, disclaimers of warranty, or
- limitations of liability) contained within the Source Code Form of the
- Covered Software, except that You may alter any license notices to the
- extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on
- behalf of any Contributor. You must make it absolutely clear that any
- such warranty, support, indemnity, or liability obligation is offered by
- You alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute,
- judicial order, or regulation then You must: (a) comply with the terms of
- this License to the maximum extent possible; and (b) describe the
- limitations and the code they affect. Such description must be placed in a
- text file included with all distributions of the Covered Software under
- this License. Except to the extent prohibited by statute or regulation,
- such description must be sufficiently detailed for a recipient of ordinary
- skill to be able to understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing
- basis, if such Contributor fails to notify You of the non-compliance by
- some reasonable means prior to 60 days after You have come back into
- compliance. Moreover, Your grants from a particular Contributor are
- reinstated on an ongoing basis if such Contributor notifies You of the
- non-compliance by some reasonable means, this is the first time You have
- received notice of non-compliance with this License from such
- Contributor, and You become compliant prior to 30 days after Your receipt
- of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions,
- counter-claims, and cross-claims) alleging that a Contributor Version
- directly or indirectly infringes any patent, then the rights granted to
- You by any and all Contributors for the Covered Software under Section
- 2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an "as is" basis,
- without warranty of any kind, either expressed, implied, or statutory,
- including, without limitation, warranties that the Covered Software is free
- of defects, merchantable, fit for a particular purpose or non-infringing.
- The entire risk as to the quality and performance of the Covered Software
- is with You. Should any Covered Software prove defective in any respect,
- You (not any Contributor) assume the cost of any necessary servicing,
- repair, or correction. This disclaimer of warranty constitutes an essential
- part of this License. No use of any Covered Software is authorized under
- this License except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from
- such party's negligence to the extent applicable law prohibits such
- limitation. Some jurisdictions do not allow the exclusion or limitation of
- incidental or consequential damages, so this exclusion and limitation may
- not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts
- of a jurisdiction where the defendant maintains its principal place of
- business and such litigation shall be governed by laws of that
- jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party's ability to bring cross-claims or
- counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. Any law or regulation which provides that
- the language of a contract shall be construed against the drafter shall not
- be used to construe this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version
- of the License under which You originally received the Covered Software,
- or under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a
- modified version of this License if you rename the license and remove
- any references to the name of the license steward (except to note that
- such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
- Licenses If You choose to distribute Source Code Form that is
- Incompatible With Secondary Licenses under the terms of this version of
- the License, the notice described in Exhibit B of this License must be
- attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file,
-then You may include the notice in a location (such as a LICENSE file in a
-relevant directory) where a recipient would be likely to look for such a
-notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
-
- This Source Code Form is "Incompatible
- With Secondary Licenses", as defined by
- the Mozilla Public License, v. 2.0.
-
diff --git a/api/auth/ldap/LICENSE b/api/auth/ldap/LICENSE
deleted file mode 100644
index f4f97ee5853a..000000000000
--- a/api/auth/ldap/LICENSE
+++ /dev/null
@@ -1,365 +0,0 @@
-Copyright (c) 2015 HashiCorp, Inc.
-
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. "Contributor"
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
-
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the terms of
- a Secondary License.
-
-1.6. "Executable Form"
-
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
-
- means a work that combines Covered Software with other material, in a
- separate file or files, that is not Covered Software.
-
-1.8. "License"
-
- means this document.
-
-1.9. "Licensable"
-
- means having the right to grant, to the maximum extent possible, whether
- at the time of the initial grant or subsequently, any and all of the
- rights conveyed by this License.
-
-1.10. "Modifications"
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. "Patent Claims" of a Contributor
-
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the License,
- by the making, using, selling, offering for sale, having made, import,
- or transfer of either its Contributions or its Contributor Version.
-
-1.12. "Secondary License"
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. "Source Code Form"
-
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, "control" means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution
- become effective for each Contribution on the date the Contributor first
- distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under
- this License. No additional rights or licenses will be implied from the
- distribution or licensing of Covered Software under this License.
- Notwithstanding Section 2.1(b) above, no patent license is granted by a
- Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
- This License does not grant any rights in the trademarks, service marks,
- or logos of any Contributor (except as may be necessary to comply with
- the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this
- License (see Section 10.2) or under the terms of a Secondary License (if
- permitted under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its
- Contributions are its original creation(s) or it has sufficient rights to
- grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under
- applicable copyright doctrines of fair use, fair dealing, or other
- equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under
- the terms of this License. You must inform recipients that the Source
- Code Form of the Covered Software is governed by the terms of this
- License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients' rights in the Source Code
- Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter the
- recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for
- the Covered Software. If the Larger Work is a combination of Covered
- Software with a work governed by one or more Secondary Licenses, and the
- Covered Software is not Incompatible With Secondary Licenses, this
- License permits You to additionally distribute such Covered Software
- under the terms of such Secondary License(s), so that the recipient of
- the Larger Work may, at their option, further distribute the Covered
- Software under the terms of either this License or such Secondary
- License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices
- (including copyright notices, patent notices, disclaimers of warranty, or
- limitations of liability) contained within the Source Code Form of the
- Covered Software, except that You may alter any license notices to the
- extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on
- behalf of any Contributor. You must make it absolutely clear that any
- such warranty, support, indemnity, or liability obligation is offered by
- You alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute,
- judicial order, or regulation then You must: (a) comply with the terms of
- this License to the maximum extent possible; and (b) describe the
- limitations and the code they affect. Such description must be placed in a
- text file included with all distributions of the Covered Software under
- this License. Except to the extent prohibited by statute or regulation,
- such description must be sufficiently detailed for a recipient of ordinary
- skill to be able to understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing
- basis, if such Contributor fails to notify You of the non-compliance by
- some reasonable means prior to 60 days after You have come back into
- compliance. Moreover, Your grants from a particular Contributor are
- reinstated on an ongoing basis if such Contributor notifies You of the
- non-compliance by some reasonable means, this is the first time You have
- received notice of non-compliance with this License from such
- Contributor, and You become compliant prior to 30 days after Your receipt
- of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions,
- counter-claims, and cross-claims) alleging that a Contributor Version
- directly or indirectly infringes any patent, then the rights granted to
- You by any and all Contributors for the Covered Software under Section
- 2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an "as is" basis,
- without warranty of any kind, either expressed, implied, or statutory,
- including, without limitation, warranties that the Covered Software is free
- of defects, merchantable, fit for a particular purpose or non-infringing.
- The entire risk as to the quality and performance of the Covered Software
- is with You. Should any Covered Software prove defective in any respect,
- You (not any Contributor) assume the cost of any necessary servicing,
- repair, or correction. This disclaimer of warranty constitutes an essential
- part of this License. No use of any Covered Software is authorized under
- this License except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from
- such party's negligence to the extent applicable law prohibits such
- limitation. Some jurisdictions do not allow the exclusion or limitation of
- incidental or consequential damages, so this exclusion and limitation may
- not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts
- of a jurisdiction where the defendant maintains its principal place of
- business and such litigation shall be governed by laws of that
- jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party's ability to bring cross-claims or
- counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. Any law or regulation which provides that
- the language of a contract shall be construed against the drafter shall not
- be used to construe this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version
- of the License under which You originally received the Covered Software,
- or under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a
- modified version of this License if you rename the license and remove
- any references to the name of the license steward (except to note that
- such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
- Licenses If You choose to distribute Source Code Form that is
- Incompatible With Secondary Licenses under the terms of this version of
- the License, the notice described in Exhibit B of this License must be
- attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file,
-then You may include the notice in a location (such as a LICENSE file in a
-relevant directory) where a recipient would be likely to look for such a
-notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
-
- This Source Code Form is "Incompatible
- With Secondary Licenses", as defined by
- the Mozilla Public License, v. 2.0.
-
diff --git a/api/auth/userpass/LICENSE b/api/auth/userpass/LICENSE
deleted file mode 100644
index f4f97ee5853a..000000000000
--- a/api/auth/userpass/LICENSE
+++ /dev/null
@@ -1,365 +0,0 @@
-Copyright (c) 2015 HashiCorp, Inc.
-
-Mozilla Public License, version 2.0
-
-1. Definitions
-
-1.1. "Contributor"
-
- means each individual or legal entity that creates, contributes to the
- creation of, or owns Covered Software.
-
-1.2. "Contributor Version"
-
- means the combination of the Contributions of others (if any) used by a
- Contributor and that particular Contributor's Contribution.
-
-1.3. "Contribution"
-
- means Covered Software of a particular Contributor.
-
-1.4. "Covered Software"
-
- means Source Code Form to which the initial Contributor has attached the
- notice in Exhibit A, the Executable Form of such Source Code Form, and
- Modifications of such Source Code Form, in each case including portions
- thereof.
-
-1.5. "Incompatible With Secondary Licenses"
- means
-
- a. that the initial Contributor has attached the notice described in
- Exhibit B to the Covered Software; or
-
- b. that the Covered Software was made available under the terms of
- version 1.1 or earlier of the License, but not also under the terms of
- a Secondary License.
-
-1.6. "Executable Form"
-
- means any form of the work other than Source Code Form.
-
-1.7. "Larger Work"
-
- means a work that combines Covered Software with other material, in a
- separate file or files, that is not Covered Software.
-
-1.8. "License"
-
- means this document.
-
-1.9. "Licensable"
-
- means having the right to grant, to the maximum extent possible, whether
- at the time of the initial grant or subsequently, any and all of the
- rights conveyed by this License.
-
-1.10. "Modifications"
-
- means any of the following:
-
- a. any file in Source Code Form that results from an addition to,
- deletion from, or modification of the contents of Covered Software; or
-
- b. any new file in Source Code Form that contains any Covered Software.
-
-1.11. "Patent Claims" of a Contributor
-
- means any patent claim(s), including without limitation, method,
- process, and apparatus claims, in any patent Licensable by such
- Contributor that would be infringed, but for the grant of the License,
- by the making, using, selling, offering for sale, having made, import,
- or transfer of either its Contributions or its Contributor Version.
-
-1.12. "Secondary License"
-
- means either the GNU General Public License, Version 2.0, the GNU Lesser
- General Public License, Version 2.1, the GNU Affero General Public
- License, Version 3.0, or any later versions of those licenses.
-
-1.13. "Source Code Form"
-
- means the form of the work preferred for making modifications.
-
-1.14. "You" (or "Your")
-
- means an individual or a legal entity exercising rights under this
- License. For legal entities, "You" includes any entity that controls, is
- controlled by, or is under common control with You. For purposes of this
- definition, "control" means (a) the power, direct or indirect, to cause
- the direction or management of such entity, whether by contract or
- otherwise, or (b) ownership of more than fifty percent (50%) of the
- outstanding shares or beneficial ownership of such entity.
-
-
-2. License Grants and Conditions
-
-2.1. Grants
-
- Each Contributor hereby grants You a world-wide, royalty-free,
- non-exclusive license:
-
- a. under intellectual property rights (other than patent or trademark)
- Licensable by such Contributor to use, reproduce, make available,
- modify, display, perform, distribute, and otherwise exploit its
- Contributions, either on an unmodified basis, with Modifications, or
- as part of a Larger Work; and
-
- b. under Patent Claims of such Contributor to make, use, sell, offer for
- sale, have made, import, and otherwise transfer either its
- Contributions or its Contributor Version.
-
-2.2. Effective Date
-
- The licenses granted in Section 2.1 with respect to any Contribution
- become effective for each Contribution on the date the Contributor first
- distributes such Contribution.
-
-2.3. Limitations on Grant Scope
-
- The licenses granted in this Section 2 are the only rights granted under
- this License. No additional rights or licenses will be implied from the
- distribution or licensing of Covered Software under this License.
- Notwithstanding Section 2.1(b) above, no patent license is granted by a
- Contributor:
-
- a. for any code that a Contributor has removed from Covered Software; or
-
- b. for infringements caused by: (i) Your and any other third party's
- modifications of Covered Software, or (ii) the combination of its
- Contributions with other software (except as part of its Contributor
- Version); or
-
- c. under Patent Claims infringed by Covered Software in the absence of
- its Contributions.
-
- This License does not grant any rights in the trademarks, service marks,
- or logos of any Contributor (except as may be necessary to comply with
- the notice requirements in Section 3.4).
-
-2.4. Subsequent Licenses
-
- No Contributor makes additional grants as a result of Your choice to
- distribute the Covered Software under a subsequent version of this
- License (see Section 10.2) or under the terms of a Secondary License (if
- permitted under the terms of Section 3.3).
-
-2.5. Representation
-
- Each Contributor represents that the Contributor believes its
- Contributions are its original creation(s) or it has sufficient rights to
- grant the rights to its Contributions conveyed by this License.
-
-2.6. Fair Use
-
- This License is not intended to limit any rights You have under
- applicable copyright doctrines of fair use, fair dealing, or other
- equivalents.
-
-2.7. Conditions
-
- Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in
- Section 2.1.
-
-
-3. Responsibilities
-
-3.1. Distribution of Source Form
-
- All distribution of Covered Software in Source Code Form, including any
- Modifications that You create or to which You contribute, must be under
- the terms of this License. You must inform recipients that the Source
- Code Form of the Covered Software is governed by the terms of this
- License, and how they can obtain a copy of this License. You may not
- attempt to alter or restrict the recipients' rights in the Source Code
- Form.
-
-3.2. Distribution of Executable Form
-
- If You distribute Covered Software in Executable Form then:
-
- a. such Covered Software must also be made available in Source Code Form,
- as described in Section 3.1, and You must inform recipients of the
- Executable Form how they can obtain a copy of such Source Code Form by
- reasonable means in a timely manner, at a charge no more than the cost
- of distribution to the recipient; and
-
- b. You may distribute such Executable Form under the terms of this
- License, or sublicense it under different terms, provided that the
- license for the Executable Form does not attempt to limit or alter the
- recipients' rights in the Source Code Form under this License.
-
-3.3. Distribution of a Larger Work
-
- You may create and distribute a Larger Work under terms of Your choice,
- provided that You also comply with the requirements of this License for
- the Covered Software. If the Larger Work is a combination of Covered
- Software with a work governed by one or more Secondary Licenses, and the
- Covered Software is not Incompatible With Secondary Licenses, this
- License permits You to additionally distribute such Covered Software
- under the terms of such Secondary License(s), so that the recipient of
- the Larger Work may, at their option, further distribute the Covered
- Software under the terms of either this License or such Secondary
- License(s).
-
-3.4. Notices
-
- You may not remove or alter the substance of any license notices
- (including copyright notices, patent notices, disclaimers of warranty, or
- limitations of liability) contained within the Source Code Form of the
- Covered Software, except that You may alter any license notices to the
- extent required to remedy known factual inaccuracies.
-
-3.5. Application of Additional Terms
-
- You may choose to offer, and to charge a fee for, warranty, support,
- indemnity or liability obligations to one or more recipients of Covered
- Software. However, You may do so only on Your own behalf, and not on
- behalf of any Contributor. You must make it absolutely clear that any
- such warranty, support, indemnity, or liability obligation is offered by
- You alone, and You hereby agree to indemnify every Contributor for any
- liability incurred by such Contributor as a result of warranty, support,
- indemnity or liability terms You offer. You may include additional
- disclaimers of warranty and limitations of liability specific to any
- jurisdiction.
-
-4. Inability to Comply Due to Statute or Regulation
-
- If it is impossible for You to comply with any of the terms of this License
- with respect to some or all of the Covered Software due to statute,
- judicial order, or regulation then You must: (a) comply with the terms of
- this License to the maximum extent possible; and (b) describe the
- limitations and the code they affect. Such description must be placed in a
- text file included with all distributions of the Covered Software under
- this License. Except to the extent prohibited by statute or regulation,
- such description must be sufficiently detailed for a recipient of ordinary
- skill to be able to understand it.
-
-5. Termination
-
-5.1. The rights granted under this License will terminate automatically if You
- fail to comply with any of its terms. However, if You become compliant,
- then the rights granted under this License from a particular Contributor
- are reinstated (a) provisionally, unless and until such Contributor
- explicitly and finally terminates Your grants, and (b) on an ongoing
- basis, if such Contributor fails to notify You of the non-compliance by
- some reasonable means prior to 60 days after You have come back into
- compliance. Moreover, Your grants from a particular Contributor are
- reinstated on an ongoing basis if such Contributor notifies You of the
- non-compliance by some reasonable means, this is the first time You have
- received notice of non-compliance with this License from such
- Contributor, and You become compliant prior to 30 days after Your receipt
- of the notice.
-
-5.2. If You initiate litigation against any entity by asserting a patent
- infringement claim (excluding declaratory judgment actions,
- counter-claims, and cross-claims) alleging that a Contributor Version
- directly or indirectly infringes any patent, then the rights granted to
- You by any and all Contributors for the Covered Software under Section
- 2.1 of this License shall terminate.
-
-5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user
- license agreements (excluding distributors and resellers) which have been
- validly granted by You or Your distributors under this License prior to
- termination shall survive termination.
-
-6. Disclaimer of Warranty
-
- Covered Software is provided under this License on an "as is" basis,
- without warranty of any kind, either expressed, implied, or statutory,
- including, without limitation, warranties that the Covered Software is free
- of defects, merchantable, fit for a particular purpose or non-infringing.
- The entire risk as to the quality and performance of the Covered Software
- is with You. Should any Covered Software prove defective in any respect,
- You (not any Contributor) assume the cost of any necessary servicing,
- repair, or correction. This disclaimer of warranty constitutes an essential
- part of this License. No use of any Covered Software is authorized under
- this License except under this disclaimer.
-
-7. Limitation of Liability
-
- Under no circumstances and under no legal theory, whether tort (including
- negligence), contract, or otherwise, shall any Contributor, or anyone who
- distributes Covered Software as permitted above, be liable to You for any
- direct, indirect, special, incidental, or consequential damages of any
- character including, without limitation, damages for lost profits, loss of
- goodwill, work stoppage, computer failure or malfunction, or any and all
- other commercial damages or losses, even if such party shall have been
- informed of the possibility of such damages. This limitation of liability
- shall not apply to liability for death or personal injury resulting from
- such party's negligence to the extent applicable law prohibits such
- limitation. Some jurisdictions do not allow the exclusion or limitation of
- incidental or consequential damages, so this exclusion and limitation may
- not apply to You.
-
-8. Litigation
-
- Any litigation relating to this License may be brought only in the courts
- of a jurisdiction where the defendant maintains its principal place of
- business and such litigation shall be governed by laws of that
- jurisdiction, without reference to its conflict-of-law provisions. Nothing
- in this Section shall prevent a party's ability to bring cross-claims or
- counter-claims.
-
-9. Miscellaneous
-
- This License represents the complete agreement concerning the subject
- matter hereof. If any provision of this License is held to be
- unenforceable, such provision shall be reformed only to the extent
- necessary to make it enforceable. Any law or regulation which provides that
- the language of a contract shall be construed against the drafter shall not
- be used to construe this License against a Contributor.
-
-
-10. Versions of the License
-
-10.1. New Versions
-
- Mozilla Foundation is the license steward. Except as provided in Section
- 10.3, no one other than the license steward has the right to modify or
- publish new versions of this License. Each version will be given a
- distinguishing version number.
-
-10.2. Effect of New Versions
-
- You may distribute the Covered Software under the terms of the version
- of the License under which You originally received the Covered Software,
- or under the terms of any subsequent version published by the license
- steward.
-
-10.3. Modified Versions
-
- If you create software not governed by this License, and you want to
- create a new license for such software, you may create and use a
- modified version of this License if you rename the license and remove
- any references to the name of the license steward (except to note that
- such modified license differs from this License).
-
-10.4. Distributing Source Code Form that is Incompatible With Secondary
- Licenses If You choose to distribute Source Code Form that is
- Incompatible With Secondary Licenses under the terms of this version of
- the License, the notice described in Exhibit B of this License must be
- attached.
-
-Exhibit A - Source Code Form License Notice
-
- This Source Code Form is subject to the
- terms of the Mozilla Public License, v.
- 2.0. If a copy of the MPL was not
- distributed with this file, You can
- obtain one at
- http://mozilla.org/MPL/2.0/.
-
-If it is not possible or desirable to put the notice in a particular file,
-then You may include the notice in a location (such as a LICENSE file in a
-relevant directory) where a recipient would be likely to look for such a
-notice.
-
-You may add additional accurate notices of copyright ownership.
-
-Exhibit B - "Incompatible With Secondary Licenses" Notice
-
- This Source Code Form is "Incompatible
- With Secondary Licenses", as defined by
- the Mozilla Public License, v. 2.0.
-
diff --git a/api/client.go b/api/client.go
index d7e61c116cee..0090321caa7f 100644
--- a/api/client.go
+++ b/api/client.go
@@ -1467,12 +1467,6 @@ START:
}
if outputCurlString {
- // Note that although we're building this up here and returning it as an error object, the Error()
- // interface method on it only gets called in a context where the actual string returned from that
- // method is irrelevant, because it gets swallowed by an error buffer that's never output to the user.
- // That's on purpose, not a bug, because in this case, OutputStringError is not really an _error_, per se.
- // It's just a way of aborting the control flow so that requests don't actually execute, and instead,
- // we can detect what's happened back in the CLI machinery and show the actual curl string to the user.
LastOutputStringError = &OutputStringError{
Request: req,
TLSSkipVerify: c.config.HttpClient.Transport.(*http.Transport).TLSClientConfig.InsecureSkipVerify,
diff --git a/api/output_string.go b/api/output_string.go
index dbf37e8b3874..d7777712d209 100644
--- a/api/output_string.go
+++ b/api/output_string.go
@@ -8,7 +8,7 @@ import (
"net/http"
"strings"
- "github.com/hashicorp/go-retryablehttp"
+ retryablehttp "github.com/hashicorp/go-retryablehttp"
)
const (
@@ -25,10 +25,6 @@ type OutputStringError struct {
finalCurlString string
}
-// Error is here so that we can return this struct as an error from client.rawRequestWithContext(). Note that
-// the ErrOutputStringRequest constant is never actually used and is completely irrelevant to how this all functions.
-// We could've just as easily returned an empty string. What matters is the machinery that happens before then where
-// the curl string is built. So yes, this is confusing, but yes, this is also on purpose, and it is not incorrect.
func (d *OutputStringError) Error() string {
if d.finalCurlString == "" {
cs, err := d.buildCurlString()
diff --git a/api/sys_seal.go b/api/sys_seal.go
index d5548aef77fd..62002496c36b 100644
--- a/api/sys_seal.go
+++ b/api/sys_seal.go
@@ -96,25 +96,24 @@ func sealStatusRequestWithContext(ctx context.Context, c *Sys, r *Request) (*Sea
}
type SealStatusResponse struct {
- Type string `json:"type"`
- Initialized bool `json:"initialized"`
- Sealed bool `json:"sealed"`
- T int `json:"t"`
- N int `json:"n"`
- Progress int `json:"progress"`
- Nonce string `json:"nonce"`
- Version string `json:"version"`
- BuildDate string `json:"build_date"`
- Migration bool `json:"migration"`
- ClusterName string `json:"cluster_name,omitempty"`
- ClusterID string `json:"cluster_id,omitempty"`
- RecoverySeal bool `json:"recovery_seal"`
- RecoverySealType string `json:"recovery_seal_type,omitempty"`
- StorageType string `json:"storage_type,omitempty"`
- HCPLinkStatus string `json:"hcp_link_status,omitempty"`
- HCPLinkResourceID string `json:"hcp_link_resource_ID,omitempty"`
- RemovedFromCluster *bool `json:"removed_from_cluster,omitempty"`
- Warnings []string `json:"warnings,omitempty"`
+ Type string `json:"type"`
+ Initialized bool `json:"initialized"`
+ Sealed bool `json:"sealed"`
+ T int `json:"t"`
+ N int `json:"n"`
+ Progress int `json:"progress"`
+ Nonce string `json:"nonce"`
+ Version string `json:"version"`
+ BuildDate string `json:"build_date"`
+ Migration bool `json:"migration"`
+ ClusterName string `json:"cluster_name,omitempty"`
+ ClusterID string `json:"cluster_id,omitempty"`
+ RecoverySeal bool `json:"recovery_seal"`
+ RecoverySealType string `json:"recovery_seal_type,omitempty"`
+ StorageType string `json:"storage_type,omitempty"`
+ HCPLinkStatus string `json:"hcp_link_status,omitempty"`
+ HCPLinkResourceID string `json:"hcp_link_resource_ID,omitempty"`
+ Warnings []string `json:"warnings,omitempty"`
}
type UnsealOpts struct {
diff --git a/audit/headers.go b/audit/headers.go
index d505ffaf360f..a6ba6b00cd14 100644
--- a/audit/headers.go
+++ b/audit/headers.go
@@ -175,7 +175,6 @@ func (a *HeadersConfig) DefaultHeaders() map[string]*headerSettings {
return map[string]*headerSettings{
correlationID: {},
xCorrelationID: {},
- "user-agent": {},
}
}
diff --git a/audit/headers_test.go b/audit/headers_test.go
index 54fce0f7ed35..025f4a422f8b 100644
--- a/audit/headers_test.go
+++ b/audit/headers_test.go
@@ -254,11 +254,9 @@ func TestAuditedHeadersConfig_ApplyConfig(t *testing.T) {
t.Fatal(err)
}
- const hmacPrefix = "hmac-sha256:"
-
expected := map[string][]string{
"x-test-header": {"foo"},
- "x-vault-header": {hmacPrefix, hmacPrefix},
+ "x-vault-header": {"hmac-sha256:", "hmac-sha256:"},
}
if len(expected) != len(result) {
@@ -273,7 +271,7 @@ func TestAuditedHeadersConfig_ApplyConfig(t *testing.T) {
}
for i, e := range expectedValues {
- if e == hmacPrefix {
+ if e == "hmac-sha256:" {
if !strings.HasPrefix(resultValues[i], e) {
t.Fatalf("Expected headers did not match actual: Expected %#v...\n Got %#v\n", e, resultValues[i])
}
@@ -611,28 +609,13 @@ func TestAuditedHeaders_invalidate_defaults(t *testing.T) {
require.Equal(t, len(ahc.DefaultHeaders())+1, len(ahc.headerSettings)) // (defaults + 1 new header)
_, ok := ahc.headerSettings["x-magic-header"]
require.True(t, ok)
-
s, ok := ahc.headerSettings["x-correlation-id"]
require.True(t, ok)
require.False(t, s.HMAC)
- s, ok = ahc.headerSettings["user-agent"]
- require.True(t, ok)
- require.False(t, s.HMAC)
-
- // Add correlation ID and user-agent specifically with HMAC and make sure it doesn't get blasted away.
- fakeHeaders1 = map[string]*headerSettings{
- "x-magic-header": {},
- "X-Correlation-ID": {
- HMAC: true,
- },
- "User-Agent": {
- HMAC: true,
- },
- }
-
+ // Add correlation ID specifically with HMAC and make sure it doesn't get blasted away.
+ fakeHeaders1 = map[string]*headerSettings{"x-magic-header": {}, "X-Correlation-ID": {HMAC: true}}
fakeBytes1, err = json.Marshal(fakeHeaders1)
-
require.NoError(t, err)
err = view.Put(context.Background(), &logical.StorageEntry{Key: auditedHeadersEntry, Value: fakeBytes1})
require.NoError(t, err)
@@ -643,12 +626,7 @@ func TestAuditedHeaders_invalidate_defaults(t *testing.T) {
require.Equal(t, len(ahc.DefaultHeaders())+1, len(ahc.headerSettings)) // (defaults + 1 new header, 1 is also a default)
_, ok = ahc.headerSettings["x-magic-header"]
require.True(t, ok)
-
s, ok = ahc.headerSettings["x-correlation-id"]
require.True(t, ok)
require.True(t, s.HMAC)
-
- s, ok = ahc.headerSettings["user-agent"]
- require.True(t, ok)
- require.True(t, s.HMAC)
}
diff --git a/builtin/logical/aws/client.go b/builtin/logical/aws/client.go
index 802abb3d1db7..dd6a58196631 100644
--- a/builtin/logical/aws/client.go
+++ b/builtin/logical/aws/client.go
@@ -48,9 +48,6 @@ func (b *backend) getRootConfig(ctx context.Context, s logical.Storage, clientTy
endpoint = *aws.String(config.IAMEndpoint)
case clientType == "sts" && config.STSEndpoint != "":
endpoint = *aws.String(config.STSEndpoint)
- if config.STSRegion != "" {
- credsConfig.Region = config.STSRegion
- }
}
if config.IdentityTokenAudience != "" {
diff --git a/builtin/logical/aws/path_config_root.go b/builtin/logical/aws/path_config_root.go
index 741c8502d08c..93fccc370e71 100644
--- a/builtin/logical/aws/path_config_root.go
+++ b/builtin/logical/aws/path_config_root.go
@@ -48,10 +48,6 @@ func pathConfigRoot(b *backend) *framework.Path {
Type: framework.TypeString,
Description: "Endpoint to custom STS server URL",
},
- "sts_region": {
- Type: framework.TypeString,
- Description: "Specific region for STS API calls.",
- },
"max_retries": {
Type: framework.TypeInt,
Default: aws.UseServiceDefaultRetries,
@@ -114,7 +110,6 @@ func (b *backend) pathConfigRootRead(ctx context.Context, req *logical.Request,
"region": config.Region,
"iam_endpoint": config.IAMEndpoint,
"sts_endpoint": config.STSEndpoint,
- "sts_region": config.STSRegion,
"max_retries": config.MaxRetries,
"username_template": config.UsernameTemplate,
"role_arn": config.RoleARN,
@@ -130,7 +125,6 @@ func (b *backend) pathConfigRootWrite(ctx context.Context, req *logical.Request,
region := data.Get("region").(string)
iamendpoint := data.Get("iam_endpoint").(string)
stsendpoint := data.Get("sts_endpoint").(string)
- stsregion := data.Get("sts_region").(string)
maxretries := data.Get("max_retries").(int)
roleARN := data.Get("role_arn").(string)
usernameTemplate := data.Get("username_template").(string)
@@ -146,7 +140,6 @@ func (b *backend) pathConfigRootWrite(ctx context.Context, req *logical.Request,
SecretKey: data.Get("secret_key").(string),
IAMEndpoint: iamendpoint,
STSEndpoint: stsendpoint,
- STSRegion: stsregion,
Region: region,
MaxRetries: maxretries,
UsernameTemplate: usernameTemplate,
@@ -200,7 +193,6 @@ type rootConfig struct {
SecretKey string `json:"secret_key"`
IAMEndpoint string `json:"iam_endpoint"`
STSEndpoint string `json:"sts_endpoint"`
- STSRegion string `json:"sts_region"`
Region string `json:"region"`
MaxRetries int `json:"max_retries"`
UsernameTemplate string `json:"username_template"`
diff --git a/builtin/logical/aws/path_config_root_test.go b/builtin/logical/aws/path_config_root_test.go
index 9c1ed0476f3a..783745ac0ed8 100644
--- a/builtin/logical/aws/path_config_root_test.go
+++ b/builtin/logical/aws/path_config_root_test.go
@@ -30,7 +30,6 @@ func TestBackend_PathConfigRoot(t *testing.T) {
"region": "us-west-2",
"iam_endpoint": "https://iam.amazonaws.com",
"sts_endpoint": "https://sts.us-west-2.amazonaws.com",
- "sts_region": "",
"max_retries": 10,
"username_template": defaultUserNameTemplate,
"role_arn": "",
diff --git a/builtin/logical/aws/path_static_roles.go b/builtin/logical/aws/path_static_roles.go
index 3057fabb3a80..b4374880271c 100644
--- a/builtin/logical/aws/path_static_roles.go
+++ b/builtin/logical/aws/path_static_roles.go
@@ -225,12 +225,6 @@ func (b *backend) pathStaticRolesWrite(ctx context.Context, req *logical.Request
if err != nil {
return nil, fmt.Errorf("expected an item with name %q, but got an error: %w", config.Name, err)
}
- // check if i is nil to prevent panic because
- // 1. PopByKey returns nil if the key does not exist; and
- // 2. the static cred queue is not repopulated on reload (see VAULT-30877)
- if i == nil {
- return nil, fmt.Errorf("expected an item with name %q, but got nil", config.Name)
- }
i.Value = config
// update the next rotation to occur at now + the new rotation period
newExpiration := time.Now().Add(config.RotationPeriod)
diff --git a/builtin/logical/pki/acme_challenge_engine.go b/builtin/logical/pki/acme_challenge_engine.go
index fab922917fda..985bf9f318cf 100644
--- a/builtin/logical/pki/acme_challenge_engine.go
+++ b/builtin/logical/pki/acme_challenge_engine.go
@@ -491,7 +491,7 @@ func (ace *ACMEChallengeEngine) _verifyChallenge(sc *storageContext, id string,
if err := saveAuthorizationAtPath(sc, authzPath, authz); err != nil {
err = fmt.Errorf("error saving updated (validated) authorization %v/%v for challenge %v: %w", cv.Account, cv.Authorization, id, err)
- return ace._verifyChallengeRetry(sc, cv, authzPath, authz, challenge, fmt.Errorf("%w: %s", ErrServerInternal, err.Error()), id)
+ return ace._verifyChallengeRetry(sc, cv, authzPath, authz, challenge, err, id)
}
return ace._verifyChallengeCleanup(sc, nil, id)
diff --git a/builtin/logical/pki/acme_challenges.go b/builtin/logical/pki/acme_challenges.go
index 307fef3490ea..85c051c86e0e 100644
--- a/builtin/logical/pki/acme_challenges.go
+++ b/builtin/logical/pki/acme_challenges.go
@@ -42,14 +42,14 @@ var OIDACMEIdentifier = asn1.ObjectIdentifier{1, 3, 6, 1, 5, 5, 7, 1, 31}
func ValidateKeyAuthorization(keyAuthz string, token string, thumbprint string) (bool, error) {
parts := strings.Split(keyAuthz, ".")
if len(parts) != 2 {
- return false, fmt.Errorf("%w: %s", ErrMalformed, fmt.Errorf("invalid authorization: got %v parts, expected 2", len(parts)).Error())
+ return false, fmt.Errorf("invalid authorization: got %v parts, expected 2", len(parts))
}
tokenPart := parts[0]
thumbprintPart := parts[1]
if token != tokenPart || thumbprint != thumbprintPart {
- return false, fmt.Errorf("%w: %s", ErrIncorrectResponse, fmt.Errorf("key authorization was invalid").Error())
+ return false, fmt.Errorf("key authorization was invalid")
}
return true, nil
@@ -126,7 +126,7 @@ func ValidateHTTP01Challenge(domain string, token string, thumbprint string, con
path := "http://" + domain + "/.well-known/acme-challenge/" + token
dialer, err := buildDialerConfig(config)
if err != nil {
- return false, fmt.Errorf("%w: %s", ErrServerInternal, fmt.Errorf("failed to build dialer: %w", err).Error())
+ return false, fmt.Errorf("failed to build dialer: %w", err)
}
transport := &http.Transport{
@@ -167,7 +167,7 @@ func ValidateHTTP01Challenge(domain string, token string, thumbprint string, con
resp, err := client.Get(path)
if err != nil {
- return false, fmt.Errorf("%w: %s", ErrConnection, fmt.Errorf("http-01: failed to fetch path %v: %w", path, err).Error())
+ return false, fmt.Errorf("http-01: failed to fetch path %v: %w", path, err)
}
// We provision a buffer which allows for a variable size challenge, some
@@ -180,15 +180,15 @@ func ValidateHTTP01Challenge(domain string, token string, thumbprint string, con
// Attempt to read the body, but don't do so infinitely.
body, err := io.ReadAll(io.LimitReader(resp.Body, int64(maxExpected+1)))
if err != nil {
- return false, fmt.Errorf("%w: %s", ErrIncorrectResponse, fmt.Errorf("http-01: unexpected error while reading body: %w", err).Error())
+ return false, fmt.Errorf("http-01: unexpected error while reading body: %w", err)
}
if len(body) > maxExpected {
- return false, fmt.Errorf("%w: %s", ErrMalformed, fmt.Errorf("http-01: response too large: received %v > %v bytes", len(body), maxExpected).Error())
+ return false, fmt.Errorf("http-01: response too large: received %v > %v bytes", len(body), maxExpected)
}
if len(body) < minExpected {
- return false, fmt.Errorf("%w: %s", ErrMalformed, fmt.Errorf("http-01: response too small: received %v < %v bytes", len(body), minExpected).Error())
+ return false, fmt.Errorf("http-01: response too small: received %v < %v bytes", len(body), minExpected)
}
// Per RFC 8555 Section 8.3. HTTP Challenge:
@@ -215,7 +215,7 @@ func ValidateDNS01Challenge(domain string, token string, thumbprint string, conf
// 2. To use a context to set stricter timeout limits.
resolver, err := buildResolver(config)
if err != nil {
- return false, fmt.Errorf("%w: %s", ErrServerInternal, fmt.Errorf("failed to build resolver: %w", err).Error())
+ return false, fmt.Errorf("failed to build resolver: %w", err)
}
ctx, cancel := context.WithTimeout(context.Background(), 30*time.Second)
@@ -224,7 +224,7 @@ func ValidateDNS01Challenge(domain string, token string, thumbprint string, conf
name := DNSChallengePrefix + domain
results, err := resolver.LookupTXT(ctx, name)
if err != nil {
- return false, fmt.Errorf("%w: %s", ErrDNS, fmt.Errorf("dns-01: failed to lookup TXT records for domain (%v) via resolver %v: %w", name, config.DNSResolver, err).Error())
+ return false, fmt.Errorf("dns-01: failed to lookup TXT records for domain (%v) via resolver %v: %w", name, config.DNSResolver, err)
}
for _, keyAuthz := range results {
@@ -234,7 +234,7 @@ func ValidateDNS01Challenge(domain string, token string, thumbprint string, conf
}
}
- return false, fmt.Errorf("%w: %s", ErrDNS, fmt.Errorf("dns-01: challenge failed against %v records", len(results)).Error())
+ return false, fmt.Errorf("dns-01: challenge failed against %v records", len(results))
}
func ValidateTLSALPN01Challenge(domain string, token string, thumbprint string, config *acmeConfigEntry) (bool, error) {
@@ -456,7 +456,7 @@ func ValidateTLSALPN01Challenge(domain string, token string, thumbprint string,
// resolved according to configuration.
dialer, err := buildDialerConfig(config)
if err != nil {
- return false, fmt.Errorf("%w: %s", ErrServerInternal, fmt.Errorf("failed to build dialer: %w", err).Error())
+ return false, fmt.Errorf("failed to build dialer: %w", err)
}
// Per RFC 8737 Section 3. TLS with Application-Layer Protocol
@@ -471,7 +471,7 @@ func ValidateTLSALPN01Challenge(domain string, token string, thumbprint string,
address := fmt.Sprintf("%v:"+ALPNPort, domain)
conn, err := dialer.Dial("tcp", address)
if err != nil {
- return false, fmt.Errorf("%w: %s", ErrConnection, fmt.Errorf("tls-alpn-01: failed to dial host: %w", err).Error())
+ return false, fmt.Errorf("tls-alpn-01: failed to dial host: %w", err)
}
// Initiate the connection to the remote peer.
@@ -496,7 +496,7 @@ func ValidateTLSALPN01Challenge(domain string, token string, thumbprint string,
// See note above about why we can allow Handshake to complete
// successfully.
if err := client.HandshakeContext(ctx); err != nil {
- return false, fmt.Errorf("%w: %s", ErrTLS, fmt.Errorf("tls-alpn-01: failed to perform handshake: %w", err).Error())
+ return false, fmt.Errorf("tls-alpn-01: failed to perform handshake: %w", err)
}
return true, nil
}
diff --git a/builtin/logical/pki/acme_errors.go b/builtin/logical/pki/acme_errors.go
index e82920a7383c..3c9c059f7d22 100644
--- a/builtin/logical/pki/acme_errors.go
+++ b/builtin/logical/pki/acme_errors.go
@@ -142,7 +142,8 @@ func (e *ErrorResponse) Marshal() (*logical.Response, error) {
return &resp, nil
}
-func FindType(given error) (err error, id string, code int, matchedError bool) {
+func FindType(given error) (err error, id string, code int, found bool) {
+ matchedError := false
for err, id = range errIdMappings {
if errors.Is(given, err) {
matchedError = true
diff --git a/builtin/logical/pki/backend_test.go b/builtin/logical/pki/backend_test.go
index 3cdd73833eb5..ec179dbb84ef 100644
--- a/builtin/logical/pki/backend_test.go
+++ b/builtin/logical/pki/backend_test.go
@@ -6103,7 +6103,6 @@ func TestPKI_EmptyCRLConfigUpgraded(t *testing.T) {
require.Equal(t, resp.Data["auto_rebuild_grace_period"], pki_backend.DefaultCrlConfig.AutoRebuildGracePeriod)
require.Equal(t, resp.Data["enable_delta"], pki_backend.DefaultCrlConfig.EnableDelta)
require.Equal(t, resp.Data["delta_rebuild_interval"], pki_backend.DefaultCrlConfig.DeltaRebuildInterval)
- require.Equal(t, resp.Data["max_crl_entries"], pki_backend.DefaultCrlConfig.MaxCRLEntries)
}
func TestPKI_ListRevokedCerts(t *testing.T) {
@@ -7081,16 +7080,15 @@ func TestProperAuthing(t *testing.T) {
}
}
-type patchIssuerTestCase struct {
- Field string
- Before interface{}
- Patched interface{}
-}
-
func TestPatchIssuer(t *testing.T) {
t.Parallel()
- testCases := []patchIssuerTestCase{
+ type TestCase struct {
+ Field string
+ Before interface{}
+ Patched interface{}
+ }
+ testCases := []TestCase{
{
Field: "issuer_name",
Before: "root",
@@ -7137,82 +7135,65 @@ func TestPatchIssuer(t *testing.T) {
Patched: []string{"self"},
},
}
- testPatchIssuer(t, testCases)
-}
-func testPatchIssuer(t *testing.T, testCases []patchIssuerTestCase) {
- for _, testCase := range testCases {
- t.Run(testCase.Field, func(t *testing.T) {
- b, s := CreateBackendWithStorage(t)
-
- // 1. Setup root issuer.
- resp, err := CBWrite(b, s, "root/generate/internal", map[string]interface{}{
- "common_name": "Vault Root CA",
- "key_type": "ec",
- "ttl": "7200h",
- "issuer_name": "root",
- })
- requireSuccessNonNilResponse(t, resp, err, "failed generating root issuer")
- id := string(resp.Data["issuer_id"].(issuing.IssuerID))
+ for index, testCase := range testCases {
+ t.Logf("index: %v / tc: %v", index, testCase)
- // 2. Enable Cluster paths
- resp, err = CBWrite(b, s, "config/urls", map[string]interface{}{
- "path": "https://localhost/v1/pki",
- "aia_path": "http://localhost/v1/pki",
- })
- requireSuccessNonNilResponse(t, resp, err, "failed updating AIA config")
+ b, s := CreateBackendWithStorage(t)
- // 3. Add AIA information
- resp, err = CBPatch(b, s, "issuer/default", map[string]interface{}{
- "issuing_certificates": "http://localhost/v1/pki-1/ca",
- "crl_distribution_points": "http://localhost/v1/pki-1/crl",
- "ocsp_servers": "http://localhost/v1/pki-1/ocsp",
- })
- requireSuccessNonNilResponse(t, resp, err, "failed setting up issuer")
-
- // 4. Read the issuer before.
- resp, err = CBRead(b, s, "issuer/default")
- requireSuccessNonNilResponse(t, resp, err, "failed reading root issuer before")
- require.Equal(t, testCase.Before, resp.Data[testCase.Field], "bad expectations")
-
- // 5. Perform modification.
- resp, err = CBPatch(b, s, "issuer/default", map[string]interface{}{
- testCase.Field: testCase.Patched,
- })
- requireSuccessNonNilResponse(t, resp, err, "failed patching root issuer")
+ // 1. Setup root issuer.
+ resp, err := CBWrite(b, s, "root/generate/internal", map[string]interface{}{
+ "common_name": "Vault Root CA",
+ "key_type": "ec",
+ "ttl": "7200h",
+ "issuer_name": "root",
+ })
+ requireSuccessNonNilResponse(t, resp, err, "failed generating root issuer")
+ id := string(resp.Data["issuer_id"].(issuing.IssuerID))
- if testCase.Field != "manual_chain" {
- require.Equal(t, testCase.Patched, resp.Data[testCase.Field], "failed persisting value")
- } else {
- // self->id
- require.Equal(t, []string{id}, resp.Data[testCase.Field], "failed persisting value")
- }
+ // 2. Enable Cluster paths
+ resp, err = CBWrite(b, s, "config/urls", map[string]interface{}{
+ "path": "https://localhost/v1/pki",
+ "aia_path": "http://localhost/v1/pki",
+ })
+ requireSuccessNonNilResponse(t, resp, err, "failed updating AIA config")
- // 6. Ensure it stuck
- resp, err = CBRead(b, s, "issuer/default")
- requireSuccessNonNilResponse(t, resp, err, "failed reading root issuer after")
+ // 3. Add AIA information
+ resp, err = CBPatch(b, s, "issuer/default", map[string]interface{}{
+ "issuing_certificates": "http://localhost/v1/pki-1/ca",
+ "crl_distribution_points": "http://localhost/v1/pki-1/crl",
+ "ocsp_servers": "http://localhost/v1/pki-1/ocsp",
+ })
+ requireSuccessNonNilResponse(t, resp, err, "failed setting up issuer")
- if testCase.Field != "manual_chain" {
- require.Equal(t, testCase.Patched, resp.Data[testCase.Field])
- } else {
- // self->id
- require.Equal(t, []string{id}, resp.Data[testCase.Field], "failed persisting value")
- }
+ // 4. Read the issuer before.
+ resp, err = CBRead(b, s, "issuer/default")
+ requireSuccessNonNilResponse(t, resp, err, "failed reading root issuer before")
+ require.Equal(t, testCase.Before, resp.Data[testCase.Field], "bad expectations")
- // 7. Patch it back
- resp, err = CBPatch(b, s, "issuer/default", map[string]interface{}{
- testCase.Field: testCase.Before,
- })
- requireSuccessNonNilResponse(t, resp, err, "failed patching root issuer")
+ // 5. Perform modification.
+ resp, err = CBPatch(b, s, "issuer/default", map[string]interface{}{
+ testCase.Field: testCase.Patched,
+ })
+ requireSuccessNonNilResponse(t, resp, err, "failed patching root issuer")
- require.Equal(t, testCase.Before, resp.Data[testCase.Field], "failed persisting value")
+ if testCase.Field != "manual_chain" {
+ require.Equal(t, testCase.Patched, resp.Data[testCase.Field], "failed persisting value")
+ } else {
+ // self->id
+ require.Equal(t, []string{id}, resp.Data[testCase.Field], "failed persisting value")
+ }
- // 8. Ensure it stuck
- resp, err = CBRead(b, s, "issuer/default")
- requireSuccessNonNilResponse(t, resp, err, "failed reading root issuer after")
+ // 6. Ensure it stuck
+ resp, err = CBRead(b, s, "issuer/default")
+ requireSuccessNonNilResponse(t, resp, err, "failed reading root issuer after")
- require.Equal(t, testCase.Before, resp.Data[testCase.Field])
- })
+ if testCase.Field != "manual_chain" {
+ require.Equal(t, testCase.Patched, resp.Data[testCase.Field])
+ } else {
+ // self->id
+ require.Equal(t, []string{id}, resp.Data[testCase.Field], "failed persisting value")
+ }
}
}
diff --git a/builtin/logical/pki/cert_util.go b/builtin/logical/pki/cert_util.go
index ae5d3504d465..5111699ab8c5 100644
--- a/builtin/logical/pki/cert_util.go
+++ b/builtin/logical/pki/cert_util.go
@@ -264,7 +264,7 @@ func fetchCertBySerial(sc pki_backend.StorageContext, prefix, serial string) (*l
default:
certCounter.IncrementTotalCertificatesCount(certsCounted, path)
}
- return nil, errutil.InternalError{Err: fmt.Sprintf("error deleting certificate with serial %s from old location: %s", serial, err)}
+ return nil, errutil.InternalError{Err: fmt.Sprintf("error deleting certificate with serial %s from old location", serial)}
}
return certEntry, nil
diff --git a/builtin/logical/pki/crl_test.go b/builtin/logical/pki/crl_test.go
index 7f26f49a780f..3c7848db78de 100644
--- a/builtin/logical/pki/crl_test.go
+++ b/builtin/logical/pki/crl_test.go
@@ -285,7 +285,7 @@ func crlEnableDisableTestForBackend(t *testing.T, b *backend, s logical.Storage,
}
serials := make(map[int]string)
- for i := 0; i < 7; i++ {
+ for i := 0; i < 6; i++ {
resp, err := CBWrite(b, s, "issue/test", map[string]interface{}{
"common_name": "test.foobar.com",
})
@@ -323,15 +323,11 @@ func crlEnableDisableTestForBackend(t *testing.T, b *backend, s logical.Storage,
}
}
- revoke := func(serialIndex int, errorText ...string) {
+ revoke := func(serialIndex int) {
_, err = CBWrite(b, s, "revoke", map[string]interface{}{
"serial_number": serials[serialIndex],
})
- if err != nil && len(errorText) == 1 {
- if strings.Contains(err.Error(), errorText[0]) {
- err = nil
- return
- }
+ if err != nil {
t.Fatal(err)
}
@@ -381,24 +377,6 @@ func crlEnableDisableTestForBackend(t *testing.T, b *backend, s logical.Storage,
crlCreationTime2 := getParsedCrlFromBackend(t, b, s, "crl").TBSCertList.ThisUpdate
require.NotEqual(t, crlCreationTime1, crlCreationTime2)
-
- // Set a limit, and test that it blocks building an over-large CRL
- CBWrite(b, s, "config/crl", map[string]interface{}{
- "max_crl_entries": 6,
- })
- revoke(6, "revocation list size (7) exceeds configured maximum (6)")
- test(6)
-
- _, err = CBRead(b, s, "crl/rotate")
- require.Error(t, err)
- require.True(t, strings.Contains(err.Error(), "revocation list size (7) exceeds configured maximum (6)"))
-
- // Set unlimited, and try again
- CBWrite(b, s, "config/crl", map[string]interface{}{
- "max_crl_entries": -1,
- })
- _, err = CBRead(b, s, "crl/rotate")
- require.NoError(t, err)
}
func TestBackend_Secondary_CRL_Rebuilding(t *testing.T) {
diff --git a/builtin/logical/pki/crl_util.go b/builtin/logical/pki/crl_util.go
index 99ec95a79757..def00a5f11c6 100644
--- a/builtin/logical/pki/crl_util.go
+++ b/builtin/logical/pki/crl_util.go
@@ -1768,20 +1768,6 @@ func buildAnyCRLsWithCerts(
internalCRLConfig.LastModified = time.Now().UTC()
}
- // Enforce the max CRL size guard before building the actual CRL
- if globalCRLConfig.MaxCRLEntries > -1 {
- limit := maxCRLEntriesOrDefault(globalCRLConfig.MaxCRLEntries)
- revokedCount := len(revokedCerts)
- if revokedCount > limit {
- // Also log a nasty error to get the operator's attention
- sc.Logger().Error("CRL was not updated, as it exceeds the configured max size. The CRL now does not contain all revoked certificates! This may be indicative of a runaway issuance/revocation pattern.", "limit", limit)
- return nil, fmt.Errorf("error building CRL: revocation list size (%d) exceeds configured maximum (%d)", revokedCount, limit)
- }
- if revokedCount > int(float32(limit)*0.90) {
- sc.Logger().Warn("warning, revoked certificate count is within 10% of the configured maximum CRL size", "revoked_certs", revokedCount, "limit", limit)
- }
- }
-
// Lastly, build the CRL.
nextUpdate, err := buildCRL(sc, globalCRLConfig, forceNew, representative, revokedCerts, crlIdentifier, crlNumber, isUnified, isDelta, lastCompleteNumber)
if err != nil {
diff --git a/builtin/logical/pki/issuing/issuers.go b/builtin/logical/pki/issuing/issuers.go
index 47bcdd5b7361..9ceff5b13cf2 100644
--- a/builtin/logical/pki/issuing/issuers.go
+++ b/builtin/logical/pki/issuing/issuers.go
@@ -137,7 +137,6 @@ type IssuerEntry struct {
AIAURIs *AiaConfigEntry `json:"aia_uris,omitempty"`
LastModified time.Time `json:"last_modified"`
Version uint `json:"version"`
- entIssuerEntry
}
// GetCertificate returns a x509.Certificate of the CA certificate
diff --git a/builtin/logical/pki/issuing/issuers_oss.go b/builtin/logical/pki/issuing/issuers_oss.go
deleted file mode 100644
index b6202a98dc91..000000000000
--- a/builtin/logical/pki/issuing/issuers_oss.go
+++ /dev/null
@@ -1,8 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-//go:build !enterprise
-
-package issuing
-
-type entIssuerEntry struct{}
diff --git a/builtin/logical/pki/metadata.pb.go b/builtin/logical/pki/metadata.pb.go
index fe8da60ea29f..209f5fcce0f7 100644
--- a/builtin/logical/pki/metadata.pb.go
+++ b/builtin/logical/pki/metadata.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: builtin/logical/pki/metadata.proto
@@ -37,9 +37,11 @@ type CertificateMetadata struct {
func (x *CertificateMetadata) Reset() {
*x = CertificateMetadata{}
- mi := &file_builtin_logical_pki_metadata_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_builtin_logical_pki_metadata_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *CertificateMetadata) String() string {
@@ -50,7 +52,7 @@ func (*CertificateMetadata) ProtoMessage() {}
func (x *CertificateMetadata) ProtoReflect() protoreflect.Message {
mi := &file_builtin_logical_pki_metadata_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -150,6 +152,20 @@ func file_builtin_logical_pki_metadata_proto_init() {
if File_builtin_logical_pki_metadata_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_builtin_logical_pki_metadata_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*CertificateMetadata); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
file_builtin_logical_pki_metadata_proto_msgTypes[0].OneofWrappers = []any{}
type x struct{}
out := protoimpl.TypeBuilder{
diff --git a/builtin/logical/pki/path_config_crl.go b/builtin/logical/pki/path_config_crl.go
index a9cdcf5ca1cd..81815ff21592 100644
--- a/builtin/logical/pki/path_config_crl.go
+++ b/builtin/logical/pki/path_config_crl.go
@@ -16,70 +16,6 @@ import (
"github.com/hashicorp/vault/sdk/logical"
)
-var configCRLFields = map[string]*framework.FieldSchema{
- "expiry": {
- Type: framework.TypeString,
- Description: `The amount of time the generated CRL should be
-valid; defaults to 72 hours`,
- Default: "72h",
- },
- "disable": {
- Type: framework.TypeBool,
- Description: `If set to true, disables generating the CRL entirely.`,
- },
- "ocsp_disable": {
- Type: framework.TypeBool,
- Description: `If set to true, ocsp unauthorized responses will be returned.`,
- },
- "ocsp_expiry": {
- Type: framework.TypeString,
- Description: `The amount of time an OCSP response will be valid (controls
-the NextUpdate field); defaults to 12 hours`,
- Default: "1h",
- },
- "auto_rebuild": {
- Type: framework.TypeBool,
- Description: `If set to true, enables automatic rebuilding of the CRL`,
- },
- "auto_rebuild_grace_period": {
- Type: framework.TypeString,
- Description: `The time before the CRL expires to automatically rebuild it, when enabled. Must be shorter than the CRL expiry. Defaults to 12h.`,
- Default: "12h",
- },
- "enable_delta": {
- Type: framework.TypeBool,
- Description: `Whether to enable delta CRLs between authoritative CRL rebuilds`,
- },
- "delta_rebuild_interval": {
- Type: framework.TypeString,
- Description: `The time between delta CRL rebuilds if a new revocation has occurred. Must be shorter than the CRL expiry. Defaults to 15m.`,
- Default: "15m",
- },
- "cross_cluster_revocation": {
- Type: framework.TypeBool,
- Description: `Whether to enable a global, cross-cluster revocation queue.
-Must be used with auto_rebuild=true.`,
- },
- "unified_crl": {
- Type: framework.TypeBool,
- Description: `If set to true enables global replication of revocation entries,
-also enabling unified versions of OCSP and CRLs if their respective features are enabled.
-disable for CRLs and ocsp_disable for OCSP.`,
- Default: "false",
- },
- "unified_crl_on_existing_paths": {
- Type: framework.TypeBool,
- Description: `If set to true,
-existing CRL and OCSP paths will return the unified CRL instead of a response based on cluster-local data`,
- Default: "false",
- },
- "max_crl_entries": {
- Type: framework.TypeInt,
- Description: `The maximum number of entries the CRL can contain. This is meant as a guard against accidental runaway revocations overloading Vault storage. If this limit is exceeded writing the CRL will fail. If set to -1 this limit is disabled.`,
- Default: pki_backend.DefaultCrlConfig.MaxCRLEntries,
- },
-}
-
func pathConfigCRL(b *backend) *framework.Path {
return &framework.Path{
Pattern: "config/crl",
@@ -88,7 +24,65 @@ func pathConfigCRL(b *backend) *framework.Path {
OperationPrefix: operationPrefixPKI,
},
- Fields: configCRLFields,
+ Fields: map[string]*framework.FieldSchema{
+ "expiry": {
+ Type: framework.TypeString,
+ Description: `The amount of time the generated CRL should be
+valid; defaults to 72 hours`,
+ Default: "72h",
+ },
+ "disable": {
+ Type: framework.TypeBool,
+ Description: `If set to true, disables generating the CRL entirely.`,
+ },
+ "ocsp_disable": {
+ Type: framework.TypeBool,
+ Description: `If set to true, ocsp unauthorized responses will be returned.`,
+ },
+ "ocsp_expiry": {
+ Type: framework.TypeString,
+ Description: `The amount of time an OCSP response will be valid (controls
+the NextUpdate field); defaults to 1 hour`,
+ Default: "1h",
+ },
+ "auto_rebuild": {
+ Type: framework.TypeBool,
+ Description: `If set to true, enables automatic rebuilding of the CRL`,
+ },
+ "auto_rebuild_grace_period": {
+ Type: framework.TypeString,
+ Description: `The time before the CRL expires to automatically rebuild it, when enabled. Must be shorter than the CRL expiry. Defaults to 12h.`,
+ Default: "12h",
+ },
+ "enable_delta": {
+ Type: framework.TypeBool,
+ Description: `Whether to enable delta CRLs between authoritative CRL rebuilds`,
+ },
+ "delta_rebuild_interval": {
+ Type: framework.TypeString,
+ Description: `The time between delta CRL rebuilds if a new revocation has occurred. Must be shorter than the CRL expiry. Defaults to 15m.`,
+ Default: "15m",
+ },
+ "cross_cluster_revocation": {
+ Type: framework.TypeBool,
+ Description: `Whether to enable a global, cross-cluster revocation queue.
+Must be used with auto_rebuild=true.`,
+ },
+ "unified_crl": {
+ Type: framework.TypeBool,
+ Description: `If set to true enables global replication of revocation entries,
+also enabling unified versions of OCSP and CRLs if their respective features are enabled.
+disable for CRLs and ocsp_disable for OCSP.`,
+ Default: "false",
+ },
+ "unified_crl_on_existing_paths": {
+ Type: framework.TypeBool,
+ Description: `If set to true,
+existing CRL and OCSP paths will return the unified CRL instead of a response based on cluster-local data`,
+ Default: "false",
+ },
+ },
+
Operations: map[logical.Operation]framework.OperationHandler{
logical.ReadOperation: &framework.PathOperation{
DisplayAttrs: &framework.DisplayAttributes{
@@ -98,7 +92,69 @@ func pathConfigCRL(b *backend) *framework.Path {
Responses: map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
- Fields: configCRLFields,
+ Fields: map[string]*framework.FieldSchema{
+ "expiry": {
+ Type: framework.TypeString,
+ Description: `The amount of time the generated CRL should be
+valid; defaults to 72 hours`,
+ Required: true,
+ },
+ "disable": {
+ Type: framework.TypeBool,
+ Description: `If set to true, disables generating the CRL entirely.`,
+ Required: true,
+ },
+ "ocsp_disable": {
+ Type: framework.TypeBool,
+ Description: `If set to true, ocsp unauthorized responses will be returned.`,
+ Required: true,
+ },
+ "ocsp_expiry": {
+ Type: framework.TypeString,
+ Description: `The amount of time an OCSP response will be valid (controls
+the NextUpdate field); defaults to 1 hour`,
+ Required: true,
+ },
+ "auto_rebuild": {
+ Type: framework.TypeBool,
+ Description: `If set to true, enables automatic rebuilding of the CRL`,
+ Required: true,
+ },
+ "auto_rebuild_grace_period": {
+ Type: framework.TypeString,
+ Description: `The time before the CRL expires to automatically rebuild it, when enabled. Must be shorter than the CRL expiry. Defaults to 12h.`,
+ Required: true,
+ },
+ "enable_delta": {
+ Type: framework.TypeBool,
+ Description: `Whether to enable delta CRLs between authoritative CRL rebuilds`,
+ Required: true,
+ },
+ "delta_rebuild_interval": {
+ Type: framework.TypeString,
+ Description: `The time between delta CRL rebuilds if a new revocation has occurred. Must be shorter than the CRL expiry. Defaults to 15m.`,
+ Required: true,
+ },
+ "cross_cluster_revocation": {
+ Type: framework.TypeBool,
+ Description: `Whether to enable a global, cross-cluster revocation queue.
+Must be used with auto_rebuild=true.`,
+ Required: true,
+ },
+ "unified_crl": {
+ Type: framework.TypeBool,
+ Description: `If set to true enables global replication of revocation entries,
+also enabling unified versions of OCSP and CRLs if their respective features are enabled.
+disable for CRLs and ocsp_disable for OCSP.`,
+ Required: true,
+ },
+ "unified_crl_on_existing_paths": {
+ Type: framework.TypeBool,
+ Description: `If set to true,
+existing CRL and OCSP paths will return the unified CRL instead of a response based on cluster-local data`,
+ Required: true,
+ },
+ },
}},
},
},
@@ -111,7 +167,65 @@ func pathConfigCRL(b *backend) *framework.Path {
Responses: map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
- Fields: configCRLFields,
+ Fields: map[string]*framework.FieldSchema{
+ "expiry": {
+ Type: framework.TypeString,
+ Description: `The amount of time the generated CRL should be
+valid; defaults to 72 hours`,
+ Default: "72h",
+ },
+ "disable": {
+ Type: framework.TypeBool,
+ Description: `If set to true, disables generating the CRL entirely.`,
+ },
+ "ocsp_disable": {
+ Type: framework.TypeBool,
+ Description: `If set to true, ocsp unauthorized responses will be returned.`,
+ },
+ "ocsp_expiry": {
+ Type: framework.TypeString,
+ Description: `The amount of time an OCSP response will be valid (controls
+the NextUpdate field); defaults to 1 hour`,
+ Default: "1h",
+ },
+ "auto_rebuild": {
+ Type: framework.TypeBool,
+ Description: `If set to true, enables automatic rebuilding of the CRL`,
+ },
+ "auto_rebuild_grace_period": {
+ Type: framework.TypeString,
+ Description: `The time before the CRL expires to automatically rebuild it, when enabled. Must be shorter than the CRL expiry. Defaults to 12h.`,
+ Default: "12h",
+ },
+ "enable_delta": {
+ Type: framework.TypeBool,
+ Description: `Whether to enable delta CRLs between authoritative CRL rebuilds`,
+ },
+ "delta_rebuild_interval": {
+ Type: framework.TypeString,
+ Description: `The time between delta CRL rebuilds if a new revocation has occurred. Must be shorter than the CRL expiry. Defaults to 15m.`,
+ Default: "15m",
+ },
+ "cross_cluster_revocation": {
+ Type: framework.TypeBool,
+ Description: `Whether to enable a global, cross-cluster revocation queue.
+Must be used with auto_rebuild=true.`,
+ Required: false,
+ },
+ "unified_crl": {
+ Type: framework.TypeBool,
+ Description: `If set to true enables global replication of revocation entries,
+also enabling unified versions of OCSP and CRLs if their respective features are enabled.
+disable for CRLs and ocsp_disable for OCSP.`,
+ Required: false,
+ },
+ "unified_crl_on_existing_paths": {
+ Type: framework.TypeBool,
+ Description: `If set to true,
+existing CRL and OCSP paths will return the unified CRL instead of a response based on cluster-local data`,
+ Required: false,
+ },
+ },
}},
},
// Read more about why these flags are set in backend.go.
@@ -212,13 +326,6 @@ func (b *backend) pathCRLWrite(ctx context.Context, req *logical.Request, d *fra
config.UnifiedCRLOnExistingPaths = unifiedCrlOnExistingPathsRaw.(bool)
}
- if maxCRLEntriesRaw, ok := d.GetOk("max_crl_entries"); ok {
- v := maxCRLEntriesRaw.(int)
- if v == -1 || v > 0 {
- config.MaxCRLEntries = v
- }
- }
-
if config.UnifiedCRLOnExistingPaths && !config.UnifiedCRL {
return logical.ErrorResponse("unified_crl_on_existing_paths cannot be enabled if unified_crl is disabled"), nil
}
@@ -301,13 +408,6 @@ func (b *backend) pathCRLWrite(ctx context.Context, req *logical.Request, d *fra
return resp, nil
}
-func maxCRLEntriesOrDefault(size int) int {
- if size == 0 {
- return pki_backend.DefaultCrlConfig.MaxCRLEntries
- }
- return size
-}
-
func genResponseFromCrlConfig(config *pki_backend.CrlConfig) *logical.Response {
return &logical.Response{
Data: map[string]interface{}{
@@ -322,7 +422,6 @@ func genResponseFromCrlConfig(config *pki_backend.CrlConfig) *logical.Response {
"cross_cluster_revocation": config.UseGlobalQueue,
"unified_crl": config.UnifiedCRL,
"unified_crl_on_existing_paths": config.UnifiedCRLOnExistingPaths,
- "max_crl_entries": maxCRLEntriesOrDefault(config.MaxCRLEntries),
},
}
}
diff --git a/builtin/logical/pki/path_fetch_issuers.go b/builtin/logical/pki/path_fetch_issuers.go
index fa21443a5997..4d82f38b4bd8 100644
--- a/builtin/logical/pki/path_fetch_issuers.go
+++ b/builtin/logical/pki/path_fetch_issuers.go
@@ -209,13 +209,90 @@ and using '{{cluster_aia_path}}' requires /config/cluster's 'aia_path' member
to be set on all PR secondary clusters.`,
Default: false,
}
- addEntPathIssuerFields(fields)
- updateIssuerFields := issuerResponseFields(false)
updateIssuerSchema := map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
- Fields: updateIssuerFields,
+ Fields: map[string]*framework.FieldSchema{
+ "issuer_id": {
+ Type: framework.TypeString,
+ Description: `Issuer Id`,
+ Required: false,
+ },
+ "issuer_name": {
+ Type: framework.TypeString,
+ Description: `Issuer Name`,
+ Required: false,
+ },
+ "key_id": {
+ Type: framework.TypeString,
+ Description: `Key Id`,
+ Required: false,
+ },
+ "certificate": {
+ Type: framework.TypeString,
+ Description: `Certificate`,
+ Required: false,
+ },
+ "manual_chain": {
+ Type: framework.TypeStringSlice,
+ Description: `Manual Chain`,
+ Required: false,
+ },
+ "ca_chain": {
+ Type: framework.TypeStringSlice,
+ Description: `CA Chain`,
+ Required: false,
+ },
+ "leaf_not_after_behavior": {
+ Type: framework.TypeString,
+ Description: `Leaf Not After Behavior`,
+ Required: false,
+ },
+ "usage": {
+ Type: framework.TypeString,
+ Description: `Usage`,
+ Required: false,
+ },
+ "revocation_signature_algorithm": {
+ Type: framework.TypeString,
+ Description: `Revocation Signature Algorithm`,
+ Required: false,
+ },
+ "revoked": {
+ Type: framework.TypeBool,
+ Description: `Revoked`,
+ Required: false,
+ },
+ "revocation_time": {
+ Type: framework.TypeInt,
+ Required: false,
+ },
+ "revocation_time_rfc3339": {
+ Type: framework.TypeString,
+ Required: false,
+ },
+ "issuing_certificates": {
+ Type: framework.TypeStringSlice,
+ Description: `Issuing Certificates`,
+ Required: false,
+ },
+ "crl_distribution_points": {
+ Type: framework.TypeStringSlice,
+ Description: `CRL Distribution Points`,
+ Required: false,
+ },
+ "ocsp_servers": {
+ Type: framework.TypeStringSlice,
+ Description: `OCSP Servers`,
+ Required: false,
+ },
+ "enable_aia_url_templating": {
+ Type: framework.TypeBool,
+ Description: `Whether or not templating is enabled for AIA fields`,
+ Required: false,
+ },
+ },
}},
}
@@ -263,95 +340,6 @@ to be set on all PR secondary clusters.`,
}
}
-func issuerResponseFields(required bool) map[string]*framework.FieldSchema {
- fields := map[string]*framework.FieldSchema{
- "issuer_id": {
- Type: framework.TypeString,
- Description: `Issuer Id`,
- Required: required,
- },
- "issuer_name": {
- Type: framework.TypeString,
- Description: `Issuer Name`,
- Required: required,
- },
- "key_id": {
- Type: framework.TypeString,
- Description: `Key Id`,
- Required: required,
- },
- "certificate": {
- Type: framework.TypeString,
- Description: `Certificate`,
- Required: required,
- },
- "manual_chain": {
- Type: framework.TypeStringSlice,
- Description: `Manual Chain`,
- Required: required,
- },
- "ca_chain": {
- Type: framework.TypeStringSlice,
- Description: `CA Chain`,
- Required: required,
- },
- "leaf_not_after_behavior": {
- Type: framework.TypeString,
- Description: `Leaf Not After Behavior`,
- Required: required,
- },
- "usage": {
- Type: framework.TypeString,
- Description: `Usage`,
- Required: required,
- },
- "revocation_signature_algorithm": {
- Type: framework.TypeString,
- Description: `Revocation Signature Alogrithm`,
- Required: required,
- },
- "revoked": {
- Type: framework.TypeBool,
- Description: `Revoked`,
- Required: required,
- },
- "revocation_time": {
- Type: framework.TypeInt,
- Description: `Revocation time`,
- Required: required,
- },
- "revocation_time_rfc3339": {
- Type: framework.TypeString,
- Description: `Revocation time RFC 3339 formatted`,
- Required: required,
- },
- "issuing_certificates": {
- Type: framework.TypeStringSlice,
- Description: `Issuing Certificates`,
- Required: required,
- },
- "crl_distribution_points": {
- Type: framework.TypeStringSlice,
- Description: `CRL Distribution Points`,
- Required: required,
- },
- "ocsp_servers": {
- Type: framework.TypeStringSlice,
- Description: `OCSP Servers`,
- Required: required,
- },
- "enable_aia_url_templating": {
- Type: framework.TypeBool,
- Description: `Whether or not templating is enabled for AIA fields`,
- Required: required,
- },
- }
-
- addEntPathIssuerResponseFields(fields)
-
- return fields
-}
-
func buildPathGetIssuer(b *backend, pattern string, displayAttrs *framework.DisplayAttributes) *framework.Path {
fields := map[string]*framework.FieldSchema{}
fields = addIssuerRefField(fields)
@@ -466,7 +454,6 @@ func respondReadIssuer(issuer *issuing.IssuerEntry) (*logical.Response, error) {
"crl_distribution_points": []string{},
"ocsp_servers": []string{},
}
- setEntIssuerData(data, issuer)
if issuer.Revoked {
data["revocation_time"] = issuer.RevocationTime
@@ -478,8 +465,6 @@ func respondReadIssuer(issuer *issuing.IssuerEntry) (*logical.Response, error) {
data["crl_distribution_points"] = issuer.AIAURIs.CRLDistributionPoints
data["ocsp_servers"] = issuer.AIAURIs.OCSPServers
data["enable_aia_url_templating"] = issuer.AIAURIs.EnableTemplating
- } else {
- data["enable_aia_url_templating"] = false
}
response := &logical.Response{
@@ -679,10 +664,6 @@ func (b *backend) pathUpdateIssuer(ctx context.Context, req *logical.Request, da
}
}
- if updateEntIssuerFields(issuer, data, false) {
- modified = true
- }
-
// Updating the chain should be the last modification as there's a chance
// it'll write it out to disk for us. We'd hate to then modify the issuer
// again and write it a second time.
@@ -988,10 +969,6 @@ func (b *backend) pathPatchIssuer(ctx context.Context, req *logical.Request, dat
}
}
- if updateEntIssuerFields(issuer, data, true) {
- modified = true
- }
-
if modified {
err := sc.writeIssuer(issuer)
if err != nil {
diff --git a/builtin/logical/pki/path_fetch_issuers_stubs_oss.go b/builtin/logical/pki/path_fetch_issuers_stubs_oss.go
deleted file mode 100644
index 813de757f5d7..000000000000
--- a/builtin/logical/pki/path_fetch_issuers_stubs_oss.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-//go:build !enterprise
-
-package pki
-
-import (
- "github.com/hashicorp/vault/builtin/logical/pki/issuing"
- "github.com/hashicorp/vault/sdk/framework"
-)
-
-//go:generate go run github.com/hashicorp/vault/tools/stubmaker
-
-func addEntPathIssuerFields(fields map[string]*framework.FieldSchema) {}
-func addEntPathIssuerResponseFields(fields map[string]*framework.FieldSchema) {}
-func setEntIssuerData(data map[string]any, issuer *issuing.IssuerEntry) {}
-
-func updateEntIssuerFields(issuer *issuing.IssuerEntry, data *framework.FieldData, ignoreNotPresent bool) bool {
- return false
-}
diff --git a/builtin/logical/pki/path_manage_issuers.go b/builtin/logical/pki/path_manage_issuers.go
index 23ff242c8dec..01c0d6653063 100644
--- a/builtin/logical/pki/path_manage_issuers.go
+++ b/builtin/logical/pki/path_manage_issuers.go
@@ -516,7 +516,6 @@ secret-keys.
func pathRevokeIssuer(b *backend) *framework.Path {
fields := addIssuerRefField(map[string]*framework.FieldSchema{})
- responseFields := issuerResponseFields(true)
return &framework.Path{
Pattern: "issuer/" + framework.GenericNameRegex(issuerRefParam) + "/revoke",
@@ -535,7 +534,83 @@ func pathRevokeIssuer(b *backend) *framework.Path {
Responses: map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
- Fields: responseFields,
+ Fields: map[string]*framework.FieldSchema{
+ "issuer_id": {
+ Type: framework.TypeString,
+ Description: `ID of the issuer`,
+ Required: true,
+ },
+ "issuer_name": {
+ Type: framework.TypeString,
+ Description: `Name of the issuer`,
+ Required: true,
+ },
+ "key_id": {
+ Type: framework.TypeString,
+ Description: `ID of the Key`,
+ Required: true,
+ },
+ "certificate": {
+ Type: framework.TypeString,
+ Description: `Certificate`,
+ Required: true,
+ },
+ "manual_chain": {
+ Type: framework.TypeCommaStringSlice,
+ Description: `Manual Chain`,
+ Required: true,
+ },
+ "ca_chain": {
+ Type: framework.TypeCommaStringSlice,
+ Description: `Certificate Authority Chain`,
+ Required: true,
+ },
+ "leaf_not_after_behavior": {
+ Type: framework.TypeString,
+ Description: `Leaf Not After Behavior`,
+ Required: true,
+ },
+ "usage": {
+ Type: framework.TypeString,
+ Description: `Allowed usage`,
+ Required: true,
+ },
+ "revocation_signature_algorithm": {
+ Type: framework.TypeString,
+ Description: `Which signature algorithm to use when building CRLs`,
+ Required: true,
+ },
+ "revoked": {
+ Type: framework.TypeBool,
+ Description: `Whether the issuer was revoked`,
+ Required: true,
+ },
+ "issuing_certificates": {
+ Type: framework.TypeCommaStringSlice,
+ Description: `Specifies the URL values for the Issuing Certificate field`,
+ Required: true,
+ },
+ "crl_distribution_points": {
+ Type: framework.TypeStringSlice,
+ Description: `Specifies the URL values for the CRL Distribution Points field`,
+ Required: true,
+ },
+ "ocsp_servers": {
+ Type: framework.TypeStringSlice,
+ Description: `Specifies the URL values for the OCSP Servers field`,
+ Required: true,
+ },
+ "revocation_time": {
+ Type: framework.TypeInt64,
+ Description: `Time of revocation`,
+ Required: false,
+ },
+ "revocation_time_rfc3339": {
+ Type: framework.TypeTime,
+ Description: `RFC formatted time of revocation`,
+ Required: false,
+ },
+ },
}},
},
// Read more about why these flags are set in backend.go
diff --git a/builtin/logical/pki/path_tidy.go b/builtin/logical/pki/path_tidy.go
index e82cce13b991..5e7a4b037681 100644
--- a/builtin/logical/pki/path_tidy.go
+++ b/builtin/logical/pki/path_tidy.go
@@ -156,179 +156,6 @@ var defaultTidyConfig = tidyConfig{
CMPV2NonceStore: false,
}
-var tidyStatusResponseFields = map[string]*framework.FieldSchema{
- "safety_buffer": {
- Type: framework.TypeInt,
- Description: `Safety buffer time duration`,
- Required: true,
- },
- "issuer_safety_buffer": {
- Type: framework.TypeInt,
- Description: `Issuer safety buffer`,
- Required: true,
- },
- "revocation_queue_safety_buffer": {
- Type: framework.TypeInt,
- Description: `Revocation queue safety buffer`,
- Required: true,
- },
- "acme_account_safety_buffer": {
- Type: framework.TypeInt,
- Description: `Safety buffer after creation after which accounts lacking orders are revoked`,
- Required: false,
- },
- "tidy_cert_store": {
- Type: framework.TypeBool,
- Description: `Tidy certificate store`,
- Required: true,
- },
- "tidy_revoked_certs": {
- Type: framework.TypeBool,
- Description: `Tidy revoked certificates`,
- Required: true,
- },
- "tidy_revoked_cert_issuer_associations": {
- Type: framework.TypeBool,
- Description: `Tidy revoked certificate issuer associations`,
- Required: true,
- },
- "tidy_expired_issuers": {
- Type: framework.TypeBool,
- Description: `Tidy expired issuers`,
- Required: true,
- },
- "tidy_cross_cluster_revoked_certs": {
- Type: framework.TypeBool,
- Description: `Tidy the cross-cluster revoked certificate store`,
- Required: false,
- },
- "tidy_acme": {
- Type: framework.TypeBool,
- Description: `Tidy Unused Acme Accounts, and Orders`,
- Required: true,
- },
- "tidy_cert_metadata": {
- Type: framework.TypeBool,
- Description: `Tidy cert metadata`,
- Required: true,
- },
- "tidy_cmpv2_nonce_store": {
- Type: framework.TypeBool,
- Description: `Tidy CMPv2 nonce store`,
- Required: true,
- },
- "pause_duration": {
- Type: framework.TypeString,
- Description: `Duration to pause between tidying certificates`,
- Required: true,
- },
- "state": {
- Type: framework.TypeString,
- Description: `One of Inactive, Running, Finished, or Error`,
- Required: true,
- },
- "error": {
- Type: framework.TypeString,
- Description: `The error message`,
- Required: true,
- },
- "time_started": {
- Type: framework.TypeString,
- Description: `Time the operation started`,
- Required: true,
- },
- "time_finished": {
- Type: framework.TypeString,
- Description: `Time the operation finished`,
- Required: false,
- },
- "last_auto_tidy_finished": {
- Type: framework.TypeString,
- Description: `Time the last auto-tidy operation finished`,
- Required: true,
- },
- "message": {
- Type: framework.TypeString,
- Description: `Message of the operation`,
- Required: true,
- },
- "cert_store_deleted_count": {
- Type: framework.TypeInt,
- Description: `The number of certificate storage entries deleted`,
- Required: true,
- },
- "revoked_cert_deleted_count": {
- Type: framework.TypeInt,
- Description: `The number of revoked certificate entries deleted`,
- Required: true,
- },
- "current_cert_store_count": {
- Type: framework.TypeInt,
- Description: `The number of revoked certificate entries deleted`,
- Required: true,
- },
- "cross_revoked_cert_deleted_count": {
- Type: framework.TypeInt,
- Description: ``,
- Required: true,
- },
- "current_revoked_cert_count": {
- Type: framework.TypeInt,
- Description: `The number of revoked certificate entries deleted`,
- Required: true,
- },
- "revocation_queue_deleted_count": {
- Type: framework.TypeInt,
- Required: true,
- },
- "tidy_move_legacy_ca_bundle": {
- Type: framework.TypeBool,
- Required: true,
- },
- "tidy_revocation_queue": {
- Type: framework.TypeBool,
- Required: true,
- },
- "missing_issuer_cert_count": {
- Type: framework.TypeInt,
- Required: true,
- },
- "internal_backend_uuid": {
- Type: framework.TypeString,
- Required: true,
- },
- "total_acme_account_count": {
- Type: framework.TypeInt,
- Description: `Total number of acme accounts iterated over`,
- Required: false,
- },
- "acme_account_deleted_count": {
- Type: framework.TypeInt,
- Description: `The number of revoked acme accounts removed`,
- Required: false,
- },
- "acme_account_revoked_count": {
- Type: framework.TypeInt,
- Description: `The number of unused acme accounts revoked`,
- Required: false,
- },
- "acme_orders_deleted_count": {
- Type: framework.TypeInt,
- Description: `The number of expired, unused acme orders removed`,
- Required: false,
- },
- "cert_metadata_deleted_count": {
- Type: framework.TypeInt,
- Description: `The number of metadata entries removed`,
- Required: false,
- },
- "cmpv2_nonce_deleted_count": {
- Type: framework.TypeInt,
- Description: `The number of CMPv2 nonces removed`,
- Required: false,
- },
-}
-
func pathTidy(b *backend) *framework.Path {
return &framework.Path{
Pattern: "tidy$",
@@ -372,7 +199,177 @@ func pathTidyCancel(b *backend) *framework.Path {
Responses: map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
- Fields: tidyStatusResponseFields,
+ Fields: map[string]*framework.FieldSchema{
+ "safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Safety buffer time duration`,
+ Required: false,
+ },
+ "issuer_safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Issuer safety buffer`,
+ Required: false,
+ },
+ "revocation_queue_safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Revocation queue safety buffer`,
+ Required: true,
+ },
+ "tidy_cert_store": {
+ Type: framework.TypeBool,
+ Description: `Tidy certificate store`,
+ Required: false,
+ },
+ "tidy_revoked_certs": {
+ Type: framework.TypeBool,
+ Description: `Tidy revoked certificates`,
+ Required: false,
+ },
+ "tidy_revoked_cert_issuer_associations": {
+ Type: framework.TypeBool,
+ Description: `Tidy revoked certificate issuer associations`,
+ Required: false,
+ },
+ "tidy_acme": {
+ Type: framework.TypeBool,
+ Description: `Tidy Unused Acme Accounts, and Orders`,
+ Required: false,
+ },
+ "acme_account_safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Safety buffer after creation after which accounts lacking orders are revoked`,
+ Required: false,
+ },
+ "tidy_expired_issuers": {
+ Type: framework.TypeBool,
+ Description: `Tidy expired issuers`,
+ Required: false,
+ },
+ "tidy_cert_metadata": {
+ Type: framework.TypeBool,
+ Description: `Tidy cert metadata`,
+ Required: false,
+ },
+ "tidy_cmpv2_nonce_store": {
+ Type: framework.TypeBool,
+ Description: `Tidy CMPv2 nonce store`,
+ Required: false,
+ },
+ "pause_duration": {
+ Type: framework.TypeString,
+ Description: `Duration to pause between tidying certificates`,
+ Required: false,
+ },
+ "state": {
+ Type: framework.TypeString,
+ Description: `One of Inactive, Running, Finished, or Error`,
+ Required: false,
+ },
+ "error": {
+ Type: framework.TypeString,
+ Description: `The error message`,
+ Required: false,
+ },
+ "time_started": {
+ Type: framework.TypeString,
+ Description: `Time the operation started`,
+ Required: false,
+ },
+ "time_finished": {
+ Type: framework.TypeString,
+ Description: `Time the operation finished`,
+ Required: false,
+ },
+ "last_auto_tidy_finished": {
+ Type: framework.TypeString,
+ Description: `Time the last auto-tidy operation finished`,
+ Required: true,
+ },
+ "message": {
+ Type: framework.TypeString,
+ Description: `Message of the operation`,
+ Required: false,
+ },
+ "cert_store_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of certificate storage entries deleted`,
+ Required: false,
+ },
+ "revoked_cert_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of revoked certificate entries deleted`,
+ Required: false,
+ },
+ "current_cert_store_count": {
+ Type: framework.TypeInt,
+ Description: `The current number of certificate storage entries`,
+ Required: false,
+ },
+ "current_revoked_cert_count": {
+ Type: framework.TypeInt,
+ Description: `The current number of revoked certificate entries`,
+ Required: false,
+ },
+ "missing_issuer_cert_count": {
+ Type: framework.TypeInt,
+ Required: false,
+ },
+ "tidy_move_legacy_ca_bundle": {
+ Type: framework.TypeBool,
+ Required: false,
+ },
+ "tidy_cross_cluster_revoked_certs": {
+ Type: framework.TypeBool,
+ Description: `Tidy the cross-cluster revoked certificate store`,
+ Required: false,
+ },
+ "tidy_revocation_queue": {
+ Type: framework.TypeBool,
+ Required: false,
+ },
+ "revocation_queue_deleted_count": {
+ Type: framework.TypeInt,
+ Required: false,
+ },
+ "cross_revoked_cert_deleted_count": {
+ Type: framework.TypeInt,
+ Required: false,
+ },
+ "internal_backend_uuid": {
+ Type: framework.TypeString,
+ Required: false,
+ },
+ "total_acme_account_count": {
+ Type: framework.TypeInt,
+ Description: `Total number of acme accounts iterated over`,
+ Required: false,
+ },
+ "acme_account_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of revoked acme accounts removed`,
+ Required: false,
+ },
+ "acme_account_revoked_count": {
+ Type: framework.TypeInt,
+ Description: `The number of unused acme accounts revoked`,
+ Required: false,
+ },
+ "acme_orders_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of expired, unused acme orders removed`,
+ Required: false,
+ },
+ "cert_metadata_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of metadata entries removed`,
+ Required: false,
+ },
+ "cmpv2_nonce_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of CMPv2 nonces removed`,
+ Required: false,
+ },
+ },
}},
},
ForwardPerformanceStandby: true,
@@ -399,7 +396,178 @@ func pathTidyStatus(b *backend) *framework.Path {
Responses: map[int][]framework.Response{
http.StatusOK: {{
Description: "OK",
- Fields: tidyStatusResponseFields,
+ Fields: map[string]*framework.FieldSchema{
+ "safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Safety buffer time duration`,
+ Required: true,
+ },
+ "issuer_safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Issuer safety buffer`,
+ Required: true,
+ },
+ "revocation_queue_safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Revocation queue safety buffer`,
+ Required: true,
+ },
+ "acme_account_safety_buffer": {
+ Type: framework.TypeInt,
+ Description: `Safety buffer after creation after which accounts lacking orders are revoked`,
+ Required: false,
+ },
+ "tidy_cert_store": {
+ Type: framework.TypeBool,
+ Description: `Tidy certificate store`,
+ Required: true,
+ },
+ "tidy_revoked_certs": {
+ Type: framework.TypeBool,
+ Description: `Tidy revoked certificates`,
+ Required: true,
+ },
+ "tidy_revoked_cert_issuer_associations": {
+ Type: framework.TypeBool,
+ Description: `Tidy revoked certificate issuer associations`,
+ Required: true,
+ },
+ "tidy_expired_issuers": {
+ Type: framework.TypeBool,
+ Description: `Tidy expired issuers`,
+ Required: true,
+ },
+ "tidy_cross_cluster_revoked_certs": {
+ Type: framework.TypeBool,
+ Description: `Tidy the cross-cluster revoked certificate store`,
+ Required: false,
+ },
+ "tidy_acme": {
+ Type: framework.TypeBool,
+ Description: `Tidy Unused Acme Accounts, and Orders`,
+ Required: true,
+ },
+ "tidy_cert_metadata": {
+ Type: framework.TypeBool,
+ Description: `Tidy cert metadata`,
+ Required: true,
+ },
+ "tidy_cmpv2_nonce_store": {
+ Type: framework.TypeBool,
+ Description: `Tidy CMPv2 nonce store`,
+ Required: true,
+ },
+ "pause_duration": {
+ Type: framework.TypeString,
+ Description: `Duration to pause between tidying certificates`,
+ Required: true,
+ },
+ "state": {
+ Type: framework.TypeString,
+ Description: `One of Inactive, Running, Finished, or Error`,
+ Required: true,
+ },
+ "error": {
+ Type: framework.TypeString,
+ Description: `The error message`,
+ Required: true,
+ },
+ "time_started": {
+ Type: framework.TypeString,
+ Description: `Time the operation started`,
+ Required: true,
+ },
+ "time_finished": {
+ Type: framework.TypeString,
+ Description: `Time the operation finished`,
+ Required: false,
+ },
+ "last_auto_tidy_finished": {
+ Type: framework.TypeString,
+ Description: `Time the last auto-tidy operation finished`,
+ Required: true,
+ },
+ "message": {
+ Type: framework.TypeString,
+ Description: `Message of the operation`,
+ Required: true,
+ },
+ "cert_store_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of certificate storage entries deleted`,
+ Required: true,
+ },
+ "revoked_cert_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of revoked certificate entries deleted`,
+ Required: true,
+ },
+ "current_cert_store_count": {
+ Type: framework.TypeInt,
+ Description: `The current number of certificate storage entries`,
+ Required: true,
+ },
+ "cross_revoked_cert_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of cross-cluster revoked certificate entries deleted`,
+ Required: true,
+ },
+ "current_revoked_cert_count": {
+ Type: framework.TypeInt,
+ Description: `The current number of revoked certificate entries`,
+ Required: true,
+ },
+ "revocation_queue_deleted_count": {
+ Type: framework.TypeInt,
+ Required: true,
+ },
+ "tidy_move_legacy_ca_bundle": {
+ Type: framework.TypeBool,
+ Required: true,
+ },
+ "tidy_revocation_queue": {
+ Type: framework.TypeBool,
+ Required: true,
+ },
+ "missing_issuer_cert_count": {
+ Type: framework.TypeInt,
+ Required: true,
+ },
+ "internal_backend_uuid": {
+ Type: framework.TypeString,
+ Required: true,
+ },
+ "total_acme_account_count": {
+ Type: framework.TypeInt,
+ Description: `Total number of acme accounts iterated over`,
+ Required: false,
+ },
+ "acme_account_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of revoked acme accounts removed`,
+ Required: false,
+ },
+ "acme_account_revoked_count": {
+ Type: framework.TypeInt,
+ Description: `The number of unused acme accounts revoked`,
+ Required: false,
+ },
+ "acme_orders_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of expired, unused acme orders removed`,
+ Required: false,
+ },
+ "cert_metadata_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of metadata entries removed`,
+ Required: false,
+ },
+ "cmpv2_nonce_deleted_count": {
+ Type: framework.TypeInt,
+ Description: `The number of CMPv2 nonces removed`,
+ Required: false,
+ },
+ },
}},
},
ForwardPerformanceStandby: true,
@@ -1610,8 +1778,6 @@ func (b *backend) pathTidyStatusRead(_ context.Context, _ *logical.Request, _ *f
resp.Data["cmpv2_nonce_deleted_count"] = b.tidyStatus.cmpv2NonceDeletedCount
switch b.tidyStatus.state {
- case tidyStatusInactive:
- resp.Data["state"] = "Inactive"
case tidyStatusStarted:
resp.Data["state"] = "Running"
case tidyStatusFinished:
@@ -1846,7 +2012,7 @@ func (b *backend) tidyStatusStop(err error) {
b.tidyStatus.err = err
if err == nil {
b.tidyStatus.state = tidyStatusFinished
- } else if errors.Is(err, tidyCancelledError) {
+ } else if err == tidyCancelledError {
b.tidyStatus.state = tidyStatusCancelled
} else {
b.tidyStatus.state = tidyStatusError
diff --git a/builtin/logical/pki/pki_backend/crl_config.go b/builtin/logical/pki/pki_backend/crl_config.go
index 99ccf6f1208f..f37fbbbb3321 100644
--- a/builtin/logical/pki/pki_backend/crl_config.go
+++ b/builtin/logical/pki/pki_backend/crl_config.go
@@ -19,7 +19,6 @@ type CrlConfig struct {
UseGlobalQueue bool `json:"cross_cluster_revocation"`
UnifiedCRL bool `json:"unified_crl"`
UnifiedCRLOnExistingPaths bool `json:"unified_crl_on_existing_paths"`
- MaxCRLEntries int `json:"max_crl_entries"`
}
// Implicit default values for the config if it does not exist.
@@ -36,5 +35,4 @@ var DefaultCrlConfig = CrlConfig{
UseGlobalQueue: false,
UnifiedCRL: false,
UnifiedCRLOnExistingPaths: false,
- MaxCRLEntries: 100000,
}
diff --git a/builtin/logical/transit/api_utils.go b/builtin/logical/transit/api_utils.go
deleted file mode 100644
index 75ba53a2b220..000000000000
--- a/builtin/logical/transit/api_utils.go
+++ /dev/null
@@ -1,29 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-package transit
-
-import (
- "fmt"
-
- "github.com/hashicorp/vault/sdk/helper/keysutil"
-)
-
-// parsePaddingSchemeArg validate that the provided padding scheme argument received on the api can be used.
-func parsePaddingSchemeArg(keyType keysutil.KeyType, rawPs any) (keysutil.PaddingScheme, error) {
- ps, ok := rawPs.(string)
- if !ok {
- return "", fmt.Errorf("argument was not a string: %T", rawPs)
- }
-
- paddingScheme, err := keysutil.ParsePaddingScheme(ps)
- if err != nil {
- return "", err
- }
-
- if !keyType.PaddingSchemesSupported() {
- return "", fmt.Errorf("unsupported key type %s for padding scheme", keyType.String())
- }
-
- return paddingScheme, nil
-}
diff --git a/builtin/logical/transit/api_utils_test.go b/builtin/logical/transit/api_utils_test.go
deleted file mode 100644
index 96223a6c69cd..000000000000
--- a/builtin/logical/transit/api_utils_test.go
+++ /dev/null
@@ -1,52 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-package transit
-
-import (
- "testing"
-
- "github.com/hashicorp/vault/sdk/helper/keysutil"
-)
-
-// Test_parsePaddingSchemeArg validate the various use cases we have around parsing
-// the various padding_scheme arg possible values.
-func Test_parsePaddingSchemeArg(t *testing.T) {
- type args struct {
- keyType keysutil.KeyType
- rawPs any
- }
- tests := []struct {
- name string
- args args
- want keysutil.PaddingScheme
- wantErr bool
- }{
- // Error cases
- {name: "nil-ps", args: args{keyType: keysutil.KeyType_RSA2048, rawPs: nil}, wantErr: true},
- {name: "nonstring-ps", args: args{keyType: keysutil.KeyType_RSA2048, rawPs: 5}, wantErr: true},
- {name: "invalid-ps", args: args{keyType: keysutil.KeyType_RSA2048, rawPs: "unknown"}, wantErr: true},
- {name: "bad-keytype-oaep", args: args{keyType: keysutil.KeyType_AES128_CMAC, rawPs: "oaep"}, wantErr: true},
- {name: "bad-keytype-pkcs1", args: args{keyType: keysutil.KeyType_ECDSA_P256, rawPs: "pkcs1v15"}, wantErr: true},
- {name: "oaep-capped", args: args{keyType: keysutil.KeyType_RSA4096, rawPs: "OAEP"}, wantErr: true},
- {name: "pkcs1-whitespace", args: args{keyType: keysutil.KeyType_RSA3072, rawPs: " pkcs1v15 "}, wantErr: true},
-
- // Valid cases
- {name: "oaep-2048", args: args{keyType: keysutil.KeyType_RSA2048, rawPs: "oaep"}, want: keysutil.PaddingScheme_OAEP},
- {name: "oaep-3072", args: args{keyType: keysutil.KeyType_RSA3072, rawPs: "oaep"}, want: keysutil.PaddingScheme_OAEP},
- {name: "oaep-4096", args: args{keyType: keysutil.KeyType_RSA4096, rawPs: "oaep"}, want: keysutil.PaddingScheme_OAEP},
- {name: "pkcs1", args: args{keyType: keysutil.KeyType_RSA3072, rawPs: "pkcs1v15"}, want: keysutil.PaddingScheme_PKCS1v15},
- }
- for _, tt := range tests {
- t.Run(tt.name, func(t *testing.T) {
- got, err := parsePaddingSchemeArg(tt.args.keyType, tt.args.rawPs)
- if (err != nil) != tt.wantErr {
- t.Errorf("parsePaddingSchemeArg() error = %v, wantErr %v", err, tt.wantErr)
- return
- }
- if got != tt.want {
- t.Errorf("parsePaddingSchemeArg() got = %v, want %v", got, tt.want)
- }
- })
- }
-}
diff --git a/builtin/logical/transit/backend_test.go b/builtin/logical/transit/backend_test.go
index ff3afb1189c6..528ccf68218c 100644
--- a/builtin/logical/transit/backend_test.go
+++ b/builtin/logical/transit/backend_test.go
@@ -148,96 +148,83 @@ func testTransit_RSA(t *testing.T, keyType string) {
plaintext := "dGhlIHF1aWNrIGJyb3duIGZveA==" // "the quick brown fox"
- for _, padding := range []keysutil.PaddingScheme{keysutil.PaddingScheme_OAEP, keysutil.PaddingScheme_PKCS1v15, ""} {
- encryptReq := &logical.Request{
- Path: "encrypt/rsa",
- Operation: logical.UpdateOperation,
- Storage: storage,
- Data: map[string]interface{}{
- "plaintext": plaintext,
- },
- }
-
- if padding != "" {
- encryptReq.Data["padding_scheme"] = padding
- }
-
- resp, err = b.HandleRequest(context.Background(), encryptReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
- }
+ encryptReq := &logical.Request{
+ Path: "encrypt/rsa",
+ Operation: logical.UpdateOperation,
+ Storage: storage,
+ Data: map[string]interface{}{
+ "plaintext": plaintext,
+ },
+ }
- ciphertext1 := resp.Data["ciphertext"].(string)
+ resp, err = b.HandleRequest(context.Background(), encryptReq)
+ if err != nil || (resp != nil && resp.IsError()) {
+ t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
+ }
- decryptReq := &logical.Request{
- Path: "decrypt/rsa",
- Operation: logical.UpdateOperation,
- Storage: storage,
- Data: map[string]interface{}{
- "ciphertext": ciphertext1,
- },
- }
- if padding != "" {
- decryptReq.Data["padding_scheme"] = padding
- }
+ ciphertext1 := resp.Data["ciphertext"].(string)
- resp, err = b.HandleRequest(context.Background(), decryptReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
- }
+ decryptReq := &logical.Request{
+ Path: "decrypt/rsa",
+ Operation: logical.UpdateOperation,
+ Storage: storage,
+ Data: map[string]interface{}{
+ "ciphertext": ciphertext1,
+ },
+ }
- decryptedPlaintext := resp.Data["plaintext"]
+ resp, err = b.HandleRequest(context.Background(), decryptReq)
+ if err != nil || (resp != nil && resp.IsError()) {
+ t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
+ }
- if plaintext != decryptedPlaintext {
- t.Fatalf("bad: plaintext; expected: %q\nactual: %q", plaintext, decryptedPlaintext)
- }
+ decryptedPlaintext := resp.Data["plaintext"]
- // Rotate the key
- rotateReq := &logical.Request{
- Path: "keys/rsa/rotate",
- Operation: logical.UpdateOperation,
- Storage: storage,
- }
- resp, err = b.HandleRequest(context.Background(), rotateReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
- }
+ if plaintext != decryptedPlaintext {
+ t.Fatalf("bad: plaintext; expected: %q\nactual: %q", plaintext, decryptedPlaintext)
+ }
- // Encrypt again
- resp, err = b.HandleRequest(context.Background(), encryptReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
- }
- ciphertext2 := resp.Data["ciphertext"].(string)
+ // Rotate the key
+ rotateReq := &logical.Request{
+ Path: "keys/rsa/rotate",
+ Operation: logical.UpdateOperation,
+ Storage: storage,
+ }
+ resp, err = b.HandleRequest(context.Background(), rotateReq)
+ if err != nil || (resp != nil && resp.IsError()) {
+ t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
+ }
- if ciphertext1 == ciphertext2 {
- t.Fatalf("expected different ciphertexts")
- }
+ // Encrypt again
+ resp, err = b.HandleRequest(context.Background(), encryptReq)
+ if err != nil || (resp != nil && resp.IsError()) {
+ t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
+ }
+ ciphertext2 := resp.Data["ciphertext"].(string)
- // See if the older ciphertext can still be decrypted
- resp, err = b.HandleRequest(context.Background(), decryptReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
- }
- if resp.Data["plaintext"].(string) != plaintext {
- t.Fatal("failed to decrypt old ciphertext after rotating the key")
- }
+ if ciphertext1 == ciphertext2 {
+ t.Fatalf("expected different ciphertexts")
+ }
- // Decrypt the new ciphertext
- decryptReq.Data = map[string]interface{}{
- "ciphertext": ciphertext2,
- }
- if padding != "" {
- decryptReq.Data["padding_scheme"] = padding
- }
+ // See if the older ciphertext can still be decrypted
+ resp, err = b.HandleRequest(context.Background(), decryptReq)
+ if err != nil || (resp != nil && resp.IsError()) {
+ t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
+ }
+ if resp.Data["plaintext"].(string) != plaintext {
+ t.Fatal("failed to decrypt old ciphertext after rotating the key")
+ }
- resp, err = b.HandleRequest(context.Background(), decryptReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
- }
- if resp.Data["plaintext"].(string) != plaintext {
- t.Fatal("failed to decrypt ciphertext after rotating the key")
- }
+ // Decrypt the new ciphertext
+ decryptReq.Data = map[string]interface{}{
+ "ciphertext": ciphertext2,
+ }
+ resp, err = b.HandleRequest(context.Background(), decryptReq)
+ if err != nil || (resp != nil && resp.IsError()) {
+ t.Fatalf("bad: err: %v\nresp: %#v", err, resp)
+ }
+ if resp.Data["plaintext"].(string) != plaintext {
+ t.Fatal("failed to decrypt ciphertext after rotating the key")
}
signReq := &logical.Request{
diff --git a/builtin/logical/transit/path_datakey.go b/builtin/logical/transit/path_datakey.go
index 47969673d72b..53aff54690bb 100644
--- a/builtin/logical/transit/path_datakey.go
+++ b/builtin/logical/transit/path_datakey.go
@@ -39,12 +39,6 @@ func (b *backend) pathDatakey() *framework.Path {
ciphertext; "wrapped" will return the ciphertext only.`,
},
- "padding_scheme": {
- Type: framework.TypeString,
- Description: `The padding scheme to use for decrypt. Currently only applies to RSA key types.
-Options are 'oaep' or 'pkcs1v15'. Defaults to 'oaep'`,
- },
-
"context": {
Type: framework.TypeString,
Description: "Context for key derivation. Required for derived keys.",
@@ -148,31 +142,23 @@ func (b *backend) pathDatakeyWrite(ctx context.Context, req *logical.Request, d
return nil, err
}
- factories := make([]any, 0)
- if ps, ok := d.GetOk("padding_scheme"); ok {
- paddingScheme, err := parsePaddingSchemeArg(p.Type, ps)
- if err != nil {
- return logical.ErrorResponse(fmt.Sprintf("padding_scheme argument invalid: %s", err.Error())), logical.ErrInvalidRequest
- }
- factories = append(factories, paddingScheme)
-
- }
+ var managedKeyFactory ManagedKeyFactory
if p.Type == keysutil.KeyType_MANAGED_KEY {
managedKeySystemView, ok := b.System().(logical.ManagedKeySystemView)
if !ok {
return nil, errors.New("unsupported system view")
}
- factories = append(factories, ManagedKeyFactory{
+ managedKeyFactory = ManagedKeyFactory{
managedKeyParams: keysutil.ManagedKeyParameters{
ManagedKeySystemView: managedKeySystemView,
BackendUUID: b.backendUUID,
Context: ctx,
},
- })
+ }
}
- ciphertext, err := p.EncryptWithFactory(ver, context, nonce, base64.StdEncoding.EncodeToString(newKey), factories...)
+ ciphertext, err := p.EncryptWithFactory(ver, context, nonce, base64.StdEncoding.EncodeToString(newKey), nil, managedKeyFactory)
if err != nil {
switch err.(type) {
case errutil.UserError:
diff --git a/builtin/logical/transit/path_datakey_test.go b/builtin/logical/transit/path_datakey_test.go
deleted file mode 100644
index 2207419f84e4..000000000000
--- a/builtin/logical/transit/path_datakey_test.go
+++ /dev/null
@@ -1,125 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-package transit
-
-import (
- "context"
- "testing"
-
- "github.com/hashicorp/vault/sdk/logical"
- "github.com/mitchellh/mapstructure"
- "github.com/stretchr/testify/require"
-)
-
-// TestDataKeyWithPaddingScheme validates that we properly leverage padding scheme
-// args for the returned keys
-func TestDataKeyWithPaddingScheme(t *testing.T) {
- b, s := createBackendWithStorage(t)
- keyName := "test"
- createKeyReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "keys/" + keyName,
- Storage: s,
- Data: map[string]interface{}{
- "type": "rsa-2048",
- },
- }
-
- resp, err := b.HandleRequest(context.Background(), createKeyReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("failed key creation: err: %v resp: %#v", err, resp)
- }
-
- tests := []struct {
- Name string
- PaddingScheme string
- DecryptPaddingScheme string
- ShouldFailToDecrypt bool
- }{
- {"no-padding-scheme", "", "", false},
- {"oaep", "oaep", "oaep", false},
- {"pkcs1v15", "pkcs1v15", "pkcs1v15", false},
- {"mixed-should-fail", "pkcs1v15", "oaep", true},
- {"mixed-based-on-default-should-fail", "", "pkcs1v15", true},
- }
- for _, tc := range tests {
- t.Run(tc.Name, func(t *testing.T) {
- dataKeyReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "datakey/wrapped/" + keyName,
- Storage: s,
- Data: map[string]interface{}{},
- }
- if len(tc.PaddingScheme) > 0 {
- dataKeyReq.Data["padding_scheme"] = tc.PaddingScheme
- }
-
- resp, err = b.HandleRequest(context.Background(), dataKeyReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("failed data key api: err: %v resp: %#v", err, resp)
- }
- require.NotNil(t, resp, "Got nil nil response")
- var d struct {
- Ciphertext string `mapstructure:"ciphertext"`
- }
- err = mapstructure.Decode(resp.Data, &d)
- require.NoError(t, err, "failed decoding datakey api response")
- require.NotEmpty(t, d.Ciphertext, "ciphertext should not be empty")
-
- // Attempt to decrypt with data key with the same padding scheme
- decryptReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "decrypt/" + keyName,
- Storage: s,
- Data: map[string]interface{}{
- "ciphertext": d.Ciphertext,
- },
- }
- if len(tc.DecryptPaddingScheme) > 0 {
- decryptReq.Data["padding_scheme"] = tc.DecryptPaddingScheme
- }
-
- resp, err = b.HandleRequest(context.Background(), decryptReq)
- if tc.ShouldFailToDecrypt {
- require.Error(t, err, "Should have failed decryption as padding schemes are mixed")
- } else {
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("failed to decrypt data key: err: %v resp: %#v", err, resp)
- }
- }
- })
- }
-}
-
-// TestDataKeyWithPaddingSchemeInvalidKeyType validates we fail when we specify a
-// padding_scheme value on an invalid key type (non-RSA)
-func TestDataKeyWithPaddingSchemeInvalidKeyType(t *testing.T) {
- b, s := createBackendWithStorage(t)
- keyName := "test"
- createKeyReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "keys/" + keyName,
- Storage: s,
- Data: map[string]interface{}{},
- }
-
- resp, err := b.HandleRequest(context.Background(), createKeyReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("failed key creation: err: %v resp: %#v", err, resp)
- }
-
- dataKeyReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "datakey/wrapped/" + keyName,
- Storage: s,
- Data: map[string]interface{}{
- "padding_scheme": "oaep",
- },
- }
-
- resp, err = b.HandleRequest(context.Background(), dataKeyReq)
- require.ErrorContains(t, err, "invalid request")
- require.NotNil(t, resp, "response should not be nil")
- require.Contains(t, resp.Error().Error(), "padding_scheme argument invalid: unsupported key")
-}
diff --git a/builtin/logical/transit/path_decrypt.go b/builtin/logical/transit/path_decrypt.go
index dc1bcbf608ce..1daf74daf5d1 100644
--- a/builtin/logical/transit/path_decrypt.go
+++ b/builtin/logical/transit/path_decrypt.go
@@ -50,12 +50,6 @@ func (b *backend) pathDecrypt() *framework.Path {
The ciphertext to decrypt, provided as returned by encrypt.`,
},
- "padding_scheme": {
- Type: framework.TypeString,
- Description: `The padding scheme to use for decrypt. Currently only applies to RSA key types.
-Options are 'oaep' or 'pkcs1v15'. Defaults to 'oaep'`,
- },
-
"context": {
Type: framework.TypeString,
Description: `
@@ -136,9 +130,6 @@ func (b *backend) pathDecryptWrite(ctx context.Context, req *logical.Request, d
Nonce: d.Get("nonce").(string),
AssociatedData: d.Get("associated_data").(string),
}
- if ps, ok := d.GetOk("padding_scheme"); ok {
- batchInputItems[0].PaddingScheme = ps.(string)
- }
}
batchResponseItems := make([]DecryptBatchResponseItem, len(batchInputItems))
@@ -201,40 +192,33 @@ func (b *backend) pathDecryptWrite(ctx context.Context, req *logical.Request, d
continue
}
- var factories []any
- if item.PaddingScheme != "" {
- paddingScheme, err := parsePaddingSchemeArg(p.Type, item.PaddingScheme)
- if err != nil {
- batchResponseItems[i].Error = fmt.Sprintf("'[%d].padding_scheme' invalid: %s", i, err.Error())
- continue
- }
- factories = append(factories, paddingScheme)
- }
+ var factory interface{}
if item.AssociatedData != "" {
if !p.Type.AssociatedDataSupported() {
batchResponseItems[i].Error = fmt.Sprintf("'[%d].associated_data' provided for non-AEAD cipher suite %v", i, p.Type.String())
continue
}
- factories = append(factories, AssocDataFactory{item.AssociatedData})
+ factory = AssocDataFactory{item.AssociatedData}
}
+ var managedKeyFactory ManagedKeyFactory
if p.Type == keysutil.KeyType_MANAGED_KEY {
managedKeySystemView, ok := b.System().(logical.ManagedKeySystemView)
if !ok {
batchResponseItems[i].Error = errors.New("unsupported system view").Error()
}
- factories = append(factories, ManagedKeyFactory{
+ managedKeyFactory = ManagedKeyFactory{
managedKeyParams: keysutil.ManagedKeyParameters{
ManagedKeySystemView: managedKeySystemView,
BackendUUID: b.backendUUID,
Context: ctx,
},
- })
+ }
}
- plaintext, err := p.DecryptWithFactory(item.DecodedContext, item.DecodedNonce, item.Ciphertext, factories...)
+ plaintext, err := p.DecryptWithFactory(item.DecodedContext, item.DecodedNonce, item.Ciphertext, factory, managedKeyFactory)
if err != nil {
switch err.(type) {
case errutil.InternalError:
diff --git a/builtin/logical/transit/path_encrypt.go b/builtin/logical/transit/path_encrypt.go
index c0502db4292a..38c618f9b363 100644
--- a/builtin/logical/transit/path_encrypt.go
+++ b/builtin/logical/transit/path_encrypt.go
@@ -34,9 +34,6 @@ type BatchRequestItem struct {
// Ciphertext for decryption
Ciphertext string `json:"ciphertext" structs:"ciphertext" mapstructure:"ciphertext"`
- // PaddingScheme for encryption/decryption
- PaddingScheme string `json:"padding_scheme" structs:"padding_scheme" mapstructure:"padding_scheme"`
-
// Nonce to be used when v1 convergent encryption is used
Nonce string `json:"nonce" structs:"nonce" mapstructure:"nonce"`
@@ -108,12 +105,6 @@ func (b *backend) pathEncrypt() *framework.Path {
Description: "Base64 encoded plaintext value to be encrypted",
},
- "padding_scheme": {
- Type: framework.TypeString,
- Description: `The padding scheme to use for decrypt. Currently only applies to RSA key types.
-Options are 'oaep' or 'pkcs1v15'. Defaults to 'oaep'`,
- },
-
"context": {
Type: framework.TypeString,
Description: "Base64 encoded context for key derivation. Required if key derivation is enabled",
@@ -268,13 +259,6 @@ func decodeBatchRequestItems(src interface{}, requirePlaintext bool, requireCiph
} else if requirePlaintext {
errs.Errors = append(errs.Errors, fmt.Sprintf("'[%d].plaintext' missing plaintext to encrypt", i))
}
- if v, has := item["padding_scheme"]; has {
- if casted, ok := v.(string); ok {
- (*dst)[i].PaddingScheme = casted
- } else {
- errs.Errors = append(errs.Errors, fmt.Sprintf("'[%d].padding_scheme' expected type 'string', got unconvertible type '%T'", i, item["padding_scheme"]))
- }
- }
if v, has := item["nonce"]; has {
if !reflect.ValueOf(v).IsValid() {
@@ -374,13 +358,6 @@ func (b *backend) pathEncryptWrite(ctx context.Context, req *logical.Request, d
KeyVersion: d.Get("key_version").(int),
AssociatedData: d.Get("associated_data").(string),
}
- if psRaw, ok := d.GetOk("padding_scheme"); ok {
- if ps, ok := psRaw.(string); ok {
- batchInputItems[0].PaddingScheme = ps
- } else {
- return logical.ErrorResponse("padding_scheme was not a string"), logical.ErrInvalidRequest
- }
- }
}
batchResponseItems := make([]EncryptBatchResponseItem, len(batchInputItems))
@@ -458,12 +435,6 @@ func (b *backend) pathEncryptWrite(ctx context.Context, req *logical.Request, d
polReq.KeyType = keysutil.KeyType_AES256_GCM96
case "chacha20-poly1305":
polReq.KeyType = keysutil.KeyType_ChaCha20_Poly1305
- case "rsa-2048":
- polReq.KeyType = keysutil.KeyType_RSA2048
- case "rsa-3072":
- polReq.KeyType = keysutil.KeyType_RSA3072
- case "rsa-4096":
- polReq.KeyType = keysutil.KeyType_RSA4096
case "ecdsa-p256", "ecdsa-p384", "ecdsa-p521":
return logical.ErrorResponse(fmt.Sprintf("key type %v not supported for this operation", keyType)), logical.ErrInvalidRequest
case "managed_key":
@@ -511,40 +482,33 @@ func (b *backend) pathEncryptWrite(ctx context.Context, req *logical.Request, d
warnAboutNonceUsage = true
}
- var factories []any
- if item.PaddingScheme != "" {
- paddingScheme, err := parsePaddingSchemeArg(p.Type, item.PaddingScheme)
- if err != nil {
- batchResponseItems[i].Error = fmt.Sprintf("'[%d].padding_scheme' invalid: %s", i, err.Error())
- continue
- }
- factories = append(factories, paddingScheme)
- }
+ var factory interface{}
if item.AssociatedData != "" {
if !p.Type.AssociatedDataSupported() {
batchResponseItems[i].Error = fmt.Sprintf("'[%d].associated_data' provided for non-AEAD cipher suite %v", i, p.Type.String())
continue
}
- factories = append(factories, AssocDataFactory{item.AssociatedData})
+ factory = AssocDataFactory{item.AssociatedData}
}
+ var managedKeyFactory ManagedKeyFactory
if p.Type == keysutil.KeyType_MANAGED_KEY {
managedKeySystemView, ok := b.System().(logical.ManagedKeySystemView)
if !ok {
batchResponseItems[i].Error = errors.New("unsupported system view").Error()
}
- factories = append(factories, ManagedKeyFactory{
+ managedKeyFactory = ManagedKeyFactory{
managedKeyParams: keysutil.ManagedKeyParameters{
ManagedKeySystemView: managedKeySystemView,
BackendUUID: b.backendUUID,
Context: ctx,
},
- })
+ }
}
- ciphertext, err := p.EncryptWithFactory(item.KeyVersion, item.DecodedContext, item.DecodedNonce, item.Plaintext, factories...)
+ ciphertext, err := p.EncryptWithFactory(item.KeyVersion, item.DecodedContext, item.DecodedNonce, item.Plaintext, factory, managedKeyFactory)
if err != nil {
switch err.(type) {
case errutil.InternalError:
diff --git a/builtin/logical/transit/path_rewrap.go b/builtin/logical/transit/path_rewrap.go
index ea5f8cccd4a7..49b69c7255e1 100644
--- a/builtin/logical/transit/path_rewrap.go
+++ b/builtin/logical/transit/path_rewrap.go
@@ -19,39 +19,6 @@ import (
var ErrNonceNotAllowed = errors.New("provided nonce not allowed for this key")
-type RewrapBatchRequestItem struct {
- // Context for key derivation. This is required for derived keys.
- Context string `json:"context" structs:"context" mapstructure:"context"`
-
- // DecodedContext is the base64 decoded version of Context
- DecodedContext []byte
-
- // Ciphertext for decryption
- Ciphertext string `json:"ciphertext" structs:"ciphertext" mapstructure:"ciphertext"`
-
- // Nonce to be used when v1 convergent encryption is used
- Nonce string `json:"nonce" structs:"nonce" mapstructure:"nonce"`
-
- // The key version to be used for encryption
- KeyVersion int `json:"key_version" structs:"key_version" mapstructure:"key_version"`
-
- // DecodedNonce is the base64 decoded version of Nonce
- DecodedNonce []byte
-
- // Associated Data for AEAD ciphers
- AssociatedData string `json:"associated_data" struct:"associated_data" mapstructure:"associated_data"`
-
- // Reference is an arbitrary caller supplied string value that will be placed on the
- // batch response to ease correlation between inputs and outputs
- Reference string `json:"reference" structs:"reference" mapstructure:"reference"`
-
- // EncryptPaddingScheme specifies the RSA padding scheme for encryption
- EncryptPaddingScheme string `json:"encrypt_padding_scheme" structs:"encrypt_padding_scheme" mapstructure:"encrypt_padding_scheme"`
-
- // DecryptPaddingScheme specifies the RSA padding scheme for decryption
- DecryptPaddingScheme string `json:"decrypt_padding_scheme" structs:"decrypt_padding_scheme" mapstructure:"decrypt_padding_scheme"`
-}
-
func (b *backend) pathRewrap() *framework.Path {
return &framework.Path{
Pattern: "rewrap/" + framework.GenericNameRegex("name"),
@@ -72,18 +39,6 @@ func (b *backend) pathRewrap() *framework.Path {
Description: "Ciphertext value to rewrap",
},
- "encrypt_padding_scheme": {
- Type: framework.TypeString,
- Description: `The padding scheme to use for rewrap's encrypt step. Currently only applies to RSA key types.
-Options are 'oaep' or 'pkcs1v15'. Defaults to 'oaep'`,
- },
-
- "decrypt_padding_scheme": {
- Type: framework.TypeString,
- Description: `The padding scheme to use for rewrap's decrypt step. Currently only applies to RSA key types.
-Options are 'oaep' or 'pkcs1v15'. Defaults to 'oaep'`,
- },
-
"context": {
Type: framework.TypeString,
Description: "Base64 encoded context for key derivation. Required for derived keys.",
@@ -121,7 +76,7 @@ Any batch output will preserve the order of the batch input.`,
func (b *backend) pathRewrapWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
batchInputRaw := d.Raw["batch_input"]
- var batchInputItems []RewrapBatchRequestItem
+ var batchInputItems []BatchRequestItem
var err error
if batchInputRaw != nil {
err = mapstructure.Decode(batchInputRaw, &batchInputItems)
@@ -138,19 +93,13 @@ func (b *backend) pathRewrapWrite(ctx context.Context, req *logical.Request, d *
return logical.ErrorResponse("missing ciphertext to decrypt"), logical.ErrInvalidRequest
}
- batchInputItems = make([]RewrapBatchRequestItem, 1)
- batchInputItems[0] = RewrapBatchRequestItem{
+ batchInputItems = make([]BatchRequestItem, 1)
+ batchInputItems[0] = BatchRequestItem{
Ciphertext: ciphertext,
Context: d.Get("context").(string),
Nonce: d.Get("nonce").(string),
KeyVersion: d.Get("key_version").(int),
}
- if ps, ok := d.GetOk("decrypt_padding_scheme"); ok {
- batchInputItems[0].DecryptPaddingScheme = ps.(string)
- }
- if ps, ok := d.GetOk("encrypt_padding_scheme"); ok {
- batchInputItems[0].EncryptPaddingScheme = ps.(string)
- }
}
batchResponseItems := make([]EncryptBatchResponseItem, len(batchInputItems))
@@ -207,21 +156,12 @@ func (b *backend) pathRewrapWrite(ctx context.Context, req *logical.Request, d *
continue
}
- var factories []any
- if item.DecryptPaddingScheme != "" {
- paddingScheme, err := parsePaddingSchemeArg(p.Type, item.DecryptPaddingScheme)
- if err != nil {
- batchResponseItems[i].Error = fmt.Sprintf("'[%d].decrypt_padding_scheme' invalid: %s", i, err.Error())
- continue
- }
- factories = append(factories, paddingScheme)
- }
if item.Nonce != "" && !nonceAllowed(p) {
batchResponseItems[i].Error = ErrNonceNotAllowed.Error()
continue
}
- plaintext, err := p.DecryptWithFactory(item.DecodedContext, item.DecodedNonce, item.Ciphertext, factories...)
+ plaintext, err := p.Decrypt(item.DecodedContext, item.DecodedNonce, item.Ciphertext)
if err != nil {
switch err.(type) {
case errutil.UserError:
@@ -232,21 +172,11 @@ func (b *backend) pathRewrapWrite(ctx context.Context, req *logical.Request, d *
}
}
- factories = make([]any, 0)
- if item.EncryptPaddingScheme != "" {
- paddingScheme, err := parsePaddingSchemeArg(p.Type, item.EncryptPaddingScheme)
- if err != nil {
- batchResponseItems[i].Error = fmt.Sprintf("'[%d].encrypt_padding_scheme' invalid: %s", i, err.Error())
- continue
- }
- factories = append(factories, paddingScheme)
- factories = append(factories, keysutil.PaddingScheme(item.EncryptPaddingScheme))
- }
if !warnAboutNonceUsage && shouldWarnAboutNonceUsage(p, item.DecodedNonce) {
warnAboutNonceUsage = true
}
- ciphertext, err := p.EncryptWithFactory(item.KeyVersion, item.DecodedContext, item.DecodedNonce, plaintext, factories...)
+ ciphertext, err := p.Encrypt(item.KeyVersion, item.DecodedContext, item.DecodedNonce, plaintext)
if err != nil {
switch err.(type) {
case errutil.UserError:
diff --git a/builtin/logical/transit/path_rewrap_test.go b/builtin/logical/transit/path_rewrap_test.go
index 4018d63ae8fa..55f28874656e 100644
--- a/builtin/logical/transit/path_rewrap_test.go
+++ b/builtin/logical/transit/path_rewrap_test.go
@@ -326,116 +326,3 @@ func TestTransit_BatchRewrapCase3(t *testing.T) {
}
}
-
-// TestTransit_BatchRewrapCase4 batch rewrap leveraging RSA padding schemes
-func TestTransit_BatchRewrapCase4(t *testing.T) {
- var resp *logical.Response
- var err error
-
- b, s := createBackendWithStorage(t)
-
- batchEncryptionInput := []interface{}{
- map[string]interface{}{"plaintext": "dmlzaGFsCg==", "reference": "ek", "padding_scheme": "pkcs1v15"},
- map[string]interface{}{"plaintext": "dGhlIHF1aWNrIGJyb3duIGZveA==", "reference": "do", "padding_scheme": "pkcs1v15"},
- }
- batchEncryptionData := map[string]interface{}{
- "type": "rsa-2048",
- "batch_input": batchEncryptionInput,
- }
- batchReq := &logical.Request{
- Operation: logical.CreateOperation,
- Path: "encrypt/upserted_key",
- Storage: s,
- Data: batchEncryptionData,
- }
- resp, err = b.HandleRequest(context.Background(), batchReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("err:%v resp:%#v", err, resp)
- }
-
- batchEncryptionResponseItems := resp.Data["batch_results"].([]EncryptBatchResponseItem)
-
- batchRewrapInput := make([]interface{}, len(batchEncryptionResponseItems))
- for i, item := range batchEncryptionResponseItems {
- batchRewrapInput[i] = map[string]interface{}{
- "ciphertext": item.Ciphertext,
- "reference": item.Reference,
- "decrypt_padding_scheme": "pkcs1v15",
- "encrypt_padding_scheme": "oaep",
- }
- }
-
- batchRewrapData := map[string]interface{}{
- "batch_input": batchRewrapInput,
- }
-
- rotateReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "keys/upserted_key/rotate",
- Storage: s,
- }
- resp, err = b.HandleRequest(context.Background(), rotateReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("err:%v resp:%#v", err, resp)
- }
-
- rewrapReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "rewrap/upserted_key",
- Storage: s,
- Data: batchRewrapData,
- }
-
- resp, err = b.HandleRequest(context.Background(), rewrapReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("err:%v resp:%#v", err, resp)
- }
-
- batchRewrapResponseItems := resp.Data["batch_results"].([]EncryptBatchResponseItem)
-
- if len(batchRewrapResponseItems) != len(batchEncryptionResponseItems) {
- t.Fatalf("bad: length of input and output or rewrap are not matching; expected: %d, actual: %d", len(batchEncryptionResponseItems), len(batchRewrapResponseItems))
- }
-
- decReq := &logical.Request{
- Operation: logical.UpdateOperation,
- Path: "decrypt/upserted_key",
- Storage: s,
- }
-
- for i, eItem := range batchEncryptionResponseItems {
- rItem := batchRewrapResponseItems[i]
-
- inputRef := batchEncryptionInput[i].(map[string]interface{})["reference"]
- if eItem.Reference != inputRef {
- t.Fatalf("bad: reference mismatch. Expected %s, Actual: %s", inputRef, eItem.Reference)
- }
-
- if eItem.Ciphertext == rItem.Ciphertext {
- t.Fatalf("bad: rewrap input and output are the same")
- }
-
- if !strings.HasPrefix(rItem.Ciphertext, "vault:v2") {
- t.Fatalf("bad: invalid version of ciphertext in rewrap response; expected: 'vault:v2', actual: %s", rItem.Ciphertext)
- }
-
- if rItem.KeyVersion != 2 {
- t.Fatalf("unexpected key version; got: %d, expected: %d", rItem.KeyVersion, 2)
- }
-
- decReq.Data = map[string]interface{}{
- "ciphertext": rItem.Ciphertext,
- }
-
- resp, err = b.HandleRequest(context.Background(), decReq)
- if err != nil || (resp != nil && resp.IsError()) {
- t.Fatalf("err:%v resp:%#v", err, resp)
- }
-
- plaintext1 := "dGhlIHF1aWNrIGJyb3duIGZveA=="
- plaintext2 := "dmlzaGFsCg=="
- if resp.Data["plaintext"] != plaintext1 && resp.Data["plaintext"] != plaintext2 {
- t.Fatalf("bad: plaintext. Expected: %q or %q, Actual: %q", plaintext1, plaintext2, resp.Data["plaintext"])
- }
- }
-}
diff --git a/builtin/logical/transit/path_sign_verify.go b/builtin/logical/transit/path_sign_verify.go
index feed68309ef7..2043c8724e99 100644
--- a/builtin/logical/transit/path_sign_verify.go
+++ b/builtin/logical/transit/path_sign_verify.go
@@ -7,6 +7,7 @@ import (
"context"
"crypto/rsa"
"encoding/base64"
+ "errors"
"fmt"
"strconv"
"strings"
@@ -19,41 +20,6 @@ import (
"github.com/mitchellh/mapstructure"
)
-// policySignArgs are the arguments required to pass through to the SDK policy.SignWithOptions
-type policySignArgs struct {
- keyVersion int
- keyContext []byte
- input []byte
- options keysutil.SigningOptions
-}
-
-type policyVerifyArgs struct {
- keyContext []byte
- input []byte
- options keysutil.SigningOptions
- sig string
-}
-
-type commonSignVerifyApiArgs struct {
- keyName string
- hashAlgorithm keysutil.HashType
- marshaling keysutil.MarshalingType
- prehashed bool
- sigAlgorithm string
- saltLength int
-}
-
-// signApiArgs represents the input arguments that apply to all members of the batch
-type signApiArgs struct {
- commonSignVerifyApiArgs
- keyVersion int
-}
-
-// verifyApiArgs represents the input arguments that apply to all members of the batch
-type verifyApiArgs struct {
- commonSignVerifyApiArgs
-}
-
// BatchRequestSignItem represents a request item for batch processing.
// A map type allows us to distinguish between empty and missing values.
type batchRequestSignItem map[string]string
@@ -137,12 +103,6 @@ func (b *backend) pathSign() *framework.Path {
derivation is enabled; currently only available with ed25519 keys.`,
},
- "signature_context": {
- Type: framework.TypeString,
- Description: `Base64 encoded context for Ed25519ph and Ed25519ctx signatures.
-Currently only available with Ed25519 keys. (Enterprise Only)`,
- },
-
"hash_algorithm": {
Type: framework.TypeString,
Default: defaultHashAlgorithm,
@@ -247,12 +207,6 @@ func (b *backend) pathVerify() *framework.Path {
derivation is enabled; currently only available with ed25519 keys.`,
},
- "signature_context": {
- Type: framework.TypeString,
- Description: `Base64 encoded context for Ed25519ph and Ed25519ctx signatures.
-Currently only available with Ed25519 keys. (Enterprise Only)`,
- },
-
"signature": {
Type: framework.TypeString,
Description: "The signature, including vault header/key version",
@@ -265,7 +219,7 @@ Currently only available with Ed25519 keys. (Enterprise Only)`,
"cmac": {
Type: framework.TypeString,
- Description: "The CMAC, including vault header/key version (Enterprise only)",
+ Description: "The CMAC, including vault header/key version",
},
"input": {
@@ -351,7 +305,7 @@ preserve the order of the batch input`,
}
}
-func getSaltLength(d *framework.FieldData) (int, error) {
+func (b *backend) getSaltLength(d *framework.FieldData) (int, error) {
rawSaltLength, ok := d.GetOk("salt_length")
// This should only happen when something is wrong with the schema,
// so this is a reasonable default.
@@ -379,10 +333,33 @@ func getSaltLength(d *framework.FieldData) (int, error) {
}
func (b *backend) pathSignWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
- // Fetch the top-level arguments for the sign api. These do not include
- // the values that are within the batch input parameters, only those that
- // apply globally.
- apiArgs, err := getSignApiArgs(d)
+ name := d.Get("name").(string)
+ ver := d.Get("key_version").(int)
+ hashAlgorithmStr := d.Get("urlalgorithm").(string)
+ if hashAlgorithmStr == "" {
+ hashAlgorithmStr = d.Get("hash_algorithm").(string)
+ if hashAlgorithmStr == "" {
+ hashAlgorithmStr = d.Get("algorithm").(string)
+ if hashAlgorithmStr == "" {
+ hashAlgorithmStr = defaultHashAlgorithm
+ }
+ }
+ }
+
+ hashAlgorithm, ok := keysutil.HashTypeMap[hashAlgorithmStr]
+ if !ok {
+ return logical.ErrorResponse(fmt.Sprintf("invalid hash algorithm %q", hashAlgorithmStr)), logical.ErrInvalidRequest
+ }
+
+ marshalingStr := d.Get("marshaling_algorithm").(string)
+ marshaling, ok := keysutil.MarshalingTypeMap[marshalingStr]
+ if !ok {
+ return logical.ErrorResponse(fmt.Sprintf("invalid marshaling type %q", marshalingStr)), logical.ErrInvalidRequest
+ }
+
+ prehashed := d.Get("prehashed").(bool)
+ sigAlgorithm := d.Get("signature_algorithm").(string)
+ saltLength, err := b.getSaltLength(d)
if err != nil {
return logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest
}
@@ -390,7 +367,7 @@ func (b *backend) pathSignWrite(ctx context.Context, req *logical.Request, d *fr
// Get the policy
p, _, err := b.GetPolicy(ctx, keysutil.PolicyRequest{
Storage: req.Storage,
- Name: apiArgs.keyName,
+ Name: name,
}, b.GetRandomReader())
if err != nil {
return nil, err
@@ -403,8 +380,15 @@ func (b *backend) pathSignWrite(ctx context.Context, req *logical.Request, d *fr
}
defer p.Unlock()
- if err := validateCommonSignVerifyApiArgs(p, apiArgs.commonSignVerifyApiArgs); err != nil {
- return logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest
+ if !p.Type.SigningSupported() {
+ return logical.ErrorResponse(fmt.Sprintf("key type %v does not support signing", p.Type)), logical.ErrInvalidRequest
+ }
+
+ // Allow managed keys to specify no hash algo without additional conditions.
+ if hashAlgorithm == keysutil.HashTypeNone && p.Type != keysutil.KeyType_MANAGED_KEY {
+ if !prehashed || sigAlgorithm != "pkcs1v15" {
+ return logical.ErrorResponse("hash_algorithm=none requires both prehashed=true and signature_algorithm=pkcs1v15"), logical.ErrInvalidRequest
+ }
}
batchInputRaw := d.Raw["batch_input"]
@@ -425,23 +409,65 @@ func (b *backend) pathSignWrite(ctx context.Context, req *logical.Request, d *fr
"input": d.Get("input").(string),
"context": d.Get("context").(string),
}
-
- // Only defined within the ENT schema, so this is useless on CE.
- if sigContext, ok := d.GetOk("signature_context"); ok {
- batchInputItems[0]["signature_context"] = sigContext.(string)
- }
}
response := make([]batchResponseSignItem, len(batchInputItems))
for i, item := range batchInputItems {
- psa, err := b.getPolicySignArgs(ctx, p, apiArgs, item)
+
+ rawInput, ok := item["input"]
+ if !ok {
+ response[i].Error = "missing input"
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+
+ input, err := base64.StdEncoding.DecodeString(rawInput)
if err != nil {
- response[i].Error = err.Error()
+ response[i].Error = fmt.Sprintf("unable to decode input as base64: %s", err)
response[i].err = logical.ErrInvalidRequest
continue
}
- sig, err := p.SignWithOptions(psa.keyVersion, psa.keyContext, psa.input, &psa.options)
+ if p.Type.HashSignatureInput() && !prehashed {
+ hf := keysutil.HashFuncMap[hashAlgorithm]()
+ if hf != nil {
+ hf.Write(input)
+ input = hf.Sum(nil)
+ }
+ }
+
+ contextRaw := item["context"]
+ var context []byte
+ if len(contextRaw) != 0 {
+ context, err = base64.StdEncoding.DecodeString(contextRaw)
+ if err != nil {
+ response[i].Error = "failed to base64-decode context"
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+ }
+
+ var managedKeyParameters keysutil.ManagedKeyParameters
+ if p.Type == keysutil.KeyType_MANAGED_KEY {
+ managedKeySystemView, ok := b.System().(logical.ManagedKeySystemView)
+ if !ok {
+ return nil, errors.New("unsupported system view")
+ }
+
+ managedKeyParameters = keysutil.ManagedKeyParameters{
+ ManagedKeySystemView: managedKeySystemView,
+ BackendUUID: b.backendUUID,
+ Context: ctx,
+ }
+ }
+
+ sig, err := p.SignWithOptions(ver, context, input, &keysutil.SigningOptions{
+ HashAlgorithm: hashAlgorithm,
+ Marshaling: marshaling,
+ SaltLength: saltLength,
+ SigAlgorithm: sigAlgorithm,
+ ManagedKeyParams: managedKeyParameters,
+ })
if err != nil {
if batchInputRaw != nil {
response[i].Error = err.Error()
@@ -450,7 +476,7 @@ func (b *backend) pathSignWrite(ctx context.Context, req *logical.Request, d *fr
} else if sig == nil {
response[i].err = fmt.Errorf("signature could not be computed")
} else {
- keyVersion := apiArgs.keyVersion
+ keyVersion := ver
if keyVersion == 0 {
keyVersion = p.LatestVersion
}
@@ -493,129 +519,36 @@ func (b *backend) pathSignWrite(ctx context.Context, req *logical.Request, d *fr
return resp, nil
}
-func (b *backend) getPolicySignArgs(ctx context.Context, p *keysutil.Policy, args signApiArgs, item batchRequestSignItem) (policySignArgs, error) {
- rawInput, ok := item["input"]
- if !ok {
- return policySignArgs{}, fmt.Errorf("missing input")
- }
-
- input, err := base64.StdEncoding.DecodeString(rawInput)
- if err != nil {
- return policySignArgs{}, fmt.Errorf("unable to decode input: %s", err)
- }
-
- if p.Type.HashSignatureInput() && !args.prehashed {
- hf := keysutil.HashFuncMap[args.hashAlgorithm]()
- if hf != nil {
- hf.Write(input)
- input = hf.Sum(nil)
+func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
+ batchInputRaw := d.Raw["batch_input"]
+ var batchInputItems []batchRequestVerifyItem
+ if batchInputRaw != nil {
+ err := mapstructure.Decode(batchInputRaw, &batchInputItems)
+ if err != nil {
+ return nil, fmt.Errorf("failed to parse batch input: %w", err)
}
- }
-
- keyContext, err := decodeBase64Arg(item["context"])
- if err != nil {
- return policySignArgs{}, fmt.Errorf("failed to base64-decode context")
- }
-
- psa := policySignArgs{
- keyVersion: args.keyVersion,
- keyContext: keyContext,
- input: input,
- options: keysutil.SigningOptions{
- HashAlgorithm: args.hashAlgorithm,
- Marshaling: args.marshaling,
- SaltLength: args.saltLength,
- SigAlgorithm: args.sigAlgorithm,
- },
- }
-
- if err := b.populateEntPolicySigningOptions(ctx, p, args, item, &psa); err != nil {
- return policySignArgs{}, fmt.Errorf("failed to parse batch item: %s", err)
- }
- return psa, nil
-}
-func validateCommonSignVerifyApiArgs(p *keysutil.Policy, apiArgs commonSignVerifyApiArgs) error {
- if !p.Type.SigningSupported() {
- return fmt.Errorf("key type %v does not support signing", p.Type)
- }
-
- // Perform Vault version specific checks (CE vs ENT)
- return validateSignApiArgsVersionSpecific(p, apiArgs)
-}
-
-func getSignApiArgs(d *framework.FieldData) (signApiArgs, error) {
- keyVersion := d.Get("key_version").(int)
- commonArgs, err := getCommonSignVerifyApiArgs(d)
- if err != nil {
- return signApiArgs{}, err
- }
-
- return signApiArgs{
- commonSignVerifyApiArgs: commonArgs,
- keyVersion: keyVersion,
- }, nil
-}
-
-func getCommonSignVerifyApiArgs(d *framework.FieldData) (commonSignVerifyApiArgs, error) {
- keyName := d.Get("name").(string)
- hashAlgorithmStr, hashAlgorithm, ok := getHashAlgorithmFromArgs(d)
- if !ok {
- return commonSignVerifyApiArgs{}, fmt.Errorf("invalid hash algorithm %q", hashAlgorithmStr)
- }
-
- marshalingStr := d.Get("marshaling_algorithm").(string)
- marshaling, ok := keysutil.MarshalingTypeMap[marshalingStr]
- if !ok {
- return commonSignVerifyApiArgs{}, fmt.Errorf("invalid marshaling type %q", marshalingStr)
- }
-
- prehashed := d.Get("prehashed").(bool)
- sigAlgorithm := d.Get("signature_algorithm").(string)
- saltLength, err := getSaltLength(d)
- if err != nil {
- return commonSignVerifyApiArgs{}, err
- }
-
- return commonSignVerifyApiArgs{
- keyName: keyName,
- hashAlgorithm: hashAlgorithm,
- marshaling: marshaling,
- prehashed: prehashed,
- sigAlgorithm: sigAlgorithm,
- saltLength: saltLength,
- }, nil
-}
-
-func getHashAlgorithmFromArgs(d *framework.FieldData) (string, keysutil.HashType, bool) {
- hashAlgorithmStr := d.Get("urlalgorithm").(string)
- if hashAlgorithmStr == "" {
- hashAlgorithmStr = d.Get("hash_algorithm").(string)
- if hashAlgorithmStr == "" {
- hashAlgorithmStr = d.Get("algorithm").(string)
- if hashAlgorithmStr == "" {
- hashAlgorithmStr = defaultHashAlgorithm
- }
+ if len(batchInputItems) == 0 {
+ return logical.ErrorResponse("missing batch input to process"), logical.ErrInvalidRequest
}
- }
-
- hashAlgorithm, ok := keysutil.HashTypeMap[hashAlgorithmStr]
- return hashAlgorithmStr, hashAlgorithm, ok
-}
-
-func decodeBase64Arg(fieldVal interface{}) ([]byte, error) {
- parsedStr, err := parseutil.ParseString(fieldVal)
- if err != nil {
- return nil, err
- }
-
- return base64.StdEncoding.DecodeString(parsedStr)
-}
+ } else {
+ // use empty string if input is missing - not an error
+ inputB64 := d.Get("input").(string)
-func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *framework.FieldData) (*logical.Response, error) {
- batchInputItems, isBatchInput, err := getVerifyBatchItems(d)
- if err != nil {
- return logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest
+ batchInputItems = make([]batchRequestVerifyItem, 1)
+ batchInputItems[0] = batchRequestVerifyItem{
+ "input": inputB64,
+ }
+ if sig, ok := d.GetOk("signature"); ok {
+ batchInputItems[0]["signature"] = sig.(string)
+ }
+ if hmac, ok := d.GetOk("hmac"); ok {
+ batchInputItems[0]["hmac"] = hmac.(string)
+ }
+ if cmac, ok := d.GetOk("cmac"); ok {
+ batchInputItems[0]["cmac"] = cmac.(string)
+ }
+ batchInputItems[0]["context"] = d.Get("context").(string)
}
// For simplicity, 'signature' and 'hmac' cannot be mixed across batch_input elements.
@@ -639,11 +572,11 @@ func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *
optionsSet := numBooleansTrue(sigFound, hmacFound, cmacFound)
switch {
- case !isBatchInput && optionsSet > 1:
+ case batchInputRaw == nil && optionsSet > 1:
return logical.ErrorResponse("provide one of 'signature', 'hmac' or 'cmac'"), logical.ErrInvalidRequest
- case !isBatchInput && optionsSet == 0:
- return logical.ErrorResponse("missing one of 'signature', 'hmac' or 'cmac' arguments to verify"), logical.ErrInvalidRequest
+ case batchInputRaw == nil && optionsSet == 0:
+ return logical.ErrorResponse("none of 'signature', 'hmac' or 'cmac' were given to verify"), logical.ErrInvalidRequest
case optionsSet > 1:
return logical.ErrorResponse("elements of batch_input must all provide either 'signature', 'hmac' or 'cmac'"), logical.ErrInvalidRequest
@@ -667,7 +600,32 @@ func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *
return b.pathCMACVerify(ctx, req, d)
}
- apiArgs, err := getVerifyApiArgs(d)
+ name := d.Get("name").(string)
+ hashAlgorithmStr := d.Get("urlalgorithm").(string)
+ if hashAlgorithmStr == "" {
+ hashAlgorithmStr = d.Get("hash_algorithm").(string)
+ if hashAlgorithmStr == "" {
+ hashAlgorithmStr = d.Get("algorithm").(string)
+ if hashAlgorithmStr == "" {
+ hashAlgorithmStr = defaultHashAlgorithm
+ }
+ }
+ }
+
+ hashAlgorithm, ok := keysutil.HashTypeMap[hashAlgorithmStr]
+ if !ok {
+ return logical.ErrorResponse(fmt.Sprintf("invalid hash algorithm %q", hashAlgorithmStr)), logical.ErrInvalidRequest
+ }
+
+ marshalingStr := d.Get("marshaling_algorithm").(string)
+ marshaling, ok := keysutil.MarshalingTypeMap[marshalingStr]
+ if !ok {
+ return logical.ErrorResponse(fmt.Sprintf("invalid marshaling type %q", marshalingStr)), logical.ErrInvalidRequest
+ }
+
+ prehashed := d.Get("prehashed").(bool)
+ sigAlgorithm := d.Get("signature_algorithm").(string)
+ saltLength, err := b.getSaltLength(d)
if err != nil {
return logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest
}
@@ -675,7 +633,7 @@ func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *
// Get the policy
p, _, err := b.GetPolicy(ctx, keysutil.PolicyRequest{
Storage: req.Storage,
- Name: apiArgs.keyName,
+ Name: name,
}, b.GetRandomReader())
if err != nil {
return nil, err
@@ -688,28 +646,106 @@ func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *
}
defer p.Unlock()
- if err := validateCommonSignVerifyApiArgs(p, apiArgs.commonSignVerifyApiArgs); err != nil {
- return logical.ErrorResponse(err.Error()), logical.ErrInvalidRequest
+ if !p.Type.SigningSupported() {
+ return logical.ErrorResponse(fmt.Sprintf("key type %v does not support verification", p.Type)), logical.ErrInvalidRequest
+ }
+
+ // Allow managed keys to specify no hash algo without additional conditions.
+ if hashAlgorithm == keysutil.HashTypeNone && p.Type != keysutil.KeyType_MANAGED_KEY {
+ if !prehashed || sigAlgorithm != "pkcs1v15" {
+ return logical.ErrorResponse("hash_algorithm=none requires both prehashed=true and signature_algorithm=pkcs1v15"), logical.ErrInvalidRequest
+ }
}
response := make([]batchResponseVerifyItem, len(batchInputItems))
for i, item := range batchInputItems {
- pva, err := b.getPolicyVerifyArgs(ctx, p, apiArgs, item)
+ rawInput, ok := item["input"]
+ if !ok {
+ response[i].Error = "missing input"
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+ strInput, err := parseutil.ParseString(rawInput)
+ if err != nil {
+ response[i].Error = fmt.Sprintf("unable to parse input as string: %s", err)
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+
+ input, err := base64.StdEncoding.DecodeString(strInput)
+ if err != nil {
+ response[i].Error = fmt.Sprintf("unable to decode input as base64: %s", err)
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+
+ sigRaw, ok := item["signature"].(string)
+ if !ok {
+ response[i].Error = "missing signature"
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+ sig, err := parseutil.ParseString(sigRaw)
if err != nil {
- response[i].Error = fmt.Sprintf("failed to parse item: %s", err)
+ response[i].Error = fmt.Sprintf("failed to parse signature as a string: %s", err)
response[i].err = logical.ErrInvalidRequest
continue
}
- valid, err := p.VerifySignatureWithOptions(pva.keyContext, pva.input, pva.sig, &pva.options)
+ if p.Type.HashSignatureInput() && !prehashed {
+ hf := keysutil.HashFuncMap[hashAlgorithm]()
+ if hf != nil {
+ hf.Write(input)
+ input = hf.Sum(nil)
+ }
+ }
+
+ contextRaw, err := parseutil.ParseString(item["context"])
+ if err != nil {
+ response[i].Error = fmt.Sprintf("failed to parse context as a string: %s", err)
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+ var context []byte
+ if len(contextRaw) != 0 {
+ context, err = base64.StdEncoding.DecodeString(contextRaw)
+ if err != nil {
+ response[i].Error = "failed to base64-decode context"
+ response[i].err = logical.ErrInvalidRequest
+ continue
+ }
+ }
+ var managedKeyParameters keysutil.ManagedKeyParameters
+ if p.Type == keysutil.KeyType_MANAGED_KEY {
+ managedKeySystemView, ok := b.System().(logical.ManagedKeySystemView)
+ if !ok {
+ return nil, errors.New("unsupported system view")
+ }
+
+ managedKeyParameters = keysutil.ManagedKeyParameters{
+ ManagedKeySystemView: managedKeySystemView,
+ BackendUUID: b.backendUUID,
+ Context: ctx,
+ }
+ }
+
+ signingOptions := &keysutil.SigningOptions{
+ HashAlgorithm: hashAlgorithm,
+ Marshaling: marshaling,
+ SaltLength: saltLength,
+ SigAlgorithm: sigAlgorithm,
+ ManagedKeyParams: managedKeyParameters,
+ }
+
+ valid, err := p.VerifySignatureWithOptions(context, input, sig, signingOptions)
if err != nil {
switch err.(type) {
case errutil.UserError:
response[i].Error = err.Error()
response[i].err = logical.ErrInvalidRequest
default:
- if isBatchInput {
+ if batchInputRaw != nil {
response[i].Error = err.Error()
}
response[i].err = err
@@ -721,7 +757,7 @@ func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *
// Generate the response
resp := &logical.Response{}
- if isBatchInput {
+ if batchInputRaw != nil {
// Copy the references
for i := range batchInputItems {
if ref, err := parseutil.ParseString(batchInputItems[i]["reference"]); err == nil {
@@ -746,110 +782,51 @@ func (b *backend) pathVerifyWrite(ctx context.Context, req *logical.Request, d *
return resp, nil
}
-func getVerifyApiArgs(d *framework.FieldData) (verifyApiArgs, error) {
- commonArgs, err := getCommonSignVerifyApiArgs(d)
- if err != nil {
- return verifyApiArgs{}, err
- }
-
- return verifyApiArgs{
- commonSignVerifyApiArgs: commonArgs,
- }, nil
-}
-
-func getVerifyBatchItems(d *framework.FieldData) ([]batchRequestVerifyItem, bool, error) {
- if batchInputRaw, ok := d.Raw["batch_input"]; ok && batchInputRaw != nil {
- var batchInputItems []batchRequestVerifyItem
- err := mapstructure.Decode(batchInputRaw, &batchInputItems)
- if err != nil {
- return nil, false, fmt.Errorf("failed to parse batch input: %w", err)
- }
-
- if len(batchInputItems) == 0 {
- return nil, false, fmt.Errorf("missing batch input to process")
+func numBooleansTrue(bools ...bool) int {
+ numSet := 0
+ for _, value := range bools {
+ if value {
+ numSet++
}
-
- return batchInputItems, true, nil
}
+ return numSet
+}
- // use empty string if input is missing - not an error
- item := batchRequestVerifyItem{
- "input": d.Get("input").(string),
+func decodeTransitSignature(sig string) ([]byte, int, error) {
+ if !strings.HasPrefix(sig, "vault:v") {
+ return nil, 0, fmt.Errorf("prefix is not vault:v")
}
- if sig, ok := d.GetOk("signature"); ok {
- item["signature"] = sig.(string)
- }
- if hmac, ok := d.GetOk("hmac"); ok {
- item["hmac"] = hmac.(string)
- }
- if cmac, ok := d.GetOk("cmac"); ok {
- item["cmac"] = cmac.(string)
- }
-
- item["context"] = d.Get("context").(string)
- if sigContext, ok := d.GetOk("signature_context"); ok {
- item["signature_context"] = sigContext.(string)
+ splitVerification := strings.SplitN(strings.TrimPrefix(sig, "vault:v"), ":", 2)
+ if len(splitVerification) != 2 {
+ return nil, 0, fmt.Errorf("wrong number of fields delimited by ':', got %d expected 2", len(splitVerification))
}
- return []batchRequestVerifyItem{item}, false, nil
-}
-
-func (b *backend) getPolicyVerifyArgs(ctx context.Context, p *keysutil.Policy, apiArgs verifyApiArgs, item batchRequestVerifyItem) (policyVerifyArgs, error) {
- input, err := decodeBase64Arg(item["input"])
+ ver, err := strconv.Atoi(splitVerification[0])
if err != nil {
- return policyVerifyArgs{}, fmt.Errorf("failed to parse input: %s", err)
+ return nil, 0, fmt.Errorf("key version number %s could not be decoded", splitVerification[0])
}
- sigRaw, ok := item["signature"]
- if !ok {
- return policyVerifyArgs{}, fmt.Errorf("missing signature")
- }
- sig, err := parseutil.ParseString(sigRaw)
- if err != nil {
- return policyVerifyArgs{}, fmt.Errorf("failed to parse signature as a string: %s", err)
+ if ver < 1 {
+ return nil, 0, fmt.Errorf("key versions less than 1 are invalid, got: %d", ver)
}
- if p.Type.HashSignatureInput() && !apiArgs.prehashed {
- hf := keysutil.HashFuncMap[apiArgs.hashAlgorithm]()
- if hf != nil {
- hf.Write(input)
- input = hf.Sum(nil)
- }
+ if len(strings.TrimSpace(splitVerification[1])) == 0 {
+ return nil, 0, fmt.Errorf("missing base64 verification string from vault signature")
}
- keyContext, err := decodeBase64Arg(item["context"])
+ verBytes, err := base64.StdEncoding.DecodeString(splitVerification[1])
if err != nil {
- return policyVerifyArgs{}, fmt.Errorf("failed to parse context: %s", err)
- }
-
- vsa := policyVerifyArgs{
- keyContext: keyContext,
- input: input,
- sig: sig,
- options: keysutil.SigningOptions{
- HashAlgorithm: apiArgs.hashAlgorithm,
- Marshaling: apiArgs.marshaling,
- SaltLength: apiArgs.saltLength,
- SigAlgorithm: apiArgs.sigAlgorithm,
- },
+ return nil, 0, fmt.Errorf("unable to decode verification string as base64: %s", err)
}
- if err := b.populateEntPolicyVerifyOptions(ctx, p, apiArgs, item, &vsa); err != nil {
- return policyVerifyArgs{}, fmt.Errorf("failed to parse batch item: %s", err)
- }
-
- return vsa, nil
+ return verBytes, ver, nil
}
-func numBooleansTrue(bools ...bool) int {
- numSet := 0
- for _, value := range bools {
- if value {
- numSet++
- }
- }
- return numSet
+func encodeTransitSignature(value []byte, keyVersion int) string {
+ retStr := base64.StdEncoding.EncodeToString(value)
+ retStr = fmt.Sprintf("vault:v%d:%s", keyVersion, retStr)
+ return retStr
}
const pathSignHelpSyn = `Generate a signature for input data using the named key`
diff --git a/builtin/logical/transit/path_sign_verify_ce.go b/builtin/logical/transit/path_sign_verify_ce.go
deleted file mode 100644
index a0f416a18f9f..000000000000
--- a/builtin/logical/transit/path_sign_verify_ce.go
+++ /dev/null
@@ -1,81 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-//go:build !enterprise
-
-package transit
-
-import (
- "context"
- "fmt"
-
- "github.com/hashicorp/vault/sdk/helper/keysutil"
-)
-
-// validateSignApiArgsVersionSpecific will perform a validation of the Sign API parameters
-// from the Enterprise or CE point of view.
-func validateSignApiArgsVersionSpecific(p *keysutil.Policy, apiArgs commonSignVerifyApiArgs) error {
- if err := _validateEntSpecificKeyType(p); err != nil {
- return err
- }
-
- if apiArgs.hashAlgorithm == keysutil.HashTypeNone {
- if !apiArgs.prehashed || apiArgs.sigAlgorithm != "pkcs1v15" {
- return fmt.Errorf("hash_algorithm=none requires both prehashed=true and signature_algorithm=pkcs1v15")
- }
- }
-
- return nil
-}
-
-// populateEntPolicySigning augments or tweaks the input parameters to the SDK policy.SignWithOptions for
-// Enterprise usage.
-func (b *backend) populateEntPolicySigningOptions(_ context.Context, p *keysutil.Policy, args signApiArgs, item batchRequestSignItem, _ *policySignArgs) error {
- return _forbidEd25519EntBehavior(p, args.commonSignVerifyApiArgs, item["signature_context"])
-}
-
-// populateEntPolicyVerifyOptions augments or tweaks the input parameters to the SDK policy.VerifyWithOptions for
-// Enterprise usage.
-func (b *backend) populateEntPolicyVerifyOptions(_ context.Context, p *keysutil.Policy, args verifyApiArgs, item batchRequestVerifyItem, _ *policyVerifyArgs) error {
- sigContext, err := _validateString(item, "signature_context")
- if err != nil {
- return err
- }
- return _forbidEd25519EntBehavior(p, args.commonSignVerifyApiArgs, sigContext)
-}
-
-func _validateString(item batchRequestVerifyItem, key string) (string, error) {
- if itemVal, exists := item[key]; exists {
- if itemStrVal, ok := itemVal.(string); ok {
- return itemStrVal, nil
- }
- return "", fmt.Errorf("expected string for key=%q, got=%q", key, itemVal)
- }
- return "", nil
-}
-
-func _forbidEd25519EntBehavior(p *keysutil.Policy, apiArgs commonSignVerifyApiArgs, sigContext string) error {
- if p.Type != keysutil.KeyType_ED25519 {
- return nil
- }
-
- switch {
- case apiArgs.prehashed:
- return fmt.Errorf("only Pure Ed25519 signatures supported, prehashed must be false")
- case apiArgs.hashAlgorithm == keysutil.HashTypeSHA2512:
- return fmt.Errorf("only Pure Ed25519 signatures supported, hash_alogithm should not be set")
- case sigContext != "":
- return fmt.Errorf("only Pure Ed25519 signatures supported, signature_context must be empty")
- }
-
- return nil
-}
-
-func _validateEntSpecificKeyType(p *keysutil.Policy) error {
- switch p.Type {
- case keysutil.KeyType_AES128_CMAC, keysutil.KeyType_AES256_CMAC, keysutil.KeyType_MANAGED_KEY:
- return fmt.Errorf("enterprise specific key type %q can not be used on CE", p.Type)
- default:
- return nil
- }
-}
diff --git a/builtin/logical/transit/path_sign_verify_test.go b/builtin/logical/transit/path_sign_verify_test.go
index 19c2fa37eda7..1421f71c34dd 100644
--- a/builtin/logical/transit/path_sign_verify_test.go
+++ b/builtin/logical/transit/path_sign_verify_test.go
@@ -7,7 +7,6 @@ import (
"context"
"encoding/base64"
"fmt"
- "maps"
"strconv"
"strings"
"testing"
@@ -16,7 +15,6 @@ import (
"github.com/hashicorp/vault/sdk/helper/keysutil"
"github.com/hashicorp/vault/sdk/logical"
"github.com/mitchellh/mapstructure"
- "github.com/stretchr/testify/require"
"golang.org/x/crypto/ed25519"
)
@@ -370,103 +368,6 @@ func validatePublicKey(t *testing.T, in string, sig string, pubKeyRaw []byte, ex
}
}
-// TestTransit_SignVerify_Ed25519Behavior makes sure the options on ENT for a
-// Ed25519ph/ctx signature fail on CE and ENT if invalid
-func TestTransit_SignVerify_Ed25519Behavior(t *testing.T) {
- b, storage := createBackendWithSysView(t)
-
- // First create a key
- req := &logical.Request{
- Storage: storage,
- Operation: logical.UpdateOperation,
- Path: "keys/foo",
- Data: map[string]interface{}{
- "type": "ed25519",
- },
- }
- _, err := b.HandleRequest(context.Background(), req)
- require.NoError(t, err, "failed creating ed25519 key")
-
- tests := []struct {
- name string
- args map[string]interface{}
- worksOnEnt bool
- }{
- {"sha2-512 only", map[string]interface{}{"hash_algorithm": "sha2-512"}, false},
- {"prehashed only", map[string]interface{}{"prehashed": "true"}, false},
- {"incorrect input for ph args", map[string]interface{}{"prehashed": "true", "hash_algorithm": "sha2-512"}, false},
- {"context too long", map[string]interface{}{"signature_context": strings.Repeat("x", 1024)}, false},
- {
- name: "ctx-signature",
- args: map[string]interface{}{
- "signature_context": "dGVzdGluZyBjb250ZXh0Cg==",
- },
- worksOnEnt: true,
- },
- {
- name: "ph-signature",
- args: map[string]interface{}{
- "input": "3a81oZNherrMQXNJriBBMRLm+k6JqX6iCp7u5ktV05ohkpkqJ0/BqDa6PCOj/uu9RU1EI2Q86A4qmslPpUyknw==",
- "prehashed": "true",
- "hash_algorithm": "sha2-512",
- "signature_context": "dGVzdGluZyBjb250ZXh0Cg==",
- },
- worksOnEnt: true,
- },
- }
- for _, tc := range tests {
- t.Run(tc.name, func(t *testing.T) {
- signData := map[string]interface{}{"input": "dGhlIHF1aWNrIGJyb3duIGZveA=="}
-
- // if tc.args specifies input, this should overwrite our static value above.
- maps.Copy(signData, tc.args)
-
- req = &logical.Request{
- Storage: storage,
- Operation: logical.UpdateOperation,
- Path: "sign/foo",
- Data: signData,
- }
-
- signSignature := "YmFkIHNpZ25hdHVyZQo=" // "bad signature" but is overwritten if sign works
- resp, err := b.HandleRequest(context.Background(), req)
- if constants.IsEnterprise && tc.worksOnEnt {
- require.NoError(t, err, "expected sign to work on ENT but failed: resp: %v", resp)
- require.NotNil(t, resp, "sign should have had non-nil response on ENT")
- require.False(t, resp.IsError(), "sign expected to work on ENT but failed")
- signSignature = resp.Data["signature"].(string)
- require.NotEmpty(t, signSignature, "sign expected to work on ENT but was empty")
- } else {
- require.ErrorContains(t, err, "invalid request", "expected sign request to fail with invalid request")
- }
-
- verifyData := map[string]interface{}{
- "input": signData["input"],
- "signature": signSignature,
- }
-
- // if tc.args specifies input, this should overwrite our static value above.
- maps.Copy(verifyData, tc.args)
-
- req = &logical.Request{
- Storage: storage,
- Operation: logical.UpdateOperation,
- Path: "verify/foo",
- Data: verifyData,
- }
- resp, err = b.HandleRequest(context.Background(), req)
- if constants.IsEnterprise && tc.worksOnEnt {
- require.NoError(t, err, "verify expected to work on ENT but failed: resp: %v", resp)
- require.NotNil(t, resp, "verify should have had non-nil response on ENT")
- require.False(t, resp.IsError(), "expected verify to work on ENT but failed")
- require.True(t, resp.Data["valid"].(bool), "signature verification should have worked")
- } else {
- require.ErrorContains(t, err, "invalid request", "expected verify request to fail with invalid request")
- }
- })
- }
-}
-
func TestTransit_SignVerify_ED25519(t *testing.T) {
b, storage := createBackendWithSysView(t)
diff --git a/changelog/18615.txt b/changelog/18615.txt
deleted file mode 100644
index 2aa4b32a4cb0..000000000000
--- a/changelog/18615.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:bug
-core: fix issue when attempting to re-bootstrap HA when using Raft as HA but not storage
-```
\ No newline at end of file
diff --git a/changelog/22726.txt b/changelog/22726.txt
deleted file mode 100644
index 7da05f79482b..000000000000
--- a/changelog/22726.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:bug
-secrets/aws: Add sts_region parameter to root config for STS API calls.
-```
\ No newline at end of file
diff --git a/changelog/25486.txt b/changelog/25486.txt
deleted file mode 100644
index 9293c69dba04..000000000000
--- a/changelog/25486.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:improvement
-secrets/transit: Add support for RSA padding scheme pkcs1v15 for encryption
-```
diff --git a/changelog/27033.txt b/changelog/27033.txt
deleted file mode 100644
index a06152dedc2b..000000000000
--- a/changelog/27033.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:improvement
-command/server: Add support for dumping pprof files during startup using CLI option `pprof-dump-dir`
-```
diff --git a/changelog/27920.txt b/changelog/27920.txt
deleted file mode 100644
index 6cb687731630..000000000000
--- a/changelog/27920.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:bug
-core/api: Added missing LICENSE files to API sub-modules to ensure Go module tooling recognizes MPL-2.0 license.
-```
diff --git a/changelog/27927.txt b/changelog/27927.txt
deleted file mode 100644
index afc37a7acbd3..000000000000
--- a/changelog/27927.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-```release-note:improvement
-storage/s3: Pass context to AWS SDK calls
-```
-```release-note:improvement
-storage/dynamodb: Pass context to AWS SDK calls
-```
diff --git a/changelog/28126.txt b/changelog/28126.txt
deleted file mode 100644
index 5dfb7a864955..000000000000
--- a/changelog/28126.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-```release-note:improvement
-auto-auth/cert: support watching changes on certificate/key files and notifying the auth handler when `enable_reauth_on_new_credentials` is enabled.
-```
-```release-note:improvement
-auto-auth: support new config option `enable_reauth_on_new_credentials`, supporting re-authentication when receiving new credential on certain auto-auth types
-```
diff --git a/changelog/28330.txt b/changelog/28330.txt
deleted file mode 100644
index 4c325326d6ed..000000000000
--- a/changelog/28330.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:bug
-secret/aws: Fixed potential panic after step-down and the queue has not repopulated.
-```
diff --git a/changelog/28456.txt b/changelog/28456.txt
deleted file mode 100644
index af0a582ecb89..000000000000
--- a/changelog/28456.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:change
-sdk: Upgrade to go-secure-stdlib/plugincontainer@v0.4.1, which also bumps github.com/docker/docker to v27.2.1+incompatible
-```
diff --git a/changelog/28596.txt b/changelog/28596.txt
deleted file mode 100644
index 9e79f89a405b..000000000000
--- a/changelog/28596.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-```release-note:improvement
-audit: Audit logs will contain User-Agent headers when they are present in the incoming request. They are not
-HMAC'ed by default but can be configured to be via the `/sys/config/auditing/request-headers/user-agent` endpoint.
-```
diff --git a/changelog/28654.txt b/changelog/28654.txt
deleted file mode 100644
index 35f7c619e470..000000000000
--- a/changelog/28654.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:improvement
-secrets/pki: Add a CRL entry limit to prevent runaway revocations from overloading Vault, reconfigurable with max_crl_entries on the CRL config.
-```
diff --git a/changelog/28678.txt b/changelog/28678.txt
deleted file mode 100644
index 018e4adb7307..000000000000
--- a/changelog/28678.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:improvement
-secrets/pki: Add ACME error types to errors encountered during challenge validation.
-```
diff --git a/changelog/28798.txt b/changelog/28798.txt
deleted file mode 100644
index 992e240ee621..000000000000
--- a/changelog/28798.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:change
-ui: Upgrade Ember data to v5.3.2 (and minor upgrade of ember-cli, ember-source to v5.8.0)
-```
diff --git a/changelog/28808.txt b/changelog/28808.txt
deleted file mode 100644
index 20d4d1ce4e36..000000000000
--- a/changelog/28808.txt
+++ /dev/null
@@ -1,6 +0,0 @@
-```release-note:improvement
-ui: Replace KVv2 json secret details view with Hds::CodeBlock component allowing users to search the full secret height.
-```
-```release-note:bug
-ui: Allow users to search the full json object within the json code-editor edit/create view.
-```
diff --git a/changelog/28822.txt b/changelog/28822.txt
deleted file mode 100644
index 7f8f48fe72c9..000000000000
--- a/changelog/28822.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:improvement
-ui: Add identity_token_key to mount view for the GCP and Azure Secret engines.
-```
diff --git a/changelog/28867.txt b/changelog/28867.txt
deleted file mode 100644
index a0e541bce63f..000000000000
--- a/changelog/28867.txt
+++ /dev/null
@@ -1,4 +0,0 @@
-```release-note:bug
-core: Fix an issue where duplicate identity aliases in storage could be merged
-inconsistently during different unseal events or on different servers.
-```
diff --git a/changelog/28875.txt b/changelog/28875.txt
deleted file mode 100644
index 471920e9b26d..000000000000
--- a/changelog/28875.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:change
-storage/raft: Do not allow nodes that have been removed from the raft cluster configuration to respond to requests. Shutdown and seal raft nodes when they are removed.
-```
diff --git a/changelog/28938.txt b/changelog/28938.txt
deleted file mode 100644
index 2fe42304f07e..000000000000
--- a/changelog/28938.txt
+++ /dev/null
@@ -1,3 +0,0 @@
-```release-note:improvement
-core: Add `removed_from_cluster` field to sys/seal-status and vault status output to indicate whether the node has been removed from the HA cluster.
-```
diff --git a/changelog/_go-ver-1190.txt b/changelog/_go-ver-1190.txt
index 06f78478a6a2..26003e84db20 100644
--- a/changelog/_go-ver-1190.txt
+++ b/changelog/_go-ver-1190.txt
@@ -1,3 +1,3 @@
```release-note:change
-core: Bump Go version to 1.23.3.
+core: Bump Go version to 1.22.8.
```
diff --git a/command/agent/config/config.go b/command/agent/config/config.go
index 7b9a942824ec..d1597cece8fa 100644
--- a/command/agent/config/config.go
+++ b/command/agent/config/config.go
@@ -129,6 +129,8 @@ type AutoAuth struct {
Method *Method `hcl:"-"`
Sinks []*Sink `hcl:"sinks"`
+ // NOTE: This is unsupported outside of testing and may disappear at any
+ // time.
EnableReauthOnNewCredentials bool `hcl:"enable_reauth_on_new_credentials"`
}
diff --git a/command/agentproxyshared/auth/cert/cert.go b/command/agentproxyshared/auth/cert/cert.go
index b9410c10b9bd..fabe9a6365fb 100644
--- a/command/agentproxyshared/auth/cert/cert.go
+++ b/command/agentproxyshared/auth/cert/cert.go
@@ -5,21 +5,14 @@ package cert
import (
"context"
- "crypto/tls"
- "encoding/hex"
"errors"
"fmt"
"net/http"
- "os"
- "sync"
- "time"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
"github.com/hashicorp/vault/command/agentproxyshared/auth"
"github.com/hashicorp/vault/sdk/helper/consts"
- "github.com/hashicorp/vault/sdk/helper/parseutil"
- "golang.org/x/crypto/blake2b"
)
type certMethod struct {
@@ -34,14 +27,6 @@ type certMethod struct {
// Client is the cached client to use if cert info was provided.
client *api.Client
-
- stopCh chan struct{}
- doneCh chan struct{}
- credSuccessGate chan struct{}
- ticker *time.Ticker
- once *sync.Once
- credsFound chan struct{}
- latestHash *string
}
var _ auth.AuthMethodWithClient = &certMethod{}
@@ -53,17 +38,10 @@ func NewCertAuthMethod(conf *auth.AuthConfig) (auth.AuthMethod, error) {
// Not concerned if the conf.Config is empty as the 'name'
// parameter is optional when using TLS Auth
- lastHash := ""
+
c := &certMethod{
logger: conf.Logger,
mountPath: conf.MountPath,
-
- stopCh: make(chan struct{}),
- doneCh: make(chan struct{}),
- credSuccessGate: make(chan struct{}),
- once: new(sync.Once),
- credsFound: make(chan struct{}),
- latestHash: &lastHash,
}
if conf.Config != nil {
@@ -109,20 +87,6 @@ func NewCertAuthMethod(conf *auth.AuthConfig) (auth.AuthMethod, error) {
}
}
- if c.isCertConfigured() && c.reload {
- reloadPeriod := time.Minute
- if reloadPeriodRaw, ok := conf.Config["reload_period"]; ok {
- period, err := parseutil.ParseDurationSecond(reloadPeriodRaw)
- if err != nil {
- return nil, fmt.Errorf("error parsing 'reload_period' value: %w", err)
- }
- reloadPeriod = period
- }
- c.ticker = time.NewTicker(reloadPeriod)
-
- go c.runWatcher()
- }
-
return c, nil
}
@@ -139,26 +103,12 @@ func (c *certMethod) Authenticate(_ context.Context, client *api.Client) (string
}
func (c *certMethod) NewCreds() chan struct{} {
- return c.credsFound
+ return nil
}
-func (c *certMethod) CredSuccess() {
- c.once.Do(func() {
- close(c.credSuccessGate)
- })
-}
+func (c *certMethod) CredSuccess() {}
-func (c *certMethod) Shutdown() {
- if c.isCertConfigured() && c.reload {
- c.ticker.Stop()
- close(c.stopCh)
- <-c.doneCh
- }
-}
-
-func (c *certMethod) isCertConfigured() bool {
- return c.caCert != "" || (c.clientKey != "" && c.clientCert != "")
-}
+func (c *certMethod) Shutdown() {}
// AuthClient uses the existing client's address and returns a new client with
// the auto-auth method's certificate information if that's provided in its
@@ -168,7 +118,7 @@ func (c *certMethod) AuthClient(client *api.Client) (*api.Client, error) {
clientToAuth := client
- if c.isCertConfigured() {
+ if c.caCert != "" || (c.clientKey != "" && c.clientCert != "") {
// Return cached client if present
if c.client != nil && !c.reload {
return c.client, nil
@@ -191,13 +141,6 @@ func (c *certMethod) AuthClient(client *api.Client) (*api.Client, error) {
return nil, err
}
- // set last hash if load it successfully
- if hash, err := c.hashCert(c.clientCert, c.clientKey, c.caCert); err != nil {
- return nil, err
- } else {
- c.latestHash = &hash
- }
-
var err error
clientToAuth, err = api.NewClient(config)
if err != nil {
@@ -213,95 +156,3 @@ func (c *certMethod) AuthClient(client *api.Client) (*api.Client, error) {
return clientToAuth, nil
}
-
-// hashCert returns reads and verifies the given cert/key pair and return the hashing result
-// in string representation. Otherwise, returns an error.
-// As the pair of cert/key and ca cert are optional because they may be configured externally
-// or use system default ca bundle, empty paths are simply skipped.
-// A valid hashing result means:
-// 1. All presented files are readable.
-// 2. The client cert/key pair is valid if presented.
-// 3. Any presented file in this bundle changed, the hash changes.
-func (c *certMethod) hashCert(certFile, keyFile, caFile string) (string, error) {
- var buf []byte
- if certFile != "" && keyFile != "" {
- certPEMBlock, err := os.ReadFile(certFile)
- if err != nil {
- return "", err
- }
- c.logger.Debug("Loaded cert file", "file", certFile, "length", len(certPEMBlock))
-
- keyPEMBlock, err := os.ReadFile(keyFile)
- if err != nil {
- return "", err
- }
- c.logger.Debug("Loaded key file", "file", keyFile, "length", len(keyPEMBlock))
-
- // verify
- _, err = tls.X509KeyPair(certPEMBlock, keyPEMBlock)
- if err != nil {
- return "", err
- }
- c.logger.Debug("The cert/key are valid")
- buf = append(certPEMBlock, keyPEMBlock...)
- }
-
- if caFile != "" {
- data, err := os.ReadFile(caFile)
- if err != nil {
- return "", err
- }
- c.logger.Debug("Loaded ca file", "file", caFile, "length", len(data))
- buf = append(buf, data...)
- }
-
- sum := blake2b.Sum256(buf)
- return hex.EncodeToString(sum[:]), nil
-}
-
-// runWatcher uses polling instead of inotify to sense the changes on the cert/key/ca files.
-// The reason not to use inotify:
-// 1. To not miss any changes, we need to watch the directory instead of files when using inotify.
-// 2. These files are not frequently changed/renewed, and they don't need to be reloaded immediately after renewal.
-// 3. Some network based filesystem and FUSE don't support inotify.
-func (c *certMethod) runWatcher() {
- defer close(c.doneCh)
-
- select {
- case <-c.stopCh:
- return
-
- case <-c.credSuccessGate:
- // We only start the next loop once we're initially successful,
- // since at startup Authenticate will be called, and we don't want
- // to end up immediately re-authenticating by having found a new
- // value
- }
-
- for {
- changed := false
- select {
- case <-c.stopCh:
- return
-
- case <-c.ticker.C:
- c.logger.Debug("Checking if files changed", "cert", c.clientCert, "key", c.clientKey)
- hash, err := c.hashCert(c.clientCert, c.clientKey, c.caCert)
- // ignore errors in watcher
- if err == nil {
- c.logger.Debug("hash before/after", "new", hash, "old", *c.latestHash)
- changed = *c.latestHash != hash
- } else {
- c.logger.Warn("hash failed for cert/key files", "err", err)
- }
- }
-
- if changed {
- c.logger.Info("The cert/key files changed")
- select {
- case c.credsFound <- struct{}{}:
- case <-c.stopCh:
- }
- }
- }
-}
diff --git a/command/agentproxyshared/auth/cert/cert_test.go b/command/agentproxyshared/auth/cert/cert_test.go
index 7abf856e8426..6a7e4f779e9c 100644
--- a/command/agentproxyshared/auth/cert/cert_test.go
+++ b/command/agentproxyshared/auth/cert/cert_test.go
@@ -5,13 +5,10 @@ package cert
import (
"context"
- "fmt"
"os"
"path"
- "path/filepath"
"reflect"
"testing"
- "time"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/api"
@@ -31,7 +28,6 @@ func TestCertAuthMethod_Authenticate(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- defer method.Shutdown()
client, err := api.NewClient(nil)
if err != nil {
@@ -69,7 +65,6 @@ func TestCertAuthMethod_AuthClient_withoutCerts(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- defer method.Shutdown()
client, err := api.NewClient(api.DefaultConfig())
if err != nil {
@@ -113,7 +108,6 @@ func TestCertAuthMethod_AuthClient_withCerts(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- defer method.Shutdown()
client, err := api.NewClient(nil)
if err != nil {
@@ -140,38 +134,29 @@ func TestCertAuthMethod_AuthClient_withCerts(t *testing.T) {
}
}
-func copyFile(from, to string) error {
- data, err := os.ReadFile(from)
+func TestCertAuthMethod_AuthClient_withCertsReload(t *testing.T) {
+ clientCert, err := os.Open("./test-fixtures/keys/cert.pem")
if err != nil {
- return err
+ t.Fatal(err)
}
- return os.WriteFile(to, data, 0o600)
-}
+ defer clientCert.Close()
-// TestCertAuthMethod_AuthClient_withCertsReload makes the file change and ensures the cert auth method deliver the event.
-func TestCertAuthMethod_AuthClient_withCertsReload(t *testing.T) {
- // Initial the cert/key pair to temp path
- certPath := filepath.Join(os.TempDir(), "app.crt")
- keyPath := filepath.Join(os.TempDir(), "app.key")
- if err := copyFile("./test-fixtures/keys/cert.pem", certPath); err != nil {
- t.Fatal("copy cert file failed", err)
- }
- defer os.Remove(certPath)
- if err := copyFile("./test-fixtures/keys/key.pem", keyPath); err != nil {
- t.Fatal("copy key file failed", err)
+ clientKey, err := os.Open("./test-fixtures/keys/key.pem")
+ if err != nil {
+ t.Fatal(err)
}
- defer os.Remove(keyPath)
+
+ defer clientKey.Close()
config := &auth.AuthConfig{
Logger: hclog.NewNullLogger(),
MountPath: "cert-test",
Config: map[string]interface{}{
- "name": "with-certs-reloaded",
- "client_cert": certPath,
- "client_key": keyPath,
- "reload": true,
- "reload_period": 1,
+ "name": "with-certs-reloaded",
+ "client_cert": clientCert.Name(),
+ "client_key": clientKey.Name(),
+ "reload": true,
},
}
@@ -179,7 +164,6 @@ func TestCertAuthMethod_AuthClient_withCertsReload(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- defer method.Shutdown()
client, err := api.NewClient(nil)
if err != nil {
@@ -204,113 +188,4 @@ func TestCertAuthMethod_AuthClient_withCertsReload(t *testing.T) {
if reloadedClient == clientToUse {
t.Fatal("expected client from AuthClient to return back a new client")
}
-
- method.CredSuccess()
- // Only make a change to the cert file, it doesn't match the key file so the client won't pick and load them.
- ctx, cancel := context.WithTimeout(context.Background(), 3*time.Second)
- if err = copyFile("./test-fixtures/keys/cert1.pem", certPath); err != nil {
- t.Fatal("update cert file failed", err)
- }
-
- select {
- case <-ctx.Done():
- case <-method.NewCreds():
- cancel()
- t.Fatal("malformed cert should not be observed as a change")
- }
-
- // Make a change to the key file and now they are good to be picked.
- if err = copyFile("./test-fixtures/keys/key1.pem", keyPath); err != nil {
- t.Fatal("update key file failed", err)
- }
- ctx, cancel = context.WithTimeout(context.Background(), 3*time.Second)
- select {
- case <-ctx.Done():
- t.Fatal("failed to watch the cert change: timeout")
- case <-method.NewCreds():
- cancel()
- }
-}
-
-// TestCertAuthMethod_hashCert_withEmptyPaths tests hashCert() if it works well with optional options.
-func TestCertAuthMethod_hashCert_withEmptyPaths(t *testing.T) {
- c := &certMethod{
- logger: hclog.NewNullLogger(),
- }
-
- // It skips empty file paths
- sum, err := c.hashCert("", "", "")
- if sum == "" || err != nil {
- t.Fatal("hashCert() should skip empty file paths and succeed.")
- }
- emptySum := sum
-
- // Only present ca cert
- sum, err = c.hashCert("", "", "./test-fixtures/root/rootcacert.pem")
- if sum == "" || err != nil {
- t.Fatal("hashCert() should succeed when only present ca cert.")
- }
-
- // Only present client cert/key
- sum, err = c.hashCert("./test-fixtures/keys/cert.pem", "./test-fixtures/keys/key.pem", "")
- if sum == "" || err != nil {
- fmt.Println(sum, err)
- t.Fatal("hashCert() should succeed when only present client cert/key.")
- }
-
- // The client cert/key should be presented together or will be skipped
- sum, err = c.hashCert("./test-fixtures/keys/cert.pem", "", "")
- if sum == "" || err != nil {
- t.Fatal("hashCert() should succeed when only present client cert.")
- } else if sum != emptySum {
- t.Fatal("hashCert() should skip the client cert/key when only present client cert.")
- }
-}
-
-// TestCertAuthMethod_hashCert_withInvalidClientCert adds test cases for invalid input for hashCert().
-func TestCertAuthMethod_hashCert_withInvalidClientCert(t *testing.T) {
- c := &certMethod{
- logger: hclog.NewNullLogger(),
- }
-
- // With mismatched cert/key pair
- sum, err := c.hashCert("./test-fixtures/keys/cert1.pem", "./test-fixtures/keys/key.pem", "")
- if sum != "" || err == nil {
- t.Fatal("hashCert() should fail with invalid client cert.")
- }
-
- // With non-existed paths
- sum, err = c.hashCert("./test-fixtures/keys/cert2.pem", "./test-fixtures/keys/key.pem", "")
- if sum != "" || err == nil {
- t.Fatal("hashCert() should fail with non-existed client cert path.")
- }
-}
-
-// TestCertAuthMethod_hashCert_withChange tests hashCert() if it detects changes from both client cert/key and ca cert.
-func TestCertAuthMethod_hashCert_withChange(t *testing.T) {
- c := &certMethod{
- logger: hclog.NewNullLogger(),
- }
-
- // A good first case.
- sum, err := c.hashCert("./test-fixtures/keys/cert.pem", "./test-fixtures/keys/key.pem", "./test-fixtures/root/rootcacert.pem")
- if sum == "" || err != nil {
- t.Fatal("hashCert() shouldn't fail with a valid pair of cert/key.")
- }
-
- // Only change the ca cert from the first case.
- sum1, err := c.hashCert("./test-fixtures/keys/cert.pem", "./test-fixtures/keys/key.pem", "./test-fixtures/keys/cert.pem")
- if sum1 == "" || err != nil {
- t.Fatal("hashCert() shouldn't fail with valid pair of cert/key.")
- } else if sum == sum1 {
- t.Fatal("The hash should be different with a different ca cert.")
- }
-
- // Only change the cert/key pair from the first case.
- sum2, err := c.hashCert("./test-fixtures/keys/cert1.pem", "./test-fixtures/keys/key1.pem", "./test-fixtures/root/rootcacert.pem")
- if sum2 == "" || err != nil {
- t.Fatal("hashCert() shouldn't fail with a valid cert/key pair")
- } else if sum == sum2 || sum1 == sum2 {
- t.Fatal("The hash should be different with a different pair of cert/key.")
- }
}
diff --git a/command/agentproxyshared/auth/cert/test-fixtures/keys/cert1.pem b/command/agentproxyshared/auth/cert/test-fixtures/keys/cert1.pem
deleted file mode 100644
index 01afb2157e37..000000000000
--- a/command/agentproxyshared/auth/cert/test-fixtures/keys/cert1.pem
+++ /dev/null
@@ -1,17 +0,0 @@
------BEGIN CERTIFICATE-----
-MIICrTCCAZUCCQDDXho7UXdaIjANBgkqhkiG9w0BAQsFADAWMRQwEgYDVQQDEwtl
-eGFtcGxlLmNvbTAeFw0yNDA4MTcwOTE0MDRaFw0zNDA4MTUwOTE0MDRaMBsxGTAX
-BgNVBAMMEGNlcnQuZXhhbXBsZS5jb20wggEiMA0GCSqGSIb3DQEBAQUAA4IBDwAw
-ggEKAoIBAQDDSMAi8aL1XLCRrPl8KjJcH/pJe9QJtzUIU3T9tfj+Eq8yMUbFu+so
-ec+knsxTi5zN7wq1/t9B9tIvDVG0C9T7BbhX2dYPNC1oY7DtdI3KqA76Z78v533Y
-p/WFMHn9X1v0g7qOHm9Y7V6oHg7m+ICq84fORbmfgNW/tPNqTJRU4wyzlIPw1Toi
-9awHMZHZmbjUwFgSQ8TOXgZfWo1ZmbOFY2epBIRCapsYpJgwKXy1UjIfQIQ6e6xm
-KbKQ/IIeuufo5U8vYV91nGNOVkieeGQ8vmVa1f/oyFfChCRR+aLCqbUGfJWzdicm
-eqyQVmPqJxTFuh7WMq+cOX5A068sYj0FAgMBAAEwDQYJKoZIhvcNAQELBQADggEB
-AFtUgRS+OZXmDmhIiaw4OrMruz3N2PCjWo/y+rK5gECuApGv7may3k9E65yRUvBb
-Ch68y1TMr+7J0MDl1CIbJUnLJkmcID+IvLVS3hVJ9H0raP6epDRvfkM3Xc/RwNgS
-PS1H1K8oxDPoo4an1yc6UoKng5KCAUYN+8dR9iVpCIPzRm0LSDIqMyamxoeNLfrO
-Nta+sKu1iS/MHy/MVLqyRDwTP2DnfYJTvhQDK5Y5bi7Chkv7g3ug/o2RZ38rRiRd
-Os90dDmTCgnYBSJtfKWF5gSnzP+OTs6Yb6KOIY7gLY/r1PBPabSuAnRMS/iTi6tq
-l91Cs+vnv6HNcZsGphoQJq8=
------END CERTIFICATE-----
diff --git a/command/agentproxyshared/auth/cert/test-fixtures/keys/key1.pem b/command/agentproxyshared/auth/cert/test-fixtures/keys/key1.pem
deleted file mode 100644
index ea07f86416f2..000000000000
--- a/command/agentproxyshared/auth/cert/test-fixtures/keys/key1.pem
+++ /dev/null
@@ -1,28 +0,0 @@
------BEGIN PRIVATE KEY-----
-MIIEvQIBADANBgkqhkiG9w0BAQEFAASCBKcwggSjAgEAAoIBAQDDSMAi8aL1XLCR
-rPl8KjJcH/pJe9QJtzUIU3T9tfj+Eq8yMUbFu+soec+knsxTi5zN7wq1/t9B9tIv
-DVG0C9T7BbhX2dYPNC1oY7DtdI3KqA76Z78v533Yp/WFMHn9X1v0g7qOHm9Y7V6o
-Hg7m+ICq84fORbmfgNW/tPNqTJRU4wyzlIPw1Toi9awHMZHZmbjUwFgSQ8TOXgZf
-Wo1ZmbOFY2epBIRCapsYpJgwKXy1UjIfQIQ6e6xmKbKQ/IIeuufo5U8vYV91nGNO
-VkieeGQ8vmVa1f/oyFfChCRR+aLCqbUGfJWzdicmeqyQVmPqJxTFuh7WMq+cOX5A
-068sYj0FAgMBAAECggEAP1vIMs4xL+g9xVXYsAdExYz+eH77gZd2Vlg1eedjfJN1
-UhSYwKjCmCRFUUTQSD7gxhPLZtblepJpCSkKHB9Gn5bwg1hC0jX8kYTer3wEUP8L
-tQSaDCHQO83qo6bhvWoF/KQMj/Wh7Lk+3864yQlRPaW7pxoKKozzTLqZyyBDc/KR
-YaUco+9NFqClHd/TRehoykYa7OvNVJjBDxTnnxijE0d5w83rP+wDJczhe/Xn/0f1
-Q7JFa4NpKmLEXj93GZiteloE80AbVnMiIemGB8ZZGHcySiib3wzuLk32dLS8zguU
-gp3E1FhL5xI7gsS7ClA/S6+tK1c46FzQYuIA105tAQKBgQDhnoxCW0N8xV5I1tje
-Q1AW+tMVyO6yaREFuoz6tE6AZ1drywyHdqia/QNbXkdllYVQDMlszQ3tmZ8RT/+5
-NdJ+LnNag8T6PaN3AtXAf0uveCL1et5ffWuRicesJCCJ10ESFQaVccZEqhJhtnQk
-giqICNHV0dWIEVsZGi5R4sA0wQKBgQDdlICpZud2SLrSx00Fb6TfZumxWjDwDb9D
-avoQJb376pg1qpAh53hUJbHWPlspeG/k24J0oRrnb3aln8QS21qVov90YllEWwnO
-xebYgdjvfOIZ1b8vJ2/UkfLX9Xa9KuzvGpv4BSNOZ8UNHI6Dj/eFmWP+q/a3vzJT
-rEgoC1xFRQKBgQCGkZtUxLxnAg1vYn3ta7asTiRyvOrqDNKzWQZXTg34dirlRzGM
-5pBACSLkb0Ika98c1NObCl8BVXxTxiRfoqOO0UPKPAfTvcnu5Qj7DLHm0cAALK3P
-xK3RG521pcKmlHXiRBouLrM0J0BZeYqib+TQSHpnjwVOaBOu0DfKbXV4wQKBgAaU
-VEWzcogGnNWJaXYR3Jltmt7TSMS4A8fis04rcLq8OozNZb47+0y0WdV8wIQ4uUnY
-YsVHy1635pQAbHgK32O2FVPFX9UxxtbG9ZXUNTbXRHdz61thFmb/dnCHL2FqluJ6
-rcrtjCDV3/oFsQ2jBryG03tKa+cE3F+zq+jUfYbpAoGAauV0h6kqS5mF+sa3F+I1
-zIZ7k81r5csZXkgQ6HphIAvo5NSv7H1jeSkGbZmg29irReggZLsy6nU4B4ZVt1p9
-GIsLgJfkCkHT+Vf0ipygAwFnbEUKqs6A/D0EUtAF2Oc7nVl0NIX+9LmEx7Dwl34i
-bTTPVgw5bid08eiN46NN9J4=
------END PRIVATE KEY-----
diff --git a/command/agentproxyshared/cache/cacheboltdb/bolt.go b/command/agentproxyshared/cache/cacheboltdb/bolt.go
index 6100ef896298..05d5ad93637a 100644
--- a/command/agentproxyshared/cache/cacheboltdb/bolt.go
+++ b/command/agentproxyshared/cache/cacheboltdb/bolt.go
@@ -12,10 +12,10 @@ import (
"time"
"github.com/golang/protobuf/proto"
+ bolt "github.com/hashicorp-forge/bbolt"
"github.com/hashicorp/go-hclog"
wrapping "github.com/hashicorp/go-kms-wrapping/v2"
"github.com/hashicorp/go-multierror"
- bolt "go.etcd.io/bbolt"
)
const (
diff --git a/command/agentproxyshared/cache/cacheboltdb/bolt_test.go b/command/agentproxyshared/cache/cacheboltdb/bolt_test.go
index dbfafdce7bb4..06a31780b5ad 100644
--- a/command/agentproxyshared/cache/cacheboltdb/bolt_test.go
+++ b/command/agentproxyshared/cache/cacheboltdb/bolt_test.go
@@ -14,11 +14,11 @@ import (
"time"
"github.com/golang/protobuf/proto"
+ bolt "github.com/hashicorp-forge/bbolt"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/vault/command/agentproxyshared/cache/keymanager"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
- bolt "go.etcd.io/bbolt"
)
func getTestKeyManager(t *testing.T) keymanager.KeyManager {
diff --git a/command/command_test.go b/command/command_test.go
index ed3d31545ea1..def68c4fbc89 100644
--- a/command/command_test.go
+++ b/command/command_test.go
@@ -22,8 +22,6 @@ import (
"github.com/hashicorp/vault/builtin/logical/ssh"
"github.com/hashicorp/vault/builtin/logical/transit"
"github.com/hashicorp/vault/helper/builtinplugins"
- "github.com/hashicorp/vault/helper/testhelpers"
- "github.com/hashicorp/vault/helper/testhelpers/teststorage"
vaulthttp "github.com/hashicorp/vault/http"
"github.com/hashicorp/vault/sdk/logical"
"github.com/hashicorp/vault/sdk/physical/inmem"
@@ -163,23 +161,6 @@ func testVaultServerUnsealWithKVVersionWithSeal(tb testing.TB, kvVersion string,
})
}
-func testVaultRaftCluster(tb testing.TB) *vault.TestCluster {
- conf := &vault.CoreConfig{
- CredentialBackends: defaultVaultCredentialBackends,
- AuditBackends: defaultVaultAuditBackends,
- LogicalBackends: defaultVaultLogicalBackends,
- BuiltinRegistry: builtinplugins.Registry,
- }
- opts := &vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- NumCores: 3,
- }
- teststorage.RaftBackendSetup(conf, opts)
- cluster := vault.NewTestCluster(tb, conf, opts)
- testhelpers.WaitForActiveNodeAndStandbys(tb, cluster)
- return cluster
-}
-
// testVaultServerUnseal creates a test vault cluster and returns a configured
// API client, list of unseal keys (as strings), and a closer function
// configured with the given plugin directory.
diff --git a/command/command_testonly/operator_usage_testonly_test.go b/command/command_testonly/operator_usage_testonly_test.go
index 74d67291fd1f..31de4b88eb15 100644
--- a/command/command_testonly/operator_usage_testonly_test.go
+++ b/command/command_testonly/operator_usage_testonly_test.go
@@ -53,7 +53,7 @@ func TestOperatorUsageCommandRun(t *testing.T) {
now := time.Now().UTC()
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(1).
NewClientsSeen(6, clientcountutil.WithClientType("entity")).
NewClientsSeen(4, clientcountutil.WithClientType("non-entity-token")).
diff --git a/command/format.go b/command/format.go
index b83fad6366e5..548a9a089c85 100644
--- a/command/format.go
+++ b/command/format.go
@@ -357,10 +357,6 @@ func (t TableFormatter) OutputSealStatusStruct(ui cli.Ui, secret *api.Secret, da
out = append(out, fmt.Sprintf("Cluster ID | %s", status.ClusterID))
}
- if status.RemovedFromCluster != nil {
- out = append(out, fmt.Sprintf("Removed From Cluster | %t", *status.RemovedFromCluster))
- }
-
// Output if HCP link is configured
if status.HCPLinkStatus != "" {
out = append(out, fmt.Sprintf("HCP Link Status | %s", status.HCPLinkStatus))
diff --git a/command/main.go b/command/main.go
index 45762d559ac6..465ec5e6e864 100644
--- a/command/main.go
+++ b/command/main.go
@@ -186,8 +186,6 @@ func RunCustom(args []string, runOpts *RunOptions) int {
runOpts.Stderr = colorable.NewNonColorable(runOpts.Stderr)
}
- // This bytes.Buffer override of the uiErrWriter is why we don't see errors printed to the screen
- // when running commands with e.g. -output-curl-string
uiErrWriter := runOpts.Stderr
if outputCurlString || outputPolicy {
uiErrWriter = &bytes.Buffer{}
@@ -320,9 +318,6 @@ func generateCurlString(exitCode int, runOpts *RunOptions, preParsingErrBuf *byt
return 1
}
- // When we actually return from client.rawRequestWithContext(), this value should be set to the OutputStringError
- // that contains the data/context required to output the actual string, so it's doubtful this chunk of code will
- // ever run, but I'm guessing it's a defense in depth thing.
if api.LastOutputStringError == nil {
if exitCode == 127 {
// Usage, just pass it through
diff --git a/command/proxy/config/config.go b/command/proxy/config/config.go
index e0a9080cc38e..2f5f5b320181 100644
--- a/command/proxy/config/config.go
+++ b/command/proxy/config/config.go
@@ -117,6 +117,8 @@ type AutoAuth struct {
Method *Method `hcl:"-"`
Sinks []*Sink `hcl:"sinks"`
+ // NOTE: This is unsupported outside of testing and may disappear at any
+ // time.
EnableReauthOnNewCredentials bool `hcl:"enable_reauth_on_new_credentials"`
}
diff --git a/command/server.go b/command/server.go
index c07b1acc959b..8de060772971 100644
--- a/command/server.go
+++ b/command/server.go
@@ -119,7 +119,6 @@ type ServerCommand struct {
flagConfigs []string
flagRecovery bool
flagExperiments []string
- flagCLIDump string
flagDev bool
flagDevTLS bool
flagDevTLSCertDir string
@@ -222,13 +221,6 @@ func (c *ServerCommand) Flags() *FlagSets {
"Valid experiments are: " + strings.Join(experiments.ValidExperiments(), ", "),
})
- f.StringVar(&StringVar{
- Name: "pprof-dump-dir",
- Target: &c.flagCLIDump,
- Completion: complete.PredictDirs("*"),
- Usage: "Directory where generated profiles are created. If left unset, files are not generated.",
- })
-
f = set.NewFlagSet("Dev Options")
f.BoolVar(&BoolVar{
@@ -1597,11 +1589,6 @@ func (c *ServerCommand) Run(args []string) int {
coreShutdownDoneCh = core.ShutdownDone()
}
- cliDumpCh := make(chan struct{})
- if c.flagCLIDump != "" {
- go func() { cliDumpCh <- struct{}{} }()
- }
-
// Wait for shutdown
shutdownTriggered := false
retCode := 0
@@ -1716,8 +1703,8 @@ func (c *ServerCommand) Run(args []string) int {
// Notify systemd that the server has completed reloading config
c.notifySystemd(systemd.SdNotifyReady)
+
case <-c.SigUSR2Ch:
- c.logger.Info("Received SIGUSR2, dumping goroutines. This is expected behavior. Vault continues to run normally.")
logWriter := c.logger.StandardWriter(&hclog.StandardLoggerOptions{})
pprof.Lookup("goroutine").WriteTo(logWriter, 2)
@@ -1768,51 +1755,6 @@ func (c *ServerCommand) Run(args []string) int {
}
c.logger.Info(fmt.Sprintf("Wrote pprof files to: %s", pprofPath))
- case <-cliDumpCh:
- path := c.flagCLIDump
-
- if _, err := os.Stat(path); err != nil && !errors.Is(err, os.ErrNotExist) {
- c.logger.Error("Checking cli dump path failed", "error", err)
- continue
- }
-
- pprofPath := filepath.Join(path, "vault-pprof")
- err := os.MkdirAll(pprofPath, os.ModePerm)
- if err != nil {
- c.logger.Error("Could not create temporary directory for pprof", "error", err)
- continue
- }
-
- dumps := []string{"goroutine", "heap", "allocs", "threadcreate", "profile"}
- for _, dump := range dumps {
- pFile, err := os.Create(filepath.Join(pprofPath, dump))
- if err != nil {
- c.logger.Error("error creating pprof file", "name", dump, "error", err)
- break
- }
-
- if dump != "profile" {
- err = pprof.Lookup(dump).WriteTo(pFile, 0)
- if err != nil {
- c.logger.Error("error generating pprof data", "name", dump, "error", err)
- pFile.Close()
- break
- }
- } else {
- // CPU profiles need to run for a duration so we're going to run it
- // just for one second to avoid blocking here.
- if err := pprof.StartCPUProfile(pFile); err != nil {
- c.logger.Error("could not start CPU profile: ", err)
- pFile.Close()
- break
- }
- time.Sleep(time.Second * 1)
- pprof.StopCPUProfile()
- }
- pFile.Close()
- }
-
- c.logger.Info(fmt.Sprintf("Wrote startup pprof files to: %s", pprofPath))
}
}
// Notify systemd that the server is shutting down
diff --git a/command/status_test.go b/command/status_test.go
index 1eff8ff8b8ae..47a2803d66cb 100644
--- a/command/status_test.go
+++ b/command/status_test.go
@@ -4,15 +4,10 @@
package command
import (
- "errors"
- "fmt"
"strings"
"testing"
- "time"
"github.com/hashicorp/cli"
- "github.com/hashicorp/vault/helper/testhelpers"
- "github.com/stretchr/testify/require"
)
func testStatusCommand(tb testing.TB) (*cli.MockUi, *StatusCommand) {
@@ -26,41 +21,6 @@ func testStatusCommand(tb testing.TB) (*cli.MockUi, *StatusCommand) {
}
}
-// TestStatusCommand_RaftCluster creates a raft cluster and verifies that a
-// follower has "Removed From Cluster" returned as false in the status command.
-// The test then removes that follower, and checks that "Removed From Cluster"
-// is now true
-func TestStatusCommand_RaftCluster(t *testing.T) {
- t.Parallel()
- cluster := testVaultRaftCluster(t)
- defer cluster.Cleanup()
-
- toRemove := cluster.Cores[1]
- expectRemovedFromCluster := func(expectCode int, removed bool) {
- ui, cmd := testStatusCommand(t)
- cmd.client = toRemove.Client
- code := cmd.Run(nil)
- require.Equal(t, expectCode, code)
- combined := ui.OutputWriter.String() + ui.ErrorWriter.String()
- require.Regexp(t, fmt.Sprintf(".*Removed From Cluster\\s+%t.*", removed), combined)
- }
-
- expectRemovedFromCluster(0, false)
-
- _, err := cluster.Cores[0].Client.Logical().Write("sys/storage/raft/remove-peer",
- map[string]interface{}{
- "server_id": toRemove.NodeID,
- })
- require.NoError(t, err)
- testhelpers.RetryUntil(t, 10*time.Second, func() error {
- if !toRemove.Sealed() {
- return errors.New("core not sealed")
- }
- return nil
- })
- expectRemovedFromCluster(2, true)
-}
-
func TestStatusCommand_Run(t *testing.T) {
t.Parallel()
diff --git a/go.mod b/go.mod
index 7577062dfed0..60d5fddaaea8 100644
--- a/go.mod
+++ b/go.mod
@@ -10,7 +10,7 @@ module github.com/hashicorp/vault
// semantic related to Go module handling), this comment should be updated to explain that.
//
// Whenever this value gets updated, sdk/go.mod should be updated to the same value.
-go 1.23.3
+go 1.22.5
replace github.com/hashicorp/vault/api => ./api
@@ -50,7 +50,7 @@ require (
github.com/cockroachdb/cockroach-go/v2 v2.3.8
github.com/coreos/go-systemd v0.0.0-20191104093116-d3cd4ed1dbcf
github.com/denisenkom/go-mssqldb v0.12.3
- github.com/docker/docker v27.2.1+incompatible
+ github.com/docker/docker v26.1.5+incompatible
github.com/duosecurity/duo_api_golang v0.0.0-20190308151101-6c680f768e74
github.com/dustin/go-humanize v1.0.1
github.com/fatih/color v1.17.0
@@ -70,6 +70,7 @@ require (
github.com/google/go-cmp v0.6.0
github.com/google/go-github v17.0.0+incompatible
github.com/google/go-metrics-stackdriver v0.2.0
+ github.com/hashicorp-forge/bbolt v1.3.8-hc3
github.com/hashicorp/cap v0.7.0
github.com/hashicorp/cap/ldap v0.0.0-20240403125925-c0418810d10e
github.com/hashicorp/cli v1.1.6
@@ -151,7 +152,7 @@ require (
github.com/hashicorp/vault-plugin-secrets-mongodbatlas v0.13.0
github.com/hashicorp/vault-plugin-secrets-openldap v0.14.3
github.com/hashicorp/vault-plugin-secrets-terraform v0.10.0
- github.com/hashicorp/vault-testing-stepwise v0.3.2
+ github.com/hashicorp/vault-testing-stepwise v0.3.1
github.com/hashicorp/vault/api v1.15.0
github.com/hashicorp/vault/api/auth/approle v0.1.0
github.com/hashicorp/vault/api/auth/userpass v0.1.0
@@ -192,12 +193,12 @@ require (
github.com/robfig/cron/v3 v3.0.1
github.com/ryanuber/columnize v2.1.2+incompatible
github.com/ryanuber/go-glob v1.0.0
- github.com/sasha-s/go-deadlock v0.3.5
+ github.com/sasha-s/go-deadlock v0.2.0
github.com/sethvargo/go-limiter v0.7.1
github.com/shirou/gopsutil/v3 v3.22.6
github.com/stretchr/testify v1.9.0
github.com/tink-crypto/tink-go/v2 v2.2.0
- go.etcd.io/bbolt v1.4.0-beta.0
+ go.etcd.io/bbolt v1.3.10
go.etcd.io/etcd/client/pkg/v3 v3.5.13
go.etcd.io/etcd/client/v2 v2.305.5
go.etcd.io/etcd/client/v3 v3.5.13
@@ -213,7 +214,7 @@ require (
golang.org/x/net v0.29.0
golang.org/x/oauth2 v0.23.0
golang.org/x/sync v0.8.0
- golang.org/x/sys v0.27.0
+ golang.org/x/sys v0.25.0
golang.org/x/term v0.24.0
golang.org/x/text v0.18.0
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d
@@ -230,14 +231,11 @@ require (
require (
cel.dev/expr v0.15.0 // indirect
cloud.google.com/go/longrunning v0.6.0 // indirect
- github.com/fsnotify/fsnotify v1.6.0 // indirect
+ github.com/containerd/containerd v1.7.20 // indirect
github.com/fxamacker/cbor/v2 v2.7.0 // indirect
- github.com/go-viper/mapstructure/v2 v2.1.0 // indirect
- github.com/grpc-ecosystem/grpc-gateway/v2 v2.16.0 // indirect
github.com/hashicorp/go-secure-stdlib/httputil v0.1.0 // indirect
github.com/mitchellh/go-testing-interface v1.14.1 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
- github.com/moby/sys/userns v0.1.0 // indirect
github.com/planetscale/vtprotobuf v0.6.1-0.20240319094008-0393e58bdf10 // indirect
github.com/x448/float16 v0.8.4 // indirect
golang.org/x/xerrors v0.0.0-20240716161551-93cc26a95ae9 // indirect
@@ -342,7 +340,7 @@ require (
github.com/digitalocean/godo v1.7.5 // indirect
github.com/dimchansky/utfbom v1.1.1 // indirect
github.com/distribution/reference v0.6.0 // indirect
- github.com/docker/cli v27.2.1+incompatible // indirect
+ github.com/docker/cli v26.1.5+incompatible // indirect
github.com/docker/go-connections v0.5.0 // indirect
github.com/docker/go-units v0.5.0 // indirect
github.com/dvsekhvalnov/jose2go v1.6.0 // indirect
@@ -400,7 +398,7 @@ require (
github.com/hashicorp/go-immutable-radix v1.3.1 // indirect
github.com/hashicorp/go-msgpack/v2 v2.1.2 // indirect
github.com/hashicorp/go-secure-stdlib/fileutil v0.1.0 // indirect
- github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.1 // indirect
+ github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.0 // indirect
github.com/hashicorp/go-slug v0.15.2 // indirect
github.com/hashicorp/go-tfe v1.64.2 // indirect
github.com/hashicorp/jsonapi v1.3.1 // indirect
@@ -456,7 +454,7 @@ require (
github.com/mitchellh/pointerstructure v1.2.1 // indirect
github.com/moby/patternmatcher v0.5.0 // indirect
github.com/moby/sys/sequential v0.5.0 // indirect
- github.com/moby/sys/user v0.3.0 // indirect
+ github.com/moby/sys/user v0.2.0 // indirect
github.com/moby/term v0.5.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
@@ -474,7 +472,7 @@ require (
github.com/oracle/oci-go-sdk/v59 v59.0.0 // indirect
github.com/oracle/oci-go-sdk/v60 v60.0.0 // indirect
github.com/packethost/packngo v0.1.1-0.20180711074735-b9cb5096f54c // indirect
- github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7 // indirect
+ github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 // indirect
github.com/pierrec/lz4 v2.6.1+incompatible // indirect
github.com/pierrec/lz4/v4 v4.1.18 // indirect
github.com/pjbgf/sha1cd v0.3.0 // indirect
diff --git a/go.sum b/go.sum
index c6657c72572f..540b38a0b789 100644
--- a/go.sum
+++ b/go.sum
@@ -742,6 +742,8 @@ github.com/Masterminds/sprig/v3 v3.2.3/go.mod h1:rXcFaZ2zZbLRJv/xSysmlgIM1u11eBa
github.com/Microsoft/go-winio v0.5.2/go.mod h1:WpS1mjBmmwHBEWmogvA2mj8546UReBk4v8QkMxJ6pZY=
github.com/Microsoft/go-winio v0.6.2 h1:F2VQgta7ecxGYO8k3ZZz3RS8fVIXVxONVUPlNERoyfY=
github.com/Microsoft/go-winio v0.6.2/go.mod h1:yd8OoFMLzJbo9gZq8j5qaps8bJ9aShtEA8Ipt1oGCvU=
+github.com/Microsoft/hcsshim v0.11.7 h1:vl/nj3Bar/CvJSYo7gIQPyRWc9f3c6IeSNavBTSZNZQ=
+github.com/Microsoft/hcsshim v0.11.7/go.mod h1:MV8xMfmECjl5HdO7U/3/hFVnkmSBjAjmA09d4bExKcU=
github.com/NYTimes/gziphandler v0.0.0-20170623195520-56545f4a5d46/go.mod h1:3wb06e3pkSAbeQ52E9H9iFoQsEEwGN64994WTCIhntQ=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5 h1:TngWCqHvy9oXAN6lEVMRuU21PR1EtLVZJmdB18Gu3Rw=
github.com/Nvveen/Gotty v0.0.0-20120604004816-cd527374f1e5/go.mod h1:lmUJ/7eu/Q8D7ML55dXQrVaamCz2vxCfdQBasLZfHKk=
@@ -923,6 +925,8 @@ github.com/cockroachdb/cockroach-go/v2 v2.3.8 h1:53yoUo4+EtrC1NrAEgnnad4AS3ntNvG
github.com/cockroachdb/cockroach-go/v2 v2.3.8/go.mod h1:9uH5jK4yQ3ZQUT9IXe4I2fHzMIF5+JC/oOdzTRgJYJk=
github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0 h1:sDMmm+q/3+BukdIpxwO365v/Rbspp2Nt5XntgQRXq8Q=
github.com/codegangsta/inject v0.0.0-20150114235600-33e0aa1cb7c0/go.mod h1:4Zcjuz89kmFXt9morQgcfYZAYZ5n8WHjt81YYWIwtTM=
+github.com/containerd/containerd v1.7.20 h1:Sl6jQYk3TRavaU83h66QMbI2Nqg9Jm6qzwX57Vsn1SQ=
+github.com/containerd/containerd v1.7.20/go.mod h1:52GsS5CwquuqPuLncsXwG0t2CiUce+KsNHJZQJvAgR0=
github.com/containerd/continuity v0.4.3 h1:6HVkalIp+2u1ZLH1J/pYX2oBVXlJZvh1X1A7bEZ9Su8=
github.com/containerd/continuity v0.4.3/go.mod h1:F6PTNCKepoxEaXLQp3wDAjygEnImnZ/7o4JzpodfroQ=
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
@@ -991,10 +995,10 @@ github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5
github.com/dnaeon/go-vcr v1.0.1/go.mod h1:aBB1+wY4s93YsC3HHjMBMrwTj2R9FHDzUr9KyGc8n1E=
github.com/dnaeon/go-vcr v1.2.0 h1:zHCHvJYTMh1N7xnV7zf1m1GPBF9Ad0Jk/whtQ1663qI=
github.com/dnaeon/go-vcr v1.2.0/go.mod h1:R4UdLID7HZT3taECzJs4YgbbH6PIGXB6W/sc5OLb6RQ=
-github.com/docker/cli v27.2.1+incompatible h1:U5BPtiD0viUzjGAjV1p0MGB8eVA3L3cbIrnyWmSJI70=
-github.com/docker/cli v27.2.1+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
-github.com/docker/docker v27.2.1+incompatible h1:fQdiLfW7VLscyoeYEBz7/J8soYFDZV1u6VW6gJEjNMI=
-github.com/docker/docker v27.2.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/cli v26.1.5+incompatible h1:NxXGSdz2N+Ibdaw330TDO3d/6/f7MvHuiMbuFaIQDTk=
+github.com/docker/cli v26.1.5+incompatible/go.mod h1:JLrzqnKDaYBop7H2jaqPtU4hHvMKP+vjCwu2uszcLI8=
+github.com/docker/docker v26.1.5+incompatible h1:NEAxTwEjxV6VbBMBoGG3zPqbiJosIApZjxlbrG9q3/g=
+github.com/docker/docker v26.1.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.5.0 h1:USnMq7hx7gwdVZq1L49hLXaFtUdTADjXGp+uj1Br63c=
github.com/docker/go-connections v0.5.0/go.mod h1:ov60Kzw0kKElRwhNs9UlUHAE/F9Fe6GLaXnqyDdmEXc=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
@@ -1159,8 +1163,6 @@ github.com/go-task/slim-sprig/v3 v3.0.0/go.mod h1:W848ghGpv3Qj3dhTPRyJypKRiqCdHZ
github.com/go-test/deep v1.0.2/go.mod h1:wGDj63lr65AM2AQyKZd/NYHGb0R+1RLqB8NKt3aSFNA=
github.com/go-test/deep v1.1.1 h1:0r/53hagsehfO4bzD2Pgr/+RgHqhmf+k1Bpse2cTu1U=
github.com/go-test/deep v1.1.1/go.mod h1:5C2ZWiW0ErCdrYzpqxLbTX7MG14M9iiw8DgHncVwcsE=
-github.com/go-viper/mapstructure/v2 v2.1.0 h1:gHnMa2Y/pIxElCH2GlZZ1lZSsn6XMtufpGyP1XxdC/w=
-github.com/go-viper/mapstructure/v2 v2.1.0/go.mod h1:oJDH3BJKyqBA2TXFhDsKDGDTlndYOZ6rGS0BRZIxGhM=
github.com/go-zookeeper/zk v1.0.3 h1:7M2kwOsc//9VeeFiPtf+uSJlVpU66x9Ba5+8XK7/TDg=
github.com/go-zookeeper/zk v1.0.3/go.mod h1:nOB03cncLtlp4t+UAkGSV+9beXP/akpekBwL+UX1Qcw=
github.com/goccy/go-json v0.9.11/go.mod h1:6MelG93GURQebXPDq3khkgXZkazVtN9CRI+MGFi0w8I=
@@ -1355,6 +1357,7 @@ github.com/grpc-ecosystem/go-grpc-middleware v1.4.0 h1:UH//fgunKIs4JdUbpDl1VZCDa
github.com/grpc-ecosystem/go-grpc-middleware v1.4.0/go.mod h1:g5qyo/la0ALbONm6Vbp88Yd8NsDy6rZz+RcrMPxvld8=
github.com/grpc-ecosystem/go-grpc-prometheus v1.2.0/go.mod h1:8NvIoxWQoOIhqOTXgfV/d3M/q6VIi02HzZEHgUlZvzk=
github.com/grpc-ecosystem/grpc-gateway v1.9.0/go.mod h1:vNeuVxBJEsws4ogUvrchl83t/GYV9WGTSLVdBhOQFDY=
+github.com/grpc-ecosystem/grpc-gateway v1.16.0 h1:gmcG1KaJ57LophUzW0Hy8NmPhnMZb4M0+kPpLofRdBo=
github.com/grpc-ecosystem/grpc-gateway v1.16.0/go.mod h1:BDjrQk3hbvj6Nolgz8mAMFbcEtjT1g+wF4CSlocrBnw=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.7.0/go.mod h1:hgWBS7lorOAVIJEQMi4ZsPv9hVvWI6+ch50m39Pf2Ks=
github.com/grpc-ecosystem/grpc-gateway/v2 v2.11.3/go.mod h1:o//XUCC/F+yRGJoPO/VU0GSB0f8Nhgmxx0VIRUvaC0w=
@@ -1364,6 +1367,8 @@ github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c h1:6rhixN/i8
github.com/gsterjov/go-libsecret v0.0.0-20161001094733-a6f4afe4910c/go.mod h1:NMPJylDgVpX0MLRlPy15sqSwOFv/U1GZ2m21JhFfek0=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed h1:5upAirOpQc1Q53c0bnx2ufif5kANL7bfZWcc6VJWJd8=
github.com/hailocab/go-hostpool v0.0.0-20160125115350-e80d13ce29ed/go.mod h1:tMWxXQ9wFIaZeTI9F+hmhFiGpFmhOHzyShyFUhRm0H4=
+github.com/hashicorp-forge/bbolt v1.3.8-hc3 h1:iTWR3RDPj0TGChAvJ8QjHFcNFWAUVgNQV73IE6gAX4E=
+github.com/hashicorp-forge/bbolt v1.3.8-hc3/go.mod h1:sQBu5UIJ+rcUFU4Fo9rpTHNV935jwmGWS3dQ/MV8810=
github.com/hashicorp/cap v0.7.0 h1:atLIEU5lJslYXo1qsv7RtUL1HrJVVxnfkErIT3uxLp0=
github.com/hashicorp/cap v0.7.0/go.mod h1:UynhCoGX3pxL0OfVrfMzPWAyjMYp96bk11BNTf2zt8o=
github.com/hashicorp/cap/ldap v0.0.0-20240403125925-c0418810d10e h1:IakB/NhT0YtMEGqAf2tViMdBABC2cMAZn3O/mVeg2j4=
@@ -1467,8 +1472,8 @@ github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8 h1:iBt4Ew4XEGLfh6/bPk4rSY
github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8/go.mod h1:aiJI+PIApBRQG7FZTEBx5GiiX+HbOHilUdNxUZi4eV0=
github.com/hashicorp/go-secure-stdlib/password v0.1.1 h1:6JzmBqXprakgFEHwBgdchsjaA9x3GyjdI568bXKxa60=
github.com/hashicorp/go-secure-stdlib/password v0.1.1/go.mod h1:9hH302QllNwu1o2TGYtSk8I8kTAN0ca1EHpwhm5Mmzo=
-github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.1 h1:JY+zGg8gOmslwif1fiCqT5Hu1SikLZQcHkmQhCoA9gY=
-github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.1/go.mod h1:jW3KCTvdPyAdVecOUwiiO2XaYgUJ/isigt++ISkszkY=
+github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.0 h1:7Yran48kl6X7jfUg3sfYDrFot1gD3LvzdC3oPu5l/qo=
+github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.0/go.mod h1:9WJFu7L3d+Z4ViZmwUf+6/73/Uy7YMY1NXrB9wdElYE=
github.com/hashicorp/go-secure-stdlib/reloadutil v0.1.1 h1:SMGUnbpAcat8rIKHkBPjfv81yC46a8eCNZ2hsR2l1EI=
github.com/hashicorp/go-secure-stdlib/reloadutil v0.1.1/go.mod h1:Ch/bf00Qnx77MZd49JRgHYqHQjtEmTgGU2faufpVZb0=
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts=
@@ -1592,8 +1597,8 @@ github.com/hashicorp/vault-plugin-secrets-openldap v0.14.3 h1:HY8q7qVmhtBYiNa5K2
github.com/hashicorp/vault-plugin-secrets-openldap v0.14.3/go.mod h1:wqOf/QJqrrNXjnm0eLUnm5Ju9s/LIZUl6wEKmnFL9Uo=
github.com/hashicorp/vault-plugin-secrets-terraform v0.10.0 h1:YzOJrpuDRNrw5SQ4i7IEjedF40I/7ejupQy+gAyQ6Zg=
github.com/hashicorp/vault-plugin-secrets-terraform v0.10.0/go.mod h1:j2nbB//xAQMD+5JivVDalwDEyzJY3AWzKIkw6k65xJQ=
-github.com/hashicorp/vault-testing-stepwise v0.3.2 h1:FCe0yrbK/hHiHqzu7utLcvCTTKjghWHyXwOQ2lxfoQM=
-github.com/hashicorp/vault-testing-stepwise v0.3.2/go.mod h1:aI3k4Nu6TjBKxatj8plXKn8LhA9qb2TeeJyz2psHXEw=
+github.com/hashicorp/vault-testing-stepwise v0.3.1 h1:SqItnMWOOknQfJJR49Fps34ZfBMWSqBFFTx6NoTHzNw=
+github.com/hashicorp/vault-testing-stepwise v0.3.1/go.mod h1:BK7TOCyZ7idR7txAlPGEu+9ETJzlQsYQNdabSv3lyYY=
github.com/hashicorp/vault/vault/hcp_link/proto v0.0.0-20230201201504-b741fa893d77 h1:Y/+BtwxmRak3Us9jrByARvYW6uNeqZlEpMylIdXVIjY=
github.com/hashicorp/vault/vault/hcp_link/proto v0.0.0-20230201201504-b741fa893d77/go.mod h1:a2crHoMWwY6aiL8GWT8hYj7vKD64uX0EdRPbnsHF5wU=
github.com/hashicorp/vic v1.5.1-0.20190403131502-bbfe86ec9443 h1:O/pT5C1Q3mVXMyuqg7yuAWUg/jMZR1/0QTzTRdNR6Uw=
@@ -1866,10 +1871,8 @@ github.com/moby/patternmatcher v0.5.0 h1:YCZgJOeULcxLw1Q+sVR636pmS7sPEn1Qo2iAN6M
github.com/moby/patternmatcher v0.5.0/go.mod h1:hDPoyOpDY7OrrMDLaYoY3hf52gNCR/YOUYxkhApJIxc=
github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5lXtc=
github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
-github.com/moby/sys/user v0.3.0 h1:9ni5DlcW5an3SvRSx4MouotOygvzaXbaSrc/wGDFWPo=
-github.com/moby/sys/user v0.3.0/go.mod h1:bG+tYYYJgaMtRKgEmuueC0hJEAZWwtIbZTB+85uoHjs=
-github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g=
-github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28=
+github.com/moby/sys/user v0.2.0 h1:OnpapJsRp25vkhw8TFG6OLJODNh/3rEwRWtJ3kakwRM=
+github.com/moby/sys/user v0.2.0/go.mod h1:RYstrcWOJpVh+6qzUqp2bU3eaRpdiQeKGlKitaH0PM8=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -1957,8 +1960,8 @@ github.com/patrickmn/go-cache v2.1.0+incompatible h1:HRMgzkcYKYpi3C8ajMPV8OFXaaR
github.com/patrickmn/go-cache v2.1.0+incompatible/go.mod h1:3Qf8kWWT7OJRJbdiICTKqZju1ZixQ/KpMGzzAfe6+WQ=
github.com/pelletier/go-toml v1.2.0/go.mod h1:5z9KED0ma1S8pY6P1sdut58dfprrGBbd/94hg7ilaic=
github.com/peterbourgon/diskv v2.0.1+incompatible/go.mod h1:uqqh8zWWbv1HBMNONnaR/tNboyR3/BZd58JJSHlUSCU=
-github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7 h1:Dx7Ovyv/SFnMFw3fD4oEoeorXc6saIiQ23LrGLth0Gw=
-github.com/petermattis/goid v0.0.0-20240813172612-4fcff4a6cae7/go.mod h1:pxMtw7cyUw6B2bRH0ZBANSPg+AoSud1I1iyJHI69jH4=
+github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 h1:q2e307iGHPdTGp0hoxKjt1H5pDo6utceo3dQVK3I5XQ=
+github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5/go.mod h1:jvVRKCrJTQWu0XVbaOlby/2lO20uSCHEMzzplHXte1o=
github.com/phpdave11/gofpdf v1.4.2/go.mod h1:zpO6xFn9yxo3YLyMvW8HcKWVdbNqgIfOOp2dXMnm1mY=
github.com/phpdave11/gofpdi v1.0.12/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
github.com/phpdave11/gofpdi v1.0.13/go.mod h1:vBmVV0Do6hSBHC8uKUQ71JGW+ZGQq74llk/7bXwjDoI=
@@ -2048,8 +2051,8 @@ github.com/ryanuber/columnize v2.1.2+incompatible h1:C89EOx/XBWwIXl8wm8OPJBd7kPF
github.com/ryanuber/columnize v2.1.2+incompatible/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
github.com/ryanuber/go-glob v1.0.0 h1:iQh3xXAumdQ+4Ufa5b25cRpC5TYKlno6hsv6Cb3pkBk=
github.com/ryanuber/go-glob v1.0.0/go.mod h1:807d1WSdnB0XRJzKNil9Om6lcp/3a0v4qIHxIXzX/Yc=
-github.com/sasha-s/go-deadlock v0.3.5 h1:tNCOEEDG6tBqrNDOX35j/7hL5FcFViG6awUGROb2NsU=
-github.com/sasha-s/go-deadlock v0.3.5/go.mod h1:bugP6EGbdGYObIlx7pUZtWqlvo8k9H6vCBBsiChJQ5U=
+github.com/sasha-s/go-deadlock v0.2.0 h1:lMqc+fUb7RrFS3gQLtoQsJ7/6TV/pAIFvBsqX73DK8Y=
+github.com/sasha-s/go-deadlock v0.2.0/go.mod h1:StQn567HiB1fF2yJ44N9au7wOhrPS3iZqiDbRupzT10=
github.com/satori/go.uuid v1.2.0 h1:0uYX9dsZ2yD7q2RtLRtPSdGDWzjeM3TbMJP9utgA0ww=
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
github.com/sean-/conswriter v0.0.0-20180208195008-f5ae3917a627/go.mod h1:7zjs06qF79/FKAJpBvFx3P8Ww4UTIMAe+lpNXDHziac=
@@ -2202,8 +2205,8 @@ github.com/zeebo/xxh3 v1.0.2 h1:xZmwmqxHZA8AI603jOQ0tMqmBr9lPeFwGg6d+xy9DC0=
github.com/zeebo/xxh3 v1.0.2/go.mod h1:5NWz9Sef7zIDm2JHfFlcQvNekmcEl9ekUZQQKCYaDcA=
github.com/zenazn/goji v0.9.0/go.mod h1:7S9M489iMyHBNxwZnk9/EHS098H4/F6TATF2mIxtB1Q=
go.etcd.io/bbolt v1.3.2/go.mod h1:IbVyRI1SCnLcuJnV2u8VeU0CEYM7e686BmAb1XKL+uU=
-go.etcd.io/bbolt v1.4.0-beta.0 h1:U7Y9yH6ZojEo5/BDFMXDXD1RNx9L7iKxudzqR68jLaM=
-go.etcd.io/bbolt v1.4.0-beta.0/go.mod h1:Qv5yHB6jkQESXT/uVfxJgUPMqgAyhL0GLxcQaz9bSec=
+go.etcd.io/bbolt v1.3.10 h1:+BqfJTcCzTItrop8mq/lbzL8wSGtj94UO/3U31shqG0=
+go.etcd.io/bbolt v1.3.10/go.mod h1:bK3UQLPJZly7IlNmV7uVHJDxfe5aK9Ll93e/74Y9oEQ=
go.etcd.io/etcd/api/v3 v3.5.5/go.mod h1:KFtNaxGDw4Yx/BA4iPPwevUTAuqcsPxzyX8PHydchN8=
go.etcd.io/etcd/api/v3 v3.5.13 h1:8WXU2/NBge6AUF1K1gOexB6e07NgsN1hXK0rSTtgSp4=
go.etcd.io/etcd/api/v3 v3.5.13/go.mod h1:gBqlqkcMMZMVTMm4NDZloEVJzxQOQIls8splbqBDa0c=
@@ -2235,8 +2238,8 @@ go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts=
go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0 h1:lsInsfvhVIfOI6qHVyysXMNDnjO9Npvl7tlDPJFBVd4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.30.0/go.mod h1:KQsVNh4OjgjTG0G6EiNi1jVpnaeeKsKMRwbLN+f1+8M=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 h1:umZgi92IyxfXd/l4kaDhnKgY8rnN/cZcF1LKc6I8OQ8=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0/go.mod h1:4lVs6obhSVRb1EW5FhOuBTyiQhtRtAnnva9vD3yRfq8=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0 h1:JAv0Jwtl01UFiyWZEMiJZBiTlv5A50zNs8lsthXqIio=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.29.0/go.mod h1:QNKLmUEAq2QUbPQUfvw4fmv0bgbK7UlOSFCnXyfvSNc=
go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w=
go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ=
go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE=
@@ -2608,7 +2611,6 @@ golang.org/x/sys v0.0.0-20220722155257-8c9f86f7a55f/go.mod h1:oPkhp1MJrh7nUepCBc
golang.org/x/sys v0.0.0-20220728004956-3c1f35247d10/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220811171246-fbc7d0a398ab/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.0.0-20220829200755-d48e67d00261/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.0.0-20220908164124-27713097b956/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.1.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.2.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.3.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
@@ -2620,8 +2622,8 @@ golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.17.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/sys v0.18.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
-golang.org/x/sys v0.27.0 h1:wBqf8DvsY9Y/2P8gAfPDEYNuS30J4lPHJxXSb/nJZ+s=
-golang.org/x/sys v0.27.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
+golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
diff --git a/helper/dhutil/dhutil.go b/helper/dhutil/dhutil.go
index 127bb4b02504..7c14cb29fbca 100644
--- a/helper/dhutil/dhutil.go
+++ b/helper/dhutil/dhutil.go
@@ -30,17 +30,14 @@ type Envelope struct {
// generatePublicPrivateKey uses curve25519 to generate a public and private key
// pair.
func GeneratePublicPrivateKey() ([]byte, []byte, error) {
- scalar := make([]byte, 32)
+ var scalar, public [32]byte
- if _, err := io.ReadFull(rand.Reader, scalar); err != nil {
+ if _, err := io.ReadFull(rand.Reader, scalar[:]); err != nil {
return nil, nil, err
}
- public, err := curve25519.X25519(scalar, curve25519.Basepoint)
- if err != nil {
- return nil, nil, err
- }
- return public, scalar, nil
+ curve25519.ScalarBaseMult(&public, &scalar)
+ return public[:], scalar[:], nil
}
// GenerateSharedSecret uses the private key and the other party's public key to
diff --git a/helper/forwarding/types.pb.go b/helper/forwarding/types.pb.go
index f3b1d9f60da0..66b6decb991a 100644
--- a/helper/forwarding/types.pb.go
+++ b/helper/forwarding/types.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: helper/forwarding/types.proto
@@ -42,9 +42,11 @@ type Request struct {
func (x *Request) Reset() {
*x = Request{}
- mi := &file_helper_forwarding_types_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_forwarding_types_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Request) String() string {
@@ -55,7 +57,7 @@ func (*Request) ProtoMessage() {}
func (x *Request) ProtoReflect() protoreflect.Message {
mi := &file_helper_forwarding_types_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -140,9 +142,11 @@ type URL struct {
func (x *URL) Reset() {
*x = URL{}
- mi := &file_helper_forwarding_types_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_forwarding_types_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *URL) String() string {
@@ -153,7 +157,7 @@ func (*URL) ProtoMessage() {}
func (x *URL) ProtoReflect() protoreflect.Message {
mi := &file_helper_forwarding_types_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -227,9 +231,11 @@ type HeaderEntry struct {
func (x *HeaderEntry) Reset() {
*x = HeaderEntry{}
- mi := &file_helper_forwarding_types_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_forwarding_types_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *HeaderEntry) String() string {
@@ -240,7 +246,7 @@ func (*HeaderEntry) ProtoMessage() {}
func (x *HeaderEntry) ProtoReflect() protoreflect.Message {
mi := &file_helper_forwarding_types_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -280,9 +286,11 @@ type Response struct {
func (x *Response) Reset() {
*x = Response{}
- mi := &file_helper_forwarding_types_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_forwarding_types_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Response) String() string {
@@ -293,7 +301,7 @@ func (*Response) ProtoMessage() {}
func (x *Response) ProtoReflect() protoreflect.Message {
mi := &file_helper_forwarding_types_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -440,6 +448,56 @@ func file_helper_forwarding_types_proto_init() {
if File_helper_forwarding_types_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_helper_forwarding_types_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*Request); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_forwarding_types_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*URL); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_forwarding_types_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*HeaderEntry); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_forwarding_types_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*Response); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/helper/identity/mfa/types.pb.go b/helper/identity/mfa/types.pb.go
index 5d8a57367d39..55c992fc8a09 100644
--- a/helper/identity/mfa/types.pb.go
+++ b/helper/identity/mfa/types.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: helper/identity/mfa/types.proto
@@ -57,9 +57,11 @@ type Config struct {
func (x *Config) Reset() {
*x = Config{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Config) String() string {
@@ -70,7 +72,7 @@ func (*Config) ProtoMessage() {}
func (x *Config) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -220,9 +222,11 @@ type TOTPConfig struct {
func (x *TOTPConfig) Reset() {
*x = TOTPConfig{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TOTPConfig) String() string {
@@ -233,7 +237,7 @@ func (*TOTPConfig) ProtoMessage() {}
func (x *TOTPConfig) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -325,9 +329,11 @@ type DuoConfig struct {
func (x *DuoConfig) Reset() {
*x = DuoConfig{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *DuoConfig) String() string {
@@ -338,7 +344,7 @@ func (*DuoConfig) ProtoMessage() {}
func (x *DuoConfig) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -409,9 +415,11 @@ type OktaConfig struct {
func (x *OktaConfig) Reset() {
*x = OktaConfig{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *OktaConfig) String() string {
@@ -422,7 +430,7 @@ func (*OktaConfig) ProtoMessage() {}
func (x *OktaConfig) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -496,9 +504,11 @@ type PingIDConfig struct {
func (x *PingIDConfig) Reset() {
*x = PingIDConfig{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PingIDConfig) String() string {
@@ -509,7 +519,7 @@ func (*PingIDConfig) ProtoMessage() {}
func (x *PingIDConfig) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -590,9 +600,11 @@ type Secret struct {
func (x *Secret) Reset() {
*x = Secret{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Secret) String() string {
@@ -603,7 +615,7 @@ func (*Secret) ProtoMessage() {}
func (x *Secret) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -679,9 +691,11 @@ type TOTPSecret struct {
func (x *TOTPSecret) Reset() {
*x = TOTPSecret{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[6]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TOTPSecret) String() string {
@@ -692,7 +706,7 @@ func (*TOTPSecret) ProtoMessage() {}
func (x *TOTPSecret) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[6]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -782,9 +796,11 @@ type MFAEnforcementConfig struct {
func (x *MFAEnforcementConfig) Reset() {
*x = MFAEnforcementConfig{}
- mi := &file_helper_identity_mfa_types_proto_msgTypes[7]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_mfa_types_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MFAEnforcementConfig) String() string {
@@ -795,7 +811,7 @@ func (*MFAEnforcementConfig) ProtoMessage() {}
func (x *MFAEnforcementConfig) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_mfa_types_proto_msgTypes[7]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1036,6 +1052,104 @@ func file_helper_identity_mfa_types_proto_init() {
if File_helper_identity_mfa_types_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_helper_identity_mfa_types_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*Config); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_mfa_types_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*TOTPConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_mfa_types_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*DuoConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_mfa_types_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*OktaConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_mfa_types_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*PingIDConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_mfa_types_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*Secret); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_mfa_types_proto_msgTypes[6].Exporter = func(v any, i int) any {
+ switch v := v.(*TOTPSecret); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_mfa_types_proto_msgTypes[7].Exporter = func(v any, i int) any {
+ switch v := v.(*MFAEnforcementConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
file_helper_identity_mfa_types_proto_msgTypes[0].OneofWrappers = []any{
(*Config_TOTPConfig)(nil),
(*Config_OktaConfig)(nil),
diff --git a/helper/identity/types.pb.go b/helper/identity/types.pb.go
index a3055992823f..fbdcb636735d 100644
--- a/helper/identity/types.pb.go
+++ b/helper/identity/types.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: helper/identity/types.proto
@@ -86,9 +86,11 @@ type Group struct {
func (x *Group) Reset() {
*x = Group{}
- mi := &file_helper_identity_types_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_types_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Group) String() string {
@@ -99,7 +101,7 @@ func (*Group) ProtoMessage() {}
func (x *Group) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_types_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -217,9 +219,11 @@ type LocalAliases struct {
func (x *LocalAliases) Reset() {
*x = LocalAliases{}
- mi := &file_helper_identity_types_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_types_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LocalAliases) String() string {
@@ -230,7 +234,7 @@ func (*LocalAliases) ProtoMessage() {}
func (x *LocalAliases) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_types_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -321,9 +325,11 @@ type Entity struct {
func (x *Entity) Reset() {
*x = Entity{}
- mi := &file_helper_identity_types_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_types_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Entity) String() string {
@@ -334,7 +340,7 @@ func (*Entity) ProtoMessage() {}
func (x *Entity) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_types_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -504,9 +510,11 @@ type Alias struct {
func (x *Alias) Reset() {
*x = Alias{}
- mi := &file_helper_identity_types_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_types_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Alias) String() string {
@@ -517,7 +525,7 @@ func (*Alias) ProtoMessage() {}
func (x *Alias) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_types_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -650,9 +658,11 @@ type EntityStorageEntry struct {
func (x *EntityStorageEntry) Reset() {
*x = EntityStorageEntry{}
- mi := &file_helper_identity_types_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_types_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EntityStorageEntry) String() string {
@@ -663,7 +673,7 @@ func (*EntityStorageEntry) ProtoMessage() {}
func (x *EntityStorageEntry) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_types_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -768,9 +778,11 @@ type PersonaIndexEntry struct {
func (x *PersonaIndexEntry) Reset() {
*x = PersonaIndexEntry{}
- mi := &file_helper_identity_types_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_identity_types_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PersonaIndexEntry) String() string {
@@ -781,7 +793,7 @@ func (*PersonaIndexEntry) ProtoMessage() {}
func (x *PersonaIndexEntry) ProtoReflect() protoreflect.Message {
mi := &file_helper_identity_types_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1149,6 +1161,80 @@ func file_helper_identity_types_proto_init() {
if File_helper_identity_types_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_helper_identity_types_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*Group); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_types_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*LocalAliases); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_types_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*Entity); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_types_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*Alias); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_types_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*EntityStorageEntry); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_identity_types_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*PersonaIndexEntry); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/helper/storagepacker/types.pb.go b/helper/storagepacker/types.pb.go
index a2893799fc4c..fd73a0ebb2e9 100644
--- a/helper/storagepacker/types.pb.go
+++ b/helper/storagepacker/types.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: helper/storagepacker/types.proto
@@ -42,9 +42,11 @@ type Item struct {
func (x *Item) Reset() {
*x = Item{}
- mi := &file_helper_storagepacker_types_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_storagepacker_types_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Item) String() string {
@@ -55,7 +57,7 @@ func (*Item) ProtoMessage() {}
func (x *Item) ProtoReflect() protoreflect.Message {
mi := &file_helper_storagepacker_types_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -105,9 +107,11 @@ type Bucket struct {
func (x *Bucket) Reset() {
*x = Bucket{}
- mi := &file_helper_storagepacker_types_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_helper_storagepacker_types_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Bucket) String() string {
@@ -118,7 +122,7 @@ func (*Bucket) ProtoMessage() {}
func (x *Bucket) ProtoReflect() protoreflect.Message {
mi := &file_helper_storagepacker_types_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -222,6 +226,32 @@ func file_helper_storagepacker_types_proto_init() {
if File_helper_storagepacker_types_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_helper_storagepacker_types_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*Item); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_helper_storagepacker_types_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*Bucket); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/helper/testhelpers/mssql/mssqlhelper.go b/helper/testhelpers/mssql/mssqlhelper.go
index 71c2ea2d654a..154caf259843 100644
--- a/helper/testhelpers/mssql/mssqlhelper.go
+++ b/helper/testhelpers/mssql/mssqlhelper.go
@@ -12,9 +12,7 @@ import (
"runtime"
"strings"
"testing"
- "time"
- "github.com/hashicorp/vault/helper/testhelpers"
"github.com/hashicorp/vault/helper/testhelpers/corehelpers"
"github.com/hashicorp/vault/sdk/helper/docker"
)
@@ -37,61 +35,24 @@ func PrepareMSSQLTestContainer(t *testing.T) (cleanup func(), retURL string) {
logger := corehelpers.NewTestLogger(t)
- // Workaround for https://github.com/microsoft/mssql-docker/issues/895 and us temporary seeing
- // tls: failed to parse certificate from server: x509: negative serial number in test case failures.
- containerfile := `
-FROM mcr.microsoft.com/mssql/server:2022-latest
-USER root
-ENV MSDIR=/var/opt/mssql
-RUN mkdir -p $MSDIR \
- && openssl req -x509 -nodes -newkey rsa:2048 -subj '/CN=mssql' -addext "subjectAltName = DNS:mssql" -keyout $MSDIR/mssql.key -out $MSDIR/mssql.pem -days 1 \
- && chmod 400 $MSDIR/mssql.key \
- && chmod 400 $MSDIR/mssql.pem \
- && chown -R mssql $MSDIR
-
-RUN echo "[network]" > $MSDIR/mssql.conf \
- && echo "tlscert = $MSDIR/mssql.pem" >> $MSDIR/mssql.conf \
- && echo "tlskey = $MSDIR/mssql.key" >> $MSDIR/mssql.conf \
- && echo "tlsprotocols = 1.2" >> $MSDIR/mssql.conf \
- && echo "forceencryption = 1" >> $MSDIR/mssql.conf
-
-USER mssql
-`
- bCtx := docker.NewBuildContext()
- imageName := "mssql-workaround-895"
- imageTag := "latest"
-
- runner, err := docker.NewServiceRunner(docker.RunOptions{
- ContainerName: "sqlserver",
- ImageRepo: imageName,
- ImageTag: imageTag,
- Env: []string{"ACCEPT_EULA=Y", "SA_PASSWORD=" + mssqlPassword},
- Ports: []string{"1433/tcp"},
- LogConsumer: func(s string) {
- logger.Info(s)
- },
- })
- if err != nil {
- t.Fatalf("Could not provision docker service runner: %s", err)
- }
-
- // Sometimes we see timeouts and issues pulling the zlint code from GitHub
- testhelpers.RetryUntil(t, 30*time.Second, func() error {
- output, err := runner.BuildImage(context.Background(), containerfile, bCtx,
- docker.BuildRemove(true),
- docker.BuildForceRemove(true),
- docker.BuildPullParent(true),
- docker.BuildTags([]string{imageName + ":" + imageTag}))
- if err != nil {
- return fmt.Errorf("could not build new mssql image: %w", err)
- }
-
- t.Logf("Image build output: %v", string(output))
- return nil
- })
-
+ var err error
for i := 0; i < numRetries; i++ {
var svc *docker.Service
+ var runner *docker.Runner
+ runner, err = docker.NewServiceRunner(docker.RunOptions{
+ ContainerName: "sqlserver",
+ ImageRepo: "mcr.microsoft.com/mssql/server",
+ ImageTag: "2022-latest",
+ Env: []string{"ACCEPT_EULA=Y", "SA_PASSWORD=" + mssqlPassword},
+ Ports: []string{"1433/tcp"},
+ LogConsumer: func(s string) {
+ logger.Info(s)
+ },
+ })
+ if err != nil {
+ logger.Error("failed creating new service runner", "error", err.Error())
+ continue
+ }
svc, err = runner.StartService(context.Background(), connectMSSQL)
if err == nil {
diff --git a/http/handler.go b/http/handler.go
index d8f040251ff5..c74f392aee75 100644
--- a/http/handler.go
+++ b/http/handler.go
@@ -988,14 +988,9 @@ func forwardRequest(core *vault.Core, w http.ResponseWriter, r *http.Request) {
// ErrCannotForward and we simply fall back
statusCode, header, retBytes, err := core.ForwardRequest(r)
if err != nil {
- switch {
- case errors.Is(err, vault.ErrCannotForward):
+ if err == vault.ErrCannotForward {
core.Logger().Trace("cannot forward request (possibly disabled on active node), falling back to redirection to standby")
- case errors.Is(err, vault.StatusNotHAMember):
- core.Logger().Trace("this node is not a member of the HA cluster", "error", err)
- respondError(w, http.StatusInternalServerError, err)
- return
- default:
+ } else {
core.Logger().Error("forward request error", "error", err)
}
diff --git a/http/sys_seal.go b/http/sys_seal.go
index 81af5551db00..4852d57d5e4c 100644
--- a/http/sys_seal.go
+++ b/http/sys_seal.go
@@ -85,14 +85,6 @@ func handleSysUnseal(core *vault.Core) http.Handler {
return
}
- // Check if this node was removed from the cluster. If so, respond with an error and return,
- // since we don't want a removed node to be able to unseal.
- removed, ok := core.IsRemovedFromCluster()
- if ok && removed {
- respondError(w, http.StatusInternalServerError, errors.New("node was removed from a HA cluster"))
- return
- }
-
// Parse the request
var req UnsealRequest
if _, err := parseJSONRequest(core.PerfStandby(), r, w, &req); err != nil {
diff --git a/http/sys_seal_test.go b/http/sys_seal_test.go
index 173d0ab541bf..694dc971c7ea 100644
--- a/http/sys_seal_test.go
+++ b/http/sys_seal_test.go
@@ -439,35 +439,6 @@ func TestSysUnseal_Reset(t *testing.T) {
}
}
-// TestSysUnseal_NodeRemovedFromCluster verifies that a call to /sys/unseal fails
-// with an appropriate error when the node has been removed from a cluster.
-func TestSysUnseal_NodeRemovedFromCluster(t *testing.T) {
- core, err := vault.TestCoreWithMockRemovableNodeHABackend(t, true)
- if err != nil {
- t.Fatalf("err: %s", err)
- }
- ln, addr := TestServer(t, core)
- defer ln.Close()
-
- // The value of key doesn't matter here, we just need to make a request.
- resp := testHttpPut(t, "", addr+"/v1/sys/unseal", map[string]interface{}{
- "key": "foo",
- })
-
- testResponseStatus(t, resp, 500)
- var actual map[string]interface{}
- testResponseBody(t, resp, &actual)
-
- errors, ok := actual["errors"].([]interface{})
- if !ok {
- t.Fatalf("no errors in the response, request should be invalid")
- }
- expectedErrorMsg := "node was removed from a HA cluster"
- if !strings.Contains(errors[0].(string), expectedErrorMsg) {
- t.Fatalf("error message should contain %q", expectedErrorMsg)
- }
-}
-
// Test Seal's permissions logic, which is slightly different than normal code
// paths in that it queries the ACL rather than having checkToken do it. This
// is because it was abusing RootPaths in logical_system, but that caused some
diff --git a/physical/dynamodb/dynamodb.go b/physical/dynamodb/dynamodb.go
index bc27def0c987..c4484d20d446 100644
--- a/physical/dynamodb/dynamodb.go
+++ b/physical/dynamodb/dynamodb.go
@@ -294,7 +294,7 @@ func (d *DynamoDBBackend) Put(ctx context.Context, entry *physical.Entry) error
})
}
- return d.batchWriteRequests(ctx, requests)
+ return d.batchWriteRequests(requests)
}
// Get is used to fetch an entry
@@ -304,7 +304,7 @@ func (d *DynamoDBBackend) Get(ctx context.Context, key string) (*physical.Entry,
d.permitPool.Acquire()
defer d.permitPool.Release()
- resp, err := d.client.GetItemWithContext(ctx, &dynamodb.GetItemInput{
+ resp, err := d.client.GetItem(&dynamodb.GetItemInput{
TableName: aws.String(d.table),
ConsistentRead: aws.Bool(true),
Key: map[string]*dynamodb.AttributeValue{
@@ -363,7 +363,7 @@ func (d *DynamoDBBackend) Delete(ctx context.Context, key string) error {
excluded = append(excluded, recordKeyForVaultKey(prefixes[index-1]))
}
- hasChildren, err := d.hasChildren(ctx, prefix, excluded)
+ hasChildren, err := d.hasChildren(prefix, excluded)
if err != nil {
return err
}
@@ -387,7 +387,7 @@ func (d *DynamoDBBackend) Delete(ctx context.Context, key string) error {
}
}
- return d.batchWriteRequests(ctx, requests)
+ return d.batchWriteRequests(requests)
}
// List is used to list all the keys under a given
@@ -420,7 +420,7 @@ func (d *DynamoDBBackend) List(ctx context.Context, prefix string) ([]string, er
d.permitPool.Acquire()
defer d.permitPool.Release()
- err := d.client.QueryPagesWithContext(ctx, queryInput, func(out *dynamodb.QueryOutput, lastPage bool) bool {
+ err := d.client.QueryPages(queryInput, func(out *dynamodb.QueryOutput, lastPage bool) bool {
var record DynamoDBRecord
for _, item := range out.Items {
dynamodbattribute.UnmarshalMap(item, &record)
@@ -443,7 +443,7 @@ func (d *DynamoDBBackend) List(ctx context.Context, prefix string) ([]string, er
// before any deletes take place. To account for that hasChildren accepts a slice of
// strings representing values we expect to find that should NOT be counted as children
// because they are going to be deleted.
-func (d *DynamoDBBackend) hasChildren(ctx context.Context, prefix string, exclude []string) (bool, error) {
+func (d *DynamoDBBackend) hasChildren(prefix string, exclude []string) (bool, error) {
prefix = strings.TrimSuffix(prefix, "/")
prefix = escapeEmptyPath(prefix)
@@ -473,7 +473,7 @@ func (d *DynamoDBBackend) hasChildren(ctx context.Context, prefix string, exclud
d.permitPool.Acquire()
defer d.permitPool.Release()
- out, err := d.client.QueryWithContext(ctx, queryInput)
+ out, err := d.client.Query(queryInput)
if err != nil {
return false, err
}
@@ -519,7 +519,7 @@ func (d *DynamoDBBackend) HAEnabled() bool {
// batchWriteRequests takes a list of write requests and executes them in badges
// with a maximum size of 25 (which is the limit of BatchWriteItem requests).
-func (d *DynamoDBBackend) batchWriteRequests(ctx context.Context, requests []*dynamodb.WriteRequest) error {
+func (d *DynamoDBBackend) batchWriteRequests(requests []*dynamodb.WriteRequest) error {
for len(requests) > 0 {
batchSize := int(math.Min(float64(len(requests)), 25))
batch := map[string][]*dynamodb.WriteRequest{d.table: requests[:batchSize]}
@@ -534,7 +534,7 @@ func (d *DynamoDBBackend) batchWriteRequests(ctx context.Context, requests []*dy
for len(batch) > 0 {
var output *dynamodb.BatchWriteItemOutput
- output, err = d.client.BatchWriteItemWithContext(ctx, &dynamodb.BatchWriteItemInput{
+ output, err = d.client.BatchWriteItem(&dynamodb.BatchWriteItemInput{
RequestItems: batch,
})
if err != nil {
diff --git a/physical/raft/config.go b/physical/raft/config.go
index 03cdc14041e2..fbd0d5038d61 100644
--- a/physical/raft/config.go
+++ b/physical/raft/config.go
@@ -11,12 +11,13 @@ import (
"strconv"
"time"
+ bolt "github.com/hashicorp-forge/bbolt"
log "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-secure-stdlib/parseutil"
"github.com/hashicorp/go-uuid"
goversion "github.com/hashicorp/go-version"
autopilot "github.com/hashicorp/raft-autopilot"
- bolt "go.etcd.io/bbolt"
+ etcdbolt "go.etcd.io/bbolt"
)
type RaftBackendConfig struct {
@@ -281,3 +282,39 @@ func boltOptions(path string) *bolt.Options {
return o
}
+
+func etcdboltOptions(path string) *etcdbolt.Options {
+ o := &etcdbolt.Options{
+ Timeout: 1 * time.Second,
+ FreelistType: etcdbolt.FreelistMapType,
+ NoFreelistSync: true,
+ MmapFlags: getMmapFlags(path),
+ }
+
+ if os.Getenv("VAULT_RAFT_FREELIST_TYPE") == "array" {
+ o.FreelistType = etcdbolt.FreelistArrayType
+ }
+
+ if os.Getenv("VAULT_RAFT_FREELIST_SYNC") != "" {
+ o.NoFreelistSync = false
+ }
+
+ // By default, we want to set InitialMmapSize to 100GB, but only on 64bit platforms.
+ // Otherwise, we set it to whatever the value of VAULT_RAFT_INITIAL_MMAP_SIZE
+ // is, assuming it can be parsed as an int. Bolt itself sets this to 0 by default,
+ // so if users are wanting to turn this off, they can also set it to 0. Setting it
+ // to a negative value is the same as not setting it at all.
+ if os.Getenv("VAULT_RAFT_INITIAL_MMAP_SIZE") == "" {
+ o.InitialMmapSize = initialMmapSize
+ } else {
+ imms, err := strconv.Atoi(os.Getenv("VAULT_RAFT_INITIAL_MMAP_SIZE"))
+
+ // If there's an error here, it means they passed something that's not convertible to
+ // a number. Rather than fail startup, just ignore it.
+ if err == nil && imms > 0 {
+ o.InitialMmapSize = imms
+ }
+ }
+
+ return o
+}
diff --git a/physical/raft/fsm.go b/physical/raft/fsm.go
index 7c6dabc8f68c..cfbe8374aaff 100644
--- a/physical/raft/fsm.go
+++ b/physical/raft/fsm.go
@@ -22,6 +22,7 @@ import (
"github.com/armon/go-metrics"
"github.com/golang/protobuf/proto"
+ bolt "github.com/hashicorp-forge/bbolt"
log "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-multierror"
"github.com/hashicorp/go-raftchunking"
@@ -31,7 +32,6 @@ import (
"github.com/hashicorp/vault/sdk/helper/jsonutil"
"github.com/hashicorp/vault/sdk/physical"
"github.com/hashicorp/vault/sdk/plugin/pb"
- bolt "go.etcd.io/bbolt"
)
const (
diff --git a/physical/raft/raft.go b/physical/raft/raft.go
index de828377c002..6202b3734f81 100644
--- a/physical/raft/raft.go
+++ b/physical/raft/raft.go
@@ -22,6 +22,7 @@ import (
"github.com/armon/go-metrics"
"github.com/golang/protobuf/proto"
+ bolt "github.com/hashicorp-forge/bbolt"
log "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-raftchunking"
"github.com/hashicorp/go-secure-stdlib/parseutil"
@@ -40,7 +41,7 @@ import (
"github.com/hashicorp/vault/sdk/physical"
"github.com/hashicorp/vault/vault/cluster"
"github.com/hashicorp/vault/version"
- bolt "go.etcd.io/bbolt"
+ etcdbolt "go.etcd.io/bbolt"
)
const (
@@ -86,7 +87,6 @@ var (
_ physical.TransactionalLimits = (*RaftBackend)(nil)
_ physical.HABackend = (*RaftBackend)(nil)
_ physical.MountTableLimitingBackend = (*RaftBackend)(nil)
- _ physical.RemovableNodeHABackend = (*RaftBackend)(nil)
_ physical.Lock = (*RaftLock)(nil)
)
@@ -255,30 +255,6 @@ type RaftBackend struct {
// specialPathLimits is a map of special paths to their configured entrySize
// limits.
specialPathLimits map[string]uint64
-
- removed atomic.Bool
-}
-
-func (b *RaftBackend) IsNodeRemoved(ctx context.Context, nodeID string) (bool, error) {
- conf, err := b.GetConfiguration(ctx)
- if err != nil {
- return false, err
- }
- for _, srv := range conf.Servers {
- if srv.NodeID == nodeID {
- return false, nil
- }
- }
- return true, nil
-}
-
-func (b *RaftBackend) IsRemoved() bool {
- return b.removed.Load()
-}
-
-func (b *RaftBackend) RemoveSelf() error {
- b.removed.Store(true)
- return nil
}
// LeaderJoinInfo contains information required by a node to join itself as a
@@ -527,7 +503,7 @@ func NewRaftBackend(conf map[string]string, logger log.Logger) (physical.Backend
logStore = wal
} else {
// use the traditional BoltDB setup
- opts := boltOptions(dbPath)
+ opts := etcdboltOptions(dbPath)
raftOptions := raftboltdb.Options{
Path: dbPath,
BoltOptions: opts,
@@ -863,7 +839,7 @@ func makeLogVerifyReportFn(logger log.Logger) verifier.ReportFn {
func (b *RaftBackend) CollectMetrics(sink *metricsutil.ClusterMetricSink) {
var stats map[string]string
- var logStoreStats *bolt.Stats
+ var logStoreStats *etcdbolt.Stats
b.l.RLock()
if boltStore, ok := b.stableStore.(*raftboltdb.BoltStore); ok {
@@ -879,7 +855,7 @@ func (b *RaftBackend) CollectMetrics(sink *metricsutil.ClusterMetricSink) {
b.l.RUnlock()
if logStoreStats != nil {
- b.collectMetricsWithStats(*logStoreStats, sink, "logstore")
+ b.collectEtcdBoltMetricsWithStats(*logStoreStats, sink, "logstore")
}
b.collectMetricsWithStats(fsmStats, sink, "fsm")
@@ -923,6 +899,29 @@ func (b *RaftBackend) collectMetricsWithStats(stats bolt.Stats, sink *metricsuti
sink.IncrCounterWithLabels([]string{"raft_storage", "bolt", "write", "time"}, float32(txstats.GetWriteTime().Milliseconds()), labels)
}
+func (b *RaftBackend) collectEtcdBoltMetricsWithStats(stats etcdbolt.Stats, sink *metricsutil.ClusterMetricSink, database string) {
+ txstats := stats.TxStats
+ labels := []metricsutil.Label{{"database", database}}
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "freelist", "free_pages"}, float32(stats.FreePageN), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "freelist", "pending_pages"}, float32(stats.PendingPageN), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "freelist", "allocated_bytes"}, float32(stats.FreeAlloc), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "freelist", "used_bytes"}, float32(stats.FreelistInuse), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "transaction", "started_read_transactions"}, float32(stats.TxN), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "transaction", "currently_open_read_transactions"}, float32(stats.OpenTxN), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "page", "count"}, float32(txstats.GetPageCount()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "page", "bytes_allocated"}, float32(txstats.GetPageAlloc()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "cursor", "count"}, float32(txstats.GetCursorCount()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "node", "count"}, float32(txstats.GetNodeCount()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "node", "dereferences"}, float32(txstats.GetNodeDeref()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "rebalance", "count"}, float32(txstats.GetRebalance()), labels)
+ sink.AddSampleWithLabels([]string{"raft_storage", "bolt", "rebalance", "time"}, float32(txstats.GetRebalanceTime().Milliseconds()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "split", "count"}, float32(txstats.GetSplit()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "spill", "count"}, float32(txstats.GetSpill()), labels)
+ sink.AddSampleWithLabels([]string{"raft_storage", "bolt", "spill", "time"}, float32(txstats.GetSpillTime().Milliseconds()), labels)
+ sink.SetGaugeWithLabels([]string{"raft_storage", "bolt", "write", "count"}, float32(txstats.GetWrite()), labels)
+ sink.IncrCounterWithLabels([]string{"raft_storage", "bolt", "write", "time"}, float32(txstats.GetWriteTime().Milliseconds()), labels)
+}
+
// RaftServer has information about a server in the Raft configuration
type RaftServer struct {
// NodeID is the name of the server
@@ -1391,8 +1390,6 @@ func (b *RaftBackend) SetupCluster(ctx context.Context, opts SetupOpts) error {
}
}
- b.StartRemovedChecker(ctx)
-
b.logger.Trace("finished setting up raft cluster")
return nil
}
@@ -1426,34 +1423,6 @@ func (b *RaftBackend) TeardownCluster(clusterListener cluster.ClusterHook) error
return nil
}
-func (b *RaftBackend) StartRemovedChecker(ctx context.Context) {
- go func() {
- ticker := time.NewTicker(time.Second)
- defer ticker.Stop()
-
- logger := b.logger.Named("removed.checker")
- for {
- select {
- case <-ticker.C:
- removed, err := b.IsNodeRemoved(ctx, b.localID)
- if err != nil {
- logger.Error("failed to check if node is removed", "node ID", b.localID, "error", err)
- continue
- }
- if removed {
- err := b.RemoveSelf()
- if err != nil {
- logger.Error("failed to remove self", "node ID", b.localID, "error", err)
- }
- return
- }
- case <-ctx.Done():
- return
- }
- }
- }()
-}
-
// CommittedIndex returns the latest index committed to stable storage
func (b *RaftBackend) CommittedIndex() uint64 {
b.l.RLock()
diff --git a/physical/raft/raft_test.go b/physical/raft/raft_test.go
index 70b14c1b7b75..112764aa1974 100644
--- a/physical/raft/raft_test.go
+++ b/physical/raft/raft_test.go
@@ -20,6 +20,7 @@ import (
"github.com/go-test/deep"
"github.com/golang/protobuf/proto"
+ bolt "github.com/hashicorp-forge/bbolt"
"github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-secure-stdlib/base62"
"github.com/hashicorp/go-uuid"
@@ -27,7 +28,6 @@ import (
"github.com/hashicorp/vault/sdk/helper/jsonutil"
"github.com/hashicorp/vault/sdk/physical"
"github.com/stretchr/testify/require"
- bolt "go.etcd.io/bbolt"
)
func testBothRaftBackends(t *testing.T, f func(t *testing.T, raftWALValue string)) {
@@ -758,42 +758,6 @@ func TestRaft_TransactionalBackend_ThreeNode(t *testing.T) {
})
}
-// TestRaft_Removed creates a 3 node cluster and checks that the nodes are not
-// removed, then verifies that node3 marks itself as removed when it gets
-// removed from the cluster
-func TestRaft_Removed(t *testing.T) {
- t.Parallel()
- testBothRaftBackends(t, func(t *testing.T, raftWALValue string) {
- conf := map[string]string{
- "trailing_logs": "100",
- "raft_wal": raftWALValue,
- }
-
- raft1, _ := GetRaftWithConfig(t, true, true, conf)
- raft2, _ := GetRaftWithConfig(t, false, true, conf)
- raft3, _ := GetRaftWithConfig(t, false, true, conf)
-
- addPeer(t, raft1, raft2)
- addPeer(t, raft1, raft3)
- physical.ExerciseBackend(t, raft1)
-
- commitIdx := raft1.CommittedIndex()
- ensureCommitApplied(t, commitIdx, raft2)
- ensureCommitApplied(t, commitIdx, raft3)
-
- require.False(t, raft1.IsRemoved())
- require.False(t, raft2.IsRemoved())
- require.False(t, raft3.IsRemoved())
-
- err := raft1.RemovePeer(context.Background(), raft3.NodeID())
- require.NoError(t, err)
-
- require.Eventually(t, raft3.IsRemoved, 15*time.Second, 500*time.Millisecond)
- require.False(t, raft1.IsRemoved())
- require.False(t, raft2.IsRemoved())
- })
-}
-
// TestRaft_TransactionalLimitsEnvOverride ensures the ENV var overrides for
// transaction size limits are plumbed through as expected.
func TestRaft_TransactionalLimitsEnvOverride(t *testing.T) {
diff --git a/physical/raft/snapshot.go b/physical/raft/snapshot.go
index 5a8f881d7b35..e44769bf6d2a 100644
--- a/physical/raft/snapshot.go
+++ b/physical/raft/snapshot.go
@@ -18,11 +18,11 @@ import (
"time"
"github.com/golang/protobuf/proto"
+ bolt "github.com/hashicorp-forge/bbolt"
log "github.com/hashicorp/go-hclog"
"github.com/hashicorp/raft"
"github.com/hashicorp/vault/sdk/plugin/pb"
"github.com/rboyer/safeio"
- bolt "go.etcd.io/bbolt"
"go.uber.org/atomic"
)
diff --git a/physical/raft/types.pb.go b/physical/raft/types.pb.go
index 301664e42274..9790739d7c88 100644
--- a/physical/raft/types.pb.go
+++ b/physical/raft/types.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: physical/raft/types.proto
@@ -40,9 +40,11 @@ type LogOperation struct {
func (x *LogOperation) Reset() {
*x = LogOperation{}
- mi := &file_physical_raft_types_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_physical_raft_types_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LogOperation) String() string {
@@ -53,7 +55,7 @@ func (*LogOperation) ProtoMessage() {}
func (x *LogOperation) ProtoReflect() protoreflect.Message {
mi := &file_physical_raft_types_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -106,9 +108,11 @@ type LogData struct {
func (x *LogData) Reset() {
*x = LogData{}
- mi := &file_physical_raft_types_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_physical_raft_types_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LogData) String() string {
@@ -119,7 +123,7 @@ func (*LogData) ProtoMessage() {}
func (x *LogData) ProtoReflect() protoreflect.Message {
mi := &file_physical_raft_types_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -152,9 +156,11 @@ type IndexValue struct {
func (x *IndexValue) Reset() {
*x = IndexValue{}
- mi := &file_physical_raft_types_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_physical_raft_types_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *IndexValue) String() string {
@@ -165,7 +171,7 @@ func (*IndexValue) ProtoMessage() {}
func (x *IndexValue) ProtoReflect() protoreflect.Message {
mi := &file_physical_raft_types_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -206,9 +212,11 @@ type Server struct {
func (x *Server) Reset() {
*x = Server{}
- mi := &file_physical_raft_types_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_physical_raft_types_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Server) String() string {
@@ -219,7 +227,7 @@ func (*Server) ProtoMessage() {}
func (x *Server) ProtoReflect() protoreflect.Message {
mi := &file_physical_raft_types_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -266,9 +274,11 @@ type ConfigurationValue struct {
func (x *ConfigurationValue) Reset() {
*x = ConfigurationValue{}
- mi := &file_physical_raft_types_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_physical_raft_types_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ConfigurationValue) String() string {
@@ -279,7 +289,7 @@ func (*ConfigurationValue) ProtoMessage() {}
func (x *ConfigurationValue) ProtoReflect() protoreflect.Message {
mi := &file_physical_raft_types_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -318,9 +328,11 @@ type LocalNodeConfigValue struct {
func (x *LocalNodeConfigValue) Reset() {
*x = LocalNodeConfigValue{}
- mi := &file_physical_raft_types_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_physical_raft_types_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LocalNodeConfigValue) String() string {
@@ -331,7 +343,7 @@ func (*LocalNodeConfigValue) ProtoMessage() {}
func (x *LocalNodeConfigValue) ProtoReflect() protoreflect.Message {
mi := &file_physical_raft_types_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -428,6 +440,80 @@ func file_physical_raft_types_proto_init() {
if File_physical_raft_types_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_physical_raft_types_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*LogOperation); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_physical_raft_types_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*LogData); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_physical_raft_types_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*IndexValue); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_physical_raft_types_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*Server); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_physical_raft_types_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*ConfigurationValue); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_physical_raft_types_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*LocalNodeConfigValue); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/physical/s3/s3.go b/physical/s3/s3.go
index b1687a91622e..da82acccd3ca 100644
--- a/physical/s3/s3.go
+++ b/physical/s3/s3.go
@@ -183,7 +183,7 @@ func (s *S3Backend) Put(ctx context.Context, entry *physical.Entry) error {
putObjectInput.SSEKMSKeyId = aws.String(s.kmsKeyId)
}
- _, err := s.client.PutObjectWithContext(ctx, putObjectInput)
+ _, err := s.client.PutObject(putObjectInput)
if err != nil {
return err
}
@@ -201,7 +201,7 @@ func (s *S3Backend) Get(ctx context.Context, key string) (*physical.Entry, error
// Setup key
key = path.Join(s.path, key)
- resp, err := s.client.GetObjectWithContext(ctx, &s3.GetObjectInput{
+ resp, err := s.client.GetObject(&s3.GetObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(key),
})
@@ -254,7 +254,7 @@ func (s *S3Backend) Delete(ctx context.Context, key string) error {
// Setup key
key = path.Join(s.path, key)
- _, err := s.client.DeleteObjectWithContext(ctx, &s3.DeleteObjectInput{
+ _, err := s.client.DeleteObject(&s3.DeleteObjectInput{
Bucket: aws.String(s.bucket),
Key: aws.String(key),
})
@@ -289,7 +289,7 @@ func (s *S3Backend) List(ctx context.Context, prefix string) ([]string, error) {
keys := []string{}
- err := s.client.ListObjectsV2PagesWithContext(ctx, params,
+ err := s.client.ListObjectsV2Pages(params,
func(page *s3.ListObjectsV2Output, lastPage bool) bool {
if page != nil {
// Add truncated 'folder' paths
diff --git a/sdk/database/dbplugin/database.pb.go b/sdk/database/dbplugin/database.pb.go
index d48c5f107bdc..825813ec9349 100644
--- a/sdk/database/dbplugin/database.pb.go
+++ b/sdk/database/dbplugin/database.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/database/dbplugin/database.proto
@@ -36,9 +36,11 @@ type InitializeRequest struct {
func (x *InitializeRequest) Reset() {
*x = InitializeRequest{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InitializeRequest) String() string {
@@ -49,7 +51,7 @@ func (*InitializeRequest) ProtoMessage() {}
func (x *InitializeRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -89,9 +91,11 @@ type InitRequest struct {
func (x *InitRequest) Reset() {
*x = InitRequest{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InitRequest) String() string {
@@ -102,7 +106,7 @@ func (*InitRequest) ProtoMessage() {}
func (x *InitRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -143,9 +147,11 @@ type CreateUserRequest struct {
func (x *CreateUserRequest) Reset() {
*x = CreateUserRequest{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *CreateUserRequest) String() string {
@@ -156,7 +162,7 @@ func (*CreateUserRequest) ProtoMessage() {}
func (x *CreateUserRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -204,9 +210,11 @@ type RenewUserRequest struct {
func (x *RenewUserRequest) Reset() {
*x = RenewUserRequest{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RenewUserRequest) String() string {
@@ -217,7 +225,7 @@ func (*RenewUserRequest) ProtoMessage() {}
func (x *RenewUserRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -264,9 +272,11 @@ type RevokeUserRequest struct {
func (x *RevokeUserRequest) Reset() {
*x = RevokeUserRequest{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RevokeUserRequest) String() string {
@@ -277,7 +287,7 @@ func (*RevokeUserRequest) ProtoMessage() {}
func (x *RevokeUserRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -316,9 +326,11 @@ type RotateRootCredentialsRequest struct {
func (x *RotateRootCredentialsRequest) Reset() {
*x = RotateRootCredentialsRequest{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RotateRootCredentialsRequest) String() string {
@@ -329,7 +341,7 @@ func (*RotateRootCredentialsRequest) ProtoMessage() {}
func (x *RotateRootCredentialsRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -381,9 +393,11 @@ type Statements struct {
func (x *Statements) Reset() {
*x = Statements{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[6]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Statements) String() string {
@@ -394,7 +408,7 @@ func (*Statements) ProtoMessage() {}
func (x *Statements) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[6]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -487,9 +501,11 @@ type UsernameConfig struct {
func (x *UsernameConfig) Reset() {
*x = UsernameConfig{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[7]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *UsernameConfig) String() string {
@@ -500,7 +516,7 @@ func (*UsernameConfig) ProtoMessage() {}
func (x *UsernameConfig) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[7]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -539,9 +555,11 @@ type InitResponse struct {
func (x *InitResponse) Reset() {
*x = InitResponse{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[8]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[8]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InitResponse) String() string {
@@ -552,7 +570,7 @@ func (*InitResponse) ProtoMessage() {}
func (x *InitResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[8]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -585,9 +603,11 @@ type CreateUserResponse struct {
func (x *CreateUserResponse) Reset() {
*x = CreateUserResponse{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[9]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[9]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *CreateUserResponse) String() string {
@@ -598,7 +618,7 @@ func (*CreateUserResponse) ProtoMessage() {}
func (x *CreateUserResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[9]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -637,9 +657,11 @@ type TypeResponse struct {
func (x *TypeResponse) Reset() {
*x = TypeResponse{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[10]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[10]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TypeResponse) String() string {
@@ -650,7 +672,7 @@ func (*TypeResponse) ProtoMessage() {}
func (x *TypeResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[10]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -682,9 +704,11 @@ type RotateRootCredentialsResponse struct {
func (x *RotateRootCredentialsResponse) Reset() {
*x = RotateRootCredentialsResponse{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[11]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[11]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RotateRootCredentialsResponse) String() string {
@@ -695,7 +719,7 @@ func (*RotateRootCredentialsResponse) ProtoMessage() {}
func (x *RotateRootCredentialsResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[11]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -725,9 +749,11 @@ type Empty struct {
func (x *Empty) Reset() {
*x = Empty{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[12]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[12]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Empty) String() string {
@@ -738,7 +764,7 @@ func (*Empty) ProtoMessage() {}
func (x *Empty) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[12]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -763,9 +789,11 @@ type GenerateCredentialsResponse struct {
func (x *GenerateCredentialsResponse) Reset() {
*x = GenerateCredentialsResponse{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[13]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[13]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GenerateCredentialsResponse) String() string {
@@ -776,7 +804,7 @@ func (*GenerateCredentialsResponse) ProtoMessage() {}
func (x *GenerateCredentialsResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[13]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -810,9 +838,11 @@ type StaticUserConfig struct {
func (x *StaticUserConfig) Reset() {
*x = StaticUserConfig{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[14]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[14]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StaticUserConfig) String() string {
@@ -823,7 +853,7 @@ func (*StaticUserConfig) ProtoMessage() {}
func (x *StaticUserConfig) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[14]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -870,9 +900,11 @@ type SetCredentialsRequest struct {
func (x *SetCredentialsRequest) Reset() {
*x = SetCredentialsRequest{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[15]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[15]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *SetCredentialsRequest) String() string {
@@ -883,7 +915,7 @@ func (*SetCredentialsRequest) ProtoMessage() {}
func (x *SetCredentialsRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[15]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -923,9 +955,11 @@ type SetCredentialsResponse struct {
func (x *SetCredentialsResponse) Reset() {
*x = SetCredentialsResponse{}
- mi := &file_sdk_database_dbplugin_database_proto_msgTypes[16]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_database_proto_msgTypes[16]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *SetCredentialsResponse) String() string {
@@ -936,7 +970,7 @@ func (*SetCredentialsResponse) ProtoMessage() {}
func (x *SetCredentialsResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_database_proto_msgTypes[16]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1207,6 +1241,212 @@ func file_sdk_database_dbplugin_database_proto_init() {
if File_sdk_database_dbplugin_database_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_database_dbplugin_database_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*InitializeRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*InitRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*CreateUserRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*RenewUserRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*RevokeUserRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*RotateRootCredentialsRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[6].Exporter = func(v any, i int) any {
+ switch v := v.(*Statements); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[7].Exporter = func(v any, i int) any {
+ switch v := v.(*UsernameConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[8].Exporter = func(v any, i int) any {
+ switch v := v.(*InitResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[9].Exporter = func(v any, i int) any {
+ switch v := v.(*CreateUserResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[10].Exporter = func(v any, i int) any {
+ switch v := v.(*TypeResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[11].Exporter = func(v any, i int) any {
+ switch v := v.(*RotateRootCredentialsResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[12].Exporter = func(v any, i int) any {
+ switch v := v.(*Empty); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[13].Exporter = func(v any, i int) any {
+ switch v := v.(*GenerateCredentialsResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[14].Exporter = func(v any, i int) any {
+ switch v := v.(*StaticUserConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[15].Exporter = func(v any, i int) any {
+ switch v := v.(*SetCredentialsRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_database_proto_msgTypes[16].Exporter = func(v any, i int) any {
+ switch v := v.(*SetCredentialsResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/database/dbplugin/database_grpc.pb.go b/sdk/database/dbplugin/database_grpc.pb.go
index b6f20e5cb509..57a0c059adf6 100644
--- a/sdk/database/dbplugin/database_grpc.pb.go
+++ b/sdk/database/dbplugin/database_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: sdk/database/dbplugin/database.proto
@@ -18,8 +18,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
Database_Type_FullMethodName = "/dbplugin.Database/Type"
@@ -162,7 +162,7 @@ func (c *databaseClient) Initialize(ctx context.Context, in *InitializeRequest,
// DatabaseServer is the server API for Database service.
// All implementations must embed UnimplementedDatabaseServer
-// for forward compatibility.
+// for forward compatibility
type DatabaseServer interface {
Type(context.Context, *Empty) (*TypeResponse, error)
CreateUser(context.Context, *CreateUserRequest) (*CreateUserResponse, error)
@@ -178,12 +178,9 @@ type DatabaseServer interface {
mustEmbedUnimplementedDatabaseServer()
}
-// UnimplementedDatabaseServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedDatabaseServer struct{}
+// UnimplementedDatabaseServer must be embedded to have forward compatible implementations.
+type UnimplementedDatabaseServer struct {
+}
func (UnimplementedDatabaseServer) Type(context.Context, *Empty) (*TypeResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Type not implemented")
@@ -216,7 +213,6 @@ func (UnimplementedDatabaseServer) Initialize(context.Context, *InitializeReques
return nil, status.Errorf(codes.Unimplemented, "method Initialize not implemented")
}
func (UnimplementedDatabaseServer) mustEmbedUnimplementedDatabaseServer() {}
-func (UnimplementedDatabaseServer) testEmbeddedByValue() {}
// UnsafeDatabaseServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to DatabaseServer will
@@ -226,13 +222,6 @@ type UnsafeDatabaseServer interface {
}
func RegisterDatabaseServer(s grpc.ServiceRegistrar, srv DatabaseServer) {
- // If the following call pancis, it indicates UnimplementedDatabaseServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&Database_ServiceDesc, srv)
}
diff --git a/sdk/database/dbplugin/v5/proto/database.pb.go b/sdk/database/dbplugin/v5/proto/database.pb.go
index 594d77a8c7a2..376315758927 100644
--- a/sdk/database/dbplugin/v5/proto/database.pb.go
+++ b/sdk/database/dbplugin/v5/proto/database.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/database/dbplugin/v5/proto/database.proto
@@ -39,9 +39,11 @@ type InitializeRequest struct {
func (x *InitializeRequest) Reset() {
*x = InitializeRequest{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InitializeRequest) String() string {
@@ -52,7 +54,7 @@ func (*InitializeRequest) ProtoMessage() {}
func (x *InitializeRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -91,9 +93,11 @@ type InitializeResponse struct {
func (x *InitializeResponse) Reset() {
*x = InitializeResponse{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InitializeResponse) String() string {
@@ -104,7 +108,7 @@ func (*InitializeResponse) ProtoMessage() {}
func (x *InitializeResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -143,9 +147,11 @@ type NewUserRequest struct {
func (x *NewUserRequest) Reset() {
*x = NewUserRequest{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *NewUserRequest) String() string {
@@ -156,7 +162,7 @@ func (*NewUserRequest) ProtoMessage() {}
func (x *NewUserRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -238,9 +244,11 @@ type UsernameConfig struct {
func (x *UsernameConfig) Reset() {
*x = UsernameConfig{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *UsernameConfig) String() string {
@@ -251,7 +259,7 @@ func (*UsernameConfig) ProtoMessage() {}
func (x *UsernameConfig) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -290,9 +298,11 @@ type NewUserResponse struct {
func (x *NewUserResponse) Reset() {
*x = NewUserResponse{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *NewUserResponse) String() string {
@@ -303,7 +313,7 @@ func (*NewUserResponse) ProtoMessage() {}
func (x *NewUserResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -343,9 +353,11 @@ type UpdateUserRequest struct {
func (x *UpdateUserRequest) Reset() {
*x = UpdateUserRequest{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *UpdateUserRequest) String() string {
@@ -356,7 +368,7 @@ func (*UpdateUserRequest) ProtoMessage() {}
func (x *UpdateUserRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -424,9 +436,11 @@ type ChangePassword struct {
func (x *ChangePassword) Reset() {
*x = ChangePassword{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[6]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ChangePassword) String() string {
@@ -437,7 +451,7 @@ func (*ChangePassword) ProtoMessage() {}
func (x *ChangePassword) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[6]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -477,9 +491,11 @@ type ChangePublicKey struct {
func (x *ChangePublicKey) Reset() {
*x = ChangePublicKey{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[7]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ChangePublicKey) String() string {
@@ -490,7 +506,7 @@ func (*ChangePublicKey) ProtoMessage() {}
func (x *ChangePublicKey) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[7]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -530,9 +546,11 @@ type ChangeExpiration struct {
func (x *ChangeExpiration) Reset() {
*x = ChangeExpiration{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[8]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[8]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ChangeExpiration) String() string {
@@ -543,7 +561,7 @@ func (*ChangeExpiration) ProtoMessage() {}
func (x *ChangeExpiration) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[8]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -580,9 +598,11 @@ type UpdateUserResponse struct {
func (x *UpdateUserResponse) Reset() {
*x = UpdateUserResponse{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[9]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[9]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *UpdateUserResponse) String() string {
@@ -593,7 +613,7 @@ func (*UpdateUserResponse) ProtoMessage() {}
func (x *UpdateUserResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[9]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -622,9 +642,11 @@ type DeleteUserRequest struct {
func (x *DeleteUserRequest) Reset() {
*x = DeleteUserRequest{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[10]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[10]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *DeleteUserRequest) String() string {
@@ -635,7 +657,7 @@ func (*DeleteUserRequest) ProtoMessage() {}
func (x *DeleteUserRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[10]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -672,9 +694,11 @@ type DeleteUserResponse struct {
func (x *DeleteUserResponse) Reset() {
*x = DeleteUserResponse{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[11]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[11]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *DeleteUserResponse) String() string {
@@ -685,7 +709,7 @@ func (*DeleteUserResponse) ProtoMessage() {}
func (x *DeleteUserResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[11]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -713,9 +737,11 @@ type TypeResponse struct {
func (x *TypeResponse) Reset() {
*x = TypeResponse{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[12]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[12]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TypeResponse) String() string {
@@ -726,7 +752,7 @@ func (*TypeResponse) ProtoMessage() {}
func (x *TypeResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[12]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -761,9 +787,11 @@ type Statements struct {
func (x *Statements) Reset() {
*x = Statements{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[13]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[13]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Statements) String() string {
@@ -774,7 +802,7 @@ func (*Statements) ProtoMessage() {}
func (x *Statements) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[13]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -804,9 +832,11 @@ type Empty struct {
func (x *Empty) Reset() {
*x = Empty{}
- mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[14]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[14]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Empty) String() string {
@@ -817,7 +847,7 @@ func (*Empty) ProtoMessage() {}
func (x *Empty) ProtoReflect() protoreflect.Message {
mi := &file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[14]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1051,6 +1081,188 @@ func file_sdk_database_dbplugin_v5_proto_database_proto_init() {
if File_sdk_database_dbplugin_v5_proto_database_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*InitializeRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*InitializeResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*NewUserRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*UsernameConfig); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*NewUserResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*UpdateUserRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[6].Exporter = func(v any, i int) any {
+ switch v := v.(*ChangePassword); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[7].Exporter = func(v any, i int) any {
+ switch v := v.(*ChangePublicKey); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[8].Exporter = func(v any, i int) any {
+ switch v := v.(*ChangeExpiration); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[9].Exporter = func(v any, i int) any {
+ switch v := v.(*UpdateUserResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[10].Exporter = func(v any, i int) any {
+ switch v := v.(*DeleteUserRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[11].Exporter = func(v any, i int) any {
+ switch v := v.(*DeleteUserResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[12].Exporter = func(v any, i int) any {
+ switch v := v.(*TypeResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[13].Exporter = func(v any, i int) any {
+ switch v := v.(*Statements); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_database_dbplugin_v5_proto_database_proto_msgTypes[14].Exporter = func(v any, i int) any {
+ switch v := v.(*Empty); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/database/dbplugin/v5/proto/database_grpc.pb.go b/sdk/database/dbplugin/v5/proto/database_grpc.pb.go
index 37f366a35ae8..28c8d775238a 100644
--- a/sdk/database/dbplugin/v5/proto/database_grpc.pb.go
+++ b/sdk/database/dbplugin/v5/proto/database_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: sdk/database/dbplugin/v5/proto/database.proto
@@ -18,8 +18,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
Database_Initialize_FullMethodName = "/dbplugin.v5.Database/Initialize"
@@ -112,7 +112,7 @@ func (c *databaseClient) Close(ctx context.Context, in *Empty, opts ...grpc.Call
// DatabaseServer is the server API for Database service.
// All implementations must embed UnimplementedDatabaseServer
-// for forward compatibility.
+// for forward compatibility
type DatabaseServer interface {
Initialize(context.Context, *InitializeRequest) (*InitializeResponse, error)
NewUser(context.Context, *NewUserRequest) (*NewUserResponse, error)
@@ -123,12 +123,9 @@ type DatabaseServer interface {
mustEmbedUnimplementedDatabaseServer()
}
-// UnimplementedDatabaseServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedDatabaseServer struct{}
+// UnimplementedDatabaseServer must be embedded to have forward compatible implementations.
+type UnimplementedDatabaseServer struct {
+}
func (UnimplementedDatabaseServer) Initialize(context.Context, *InitializeRequest) (*InitializeResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method Initialize not implemented")
@@ -149,7 +146,6 @@ func (UnimplementedDatabaseServer) Close(context.Context, *Empty) (*Empty, error
return nil, status.Errorf(codes.Unimplemented, "method Close not implemented")
}
func (UnimplementedDatabaseServer) mustEmbedUnimplementedDatabaseServer() {}
-func (UnimplementedDatabaseServer) testEmbeddedByValue() {}
// UnsafeDatabaseServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to DatabaseServer will
@@ -159,13 +155,6 @@ type UnsafeDatabaseServer interface {
}
func RegisterDatabaseServer(s grpc.ServiceRegistrar, srv DatabaseServer) {
- // If the following call pancis, it indicates UnimplementedDatabaseServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&Database_ServiceDesc, srv)
}
diff --git a/sdk/go.mod b/sdk/go.mod
index 68f13ce14ea7..55c541e50bb2 100644
--- a/sdk/go.mod
+++ b/sdk/go.mod
@@ -1,13 +1,13 @@
module github.com/hashicorp/vault/sdk
-go 1.23.0
+go 1.22
require (
cloud.google.com/go/cloudsqlconn v1.4.3
github.com/armon/go-metrics v0.4.1
github.com/armon/go-radix v1.0.0
github.com/cenkalti/backoff/v3 v3.2.2
- github.com/docker/docker v27.2.1+incompatible
+ github.com/docker/docker v26.1.5+incompatible
github.com/docker/go-connections v0.4.0
github.com/evanphx/json-patch/v5 v5.6.0
github.com/fatih/structs v1.1.0
@@ -29,7 +29,7 @@ require (
github.com/hashicorp/go-secure-stdlib/mlock v0.1.2
github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8
github.com/hashicorp/go-secure-stdlib/password v0.1.1
- github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.1
+ github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.0
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2
github.com/hashicorp/go-secure-stdlib/tlsutil v0.1.3
github.com/hashicorp/go-sockaddr v1.0.6
@@ -45,29 +45,23 @@ require (
github.com/stretchr/testify v1.9.0
github.com/tink-crypto/tink-go/v2 v2.2.0
go.uber.org/atomic v1.9.0
- golang.org/x/crypto v0.27.0
- golang.org/x/net v0.29.0
- golang.org/x/text v0.18.0
- google.golang.org/grpc v1.66.1
+ golang.org/x/crypto v0.26.0
+ golang.org/x/net v0.28.0
+ golang.org/x/text v0.17.0
+ google.golang.org/grpc v1.65.0
google.golang.org/protobuf v1.34.2
)
require (
- github.com/AdaLogics/go-fuzz-headers v0.0.0-20230811130428-ced1acdcaa24 // indirect
github.com/beorn7/perks v1.0.1 // indirect
- github.com/cenkalti/backoff/v4 v4.2.1 // indirect
github.com/cespare/xxhash/v2 v2.3.0 // indirect
github.com/go-jose/go-jose/v4 v4.0.1 // indirect
- github.com/kr/pretty v0.3.1 // indirect
github.com/matttproud/golang_protobuf_extensions v1.0.4 // indirect
github.com/moby/docker-image-spec v1.3.1 // indirect
- github.com/moby/sys/userns v0.1.0 // indirect
github.com/prometheus/client_golang v1.14.0 // indirect
github.com/prometheus/client_model v0.3.0 // indirect
github.com/prometheus/common v0.37.0 // indirect
github.com/prometheus/procfs v0.8.0 // indirect
- go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 // indirect
- go.opentelemetry.io/proto/otlp v1.0.0 // indirect
golang.org/x/sync v0.8.0 // indirect
)
@@ -76,6 +70,7 @@ require (
github.com/Azure/go-ansiterm v0.0.0-20210617225240-d185dfc1b5a1 // indirect
github.com/Azure/go-ntlmssp v0.0.0-20221128193559-754e69321358 // indirect
github.com/Microsoft/go-winio v0.6.1 // indirect
+ github.com/containerd/containerd v1.7.12 // indirect
github.com/containerd/log v0.1.0 // indirect
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc // indirect
github.com/distribution/reference v0.6.0 // indirect
@@ -112,29 +107,30 @@ require (
github.com/moby/patternmatcher v0.5.0 // indirect
github.com/moby/sys/sequential v0.5.0 // indirect
github.com/moby/sys/user v0.1.0 // indirect
+ github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e // indirect
github.com/oklog/run v1.1.0 // indirect
github.com/opencontainers/go-digest v1.0.0 // indirect
github.com/opencontainers/image-spec v1.1.0-rc2.0.20221005185240-3a7f492d3f1b // indirect
github.com/petermattis/goid v0.0.0-20180202154549-b0b1615b78e5 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.1-0.20181226105442-5d4384ee4fb2 // indirect
- github.com/rogpeppe/go-internal v1.10.0 // indirect
+ github.com/rogpeppe/go-internal v1.8.1 // indirect
github.com/sasha-s/go-deadlock v0.2.0
github.com/sirupsen/logrus v1.9.3 // indirect
github.com/stretchr/objx v0.5.2 // indirect
go.opencensus.io v0.24.0 // indirect
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 // indirect
- go.opentelemetry.io/otel v1.30.0 // indirect
- go.opentelemetry.io/otel/metric v1.30.0 // indirect
- go.opentelemetry.io/otel/trace v1.30.0 // indirect
+ go.opentelemetry.io/otel v1.28.0 // indirect
+ go.opentelemetry.io/otel/metric v1.28.0 // indirect
+ go.opentelemetry.io/otel/trace v1.28.0 // indirect
golang.org/x/mod v0.17.0 // indirect
- golang.org/x/oauth2 v0.21.0 // indirect
- golang.org/x/sys v0.25.0 // indirect
- golang.org/x/term v0.24.0 // indirect
+ golang.org/x/oauth2 v0.20.0 // indirect
+ golang.org/x/sys v0.24.0 // indirect
+ golang.org/x/term v0.23.0 // indirect
golang.org/x/time v0.5.0 // indirect
golang.org/x/tools v0.21.1-0.20240508182429-e35e4ccd0d2d // indirect
google.golang.org/api v0.169.0 // indirect
- google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 // indirect
- gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c // indirect
+ google.golang.org/genproto/googleapis/rpc v0.0.0-20240812133136-8ffd90a71988 // indirect
+ gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
)
diff --git a/sdk/go.sum b/sdk/go.sum
index 55705f4931ac..a15693362241 100644
--- a/sdk/go.sum
+++ b/sdk/go.sum
@@ -48,6 +48,8 @@ github.com/Masterminds/semver/v3 v3.1.1 h1:hLg3sBzpNErnxhQtUy/mmLR2I9foDujNK030I
github.com/Masterminds/semver/v3 v3.1.1/go.mod h1:VPu/7SZ7ePZ3QOrcuXROw5FAcLl4a0cBrbBpGY/8hQs=
github.com/Microsoft/go-winio v0.6.1 h1:9/kr64B9VUZrLm5YYwbGtUJnMgqWVOdUAXu6Migciow=
github.com/Microsoft/go-winio v0.6.1/go.mod h1:LRdKpFKfdobln8UmuiYcKPot9D2v6svN5+sAH+4kjUM=
+github.com/Microsoft/hcsshim v0.11.4 h1:68vKo2VN8DE9AdN4tnkWnmdhqdbpUFM8OF3Airm7fz8=
+github.com/Microsoft/hcsshim v0.11.4/go.mod h1:smjE4dvqPX9Zldna+t5FG3rnoHhaB7QYxPRqGcpAD9w=
github.com/alecthomas/template v0.0.0-20160405071501-a0175ee3bccc/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/template v0.0.0-20190718012654-fb15b899a751/go.mod h1:LOuyumcjzFXgccqObfd/Ljyb9UuFJ6TxHnclSeseNhc=
github.com/alecthomas/units v0.0.0-20151022065526-2efee857e7cf/go.mod h1:ybxpYRFXyAe+OPACYpWeL0wqObRcbAqCMya13uyzqw0=
@@ -85,6 +87,8 @@ github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDk
github.com/cncf/udpa/go v0.0.0-20191209042840-269d4d468f6f/go.mod h1:M8M6+tZqaGXZJjfX53e64911xZQV5JYwmTeXPW+k8Sc=
github.com/cockroachdb/apd v1.1.0 h1:3LFP3629v+1aKXU5Q37mxmRxX/pIu1nijXydLShEq5I=
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
+github.com/containerd/containerd v1.7.12 h1:+KQsnv4VnzyxWcfO9mlxxELaoztsDEjOuCMPAuPqgU0=
+github.com/containerd/containerd v1.7.12/go.mod h1:/5OMpE1p0ylxtEUGY8kuCYkDRzJm9NO1TFMWjUpdevk=
github.com/containerd/log v0.1.0 h1:TCJt7ioM2cr/tfR8GPbGf9/VRAX8D2B4PjzCpfX540I=
github.com/containerd/log v0.1.0/go.mod h1:VRRf09a7mHDIRezVKTRCrOq78v577GXq3bSa3EhrzVo=
github.com/coreos/go-systemd v0.0.0-20190321100706-95778dfbb74e/go.mod h1:F5haX7vjVVG0kc13fIWeqUViNPyEJxv/OmvnBo0Yme4=
@@ -97,8 +101,8 @@ github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc h1:U9qPSI2PIWSS1
github.com/davecgh/go-spew v1.1.2-0.20180830191138-d8f796af33cc/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
github.com/distribution/reference v0.6.0 h1:0IXCQ5g4/QMHHkarYzh5l+u8T3t73zM5QvfrDyIgxBk=
github.com/distribution/reference v0.6.0/go.mod h1:BbU0aIcezP1/5jX/8MP0YiH4SdvB5Y4f/wlDRiLyi3E=
-github.com/docker/docker v27.2.1+incompatible h1:fQdiLfW7VLscyoeYEBz7/J8soYFDZV1u6VW6gJEjNMI=
-github.com/docker/docker v27.2.1+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
+github.com/docker/docker v26.1.5+incompatible h1:NEAxTwEjxV6VbBMBoGG3zPqbiJosIApZjxlbrG9q3/g=
+github.com/docker/docker v26.1.5+incompatible/go.mod h1:eEKB0N0r5NX/I1kEveEz05bcu8tLC/8azJZsviup8Sk=
github.com/docker/go-connections v0.4.0 h1:El9xVISelRB7BuFusrZozjnkIM5YnzCViNKohAFqRJQ=
github.com/docker/go-connections v0.4.0/go.mod h1:Gbd7IOopHjR8Iph03tsViu4nIes5XhDvyHbTtUxmeec=
github.com/docker/go-units v0.5.0 h1:69rxXcBk27SvSaaxTtLh/8llcHD8vYHT7WSdRZ/jvr4=
@@ -261,8 +265,8 @@ github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8 h1:iBt4Ew4XEGLfh6/bPk4rSY
github.com/hashicorp/go-secure-stdlib/parseutil v0.1.8/go.mod h1:aiJI+PIApBRQG7FZTEBx5GiiX+HbOHilUdNxUZi4eV0=
github.com/hashicorp/go-secure-stdlib/password v0.1.1 h1:6JzmBqXprakgFEHwBgdchsjaA9x3GyjdI568bXKxa60=
github.com/hashicorp/go-secure-stdlib/password v0.1.1/go.mod h1:9hH302QllNwu1o2TGYtSk8I8kTAN0ca1EHpwhm5Mmzo=
-github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.1 h1:JY+zGg8gOmslwif1fiCqT5Hu1SikLZQcHkmQhCoA9gY=
-github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.1/go.mod h1:jW3KCTvdPyAdVecOUwiiO2XaYgUJ/isigt++ISkszkY=
+github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.0 h1:7Yran48kl6X7jfUg3sfYDrFot1gD3LvzdC3oPu5l/qo=
+github.com/hashicorp/go-secure-stdlib/plugincontainer v0.4.0/go.mod h1:9WJFu7L3d+Z4ViZmwUf+6/73/Uy7YMY1NXrB9wdElYE=
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2 h1:kes8mmyCpxJsI7FTwtzRqEy9CdjCtrXrXGuOpxEA7Ts=
github.com/hashicorp/go-secure-stdlib/strutil v0.1.2/go.mod h1:Gou2R9+il93BqX25LAKCLuM+y9U2T4hlwvT1yprcna4=
github.com/hashicorp/go-secure-stdlib/tlsutil v0.1.3 h1:xbrxd0U9XQW8qL1BAz2XrAjAF/P2vcqUTAues9c24B8=
@@ -361,10 +365,8 @@ github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxv
github.com/konsorten/go-windows-terminal-sequences v1.0.3/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
-github.com/kr/pretty v0.2.1/go.mod h1:ipq/a2n7PKx3OHsz4KJII5eveXtPO4qwEXGdVfWzfnI=
+github.com/kr/pretty v0.3.0 h1:WgNl7dwNpEZ6jJ9k1snq4pZsg7DOEN8hP9Xw0Tsjwk0=
github.com/kr/pretty v0.3.0/go.mod h1:640gp4NfQd8pI5XOwp5fnNeVWj67G7CFk/SaSQn7NBk=
-github.com/kr/pretty v0.3.1 h1:flRD4NNwYAUpkphVc1HcthR4KEIFJ65n8Mw5qdRn3LE=
-github.com/kr/pretty v0.3.1/go.mod h1:hoEshYVHaxMs3cyo3Yncou5ZscifuDolrwPKZanG3xk=
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
@@ -411,8 +413,6 @@ github.com/moby/sys/sequential v0.5.0 h1:OPvI35Lzn9K04PBbCLW0g4LcFAJgHsvXsRyewg5
github.com/moby/sys/sequential v0.5.0/go.mod h1:tH2cOOs5V9MlPiXcQzRC+eEyab644PWKGRYaaV5ZZlo=
github.com/moby/sys/user v0.1.0 h1:WmZ93f5Ux6het5iituh9x2zAG7NFY9Aqi49jjE1PaQg=
github.com/moby/sys/user v0.1.0/go.mod h1:fKJhFOnsCN6xZ5gSfbM6zaHGgDJMrqt9/reuj4T7MmU=
-github.com/moby/sys/userns v0.1.0 h1:tVLXkFOxVu9A64/yh59slHVv9ahO9UIev4JZusOLG/g=
-github.com/moby/sys/userns v0.1.0/go.mod h1:IHUYgu/kao6N8YZlp9Cf444ySSvCmDlmzUcYfDHOl28=
github.com/moby/term v0.5.0 h1:xt8Q1nalod/v7BqbG21f8mQPqH+xAaC9C3N3wfWbVP0=
github.com/moby/term v0.5.0/go.mod h1:8FzsFHVUBGZdbDsJw/ot+X+d5HLUbvklYLJ9uGfcI3Y=
github.com/modern-go/concurrent v0.0.0-20180228061459-e0a39a4cb421/go.mod h1:6dJC0mAP4ikYIbvyc7fijjWJddQyLn8Ig3JB5CqoB9Q=
@@ -424,6 +424,8 @@ github.com/morikuni/aec v1.0.0 h1:nP9CBfwrvYnBRgY6qfDQkygYDmYwOilePFkwzv4dU8A=
github.com/morikuni/aec v1.0.0/go.mod h1:BbKIizmSmc5MMPqRYbxO4ZU0S0+P200+tUnFx7PXmsc=
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
github.com/mwitkow/go-conntrack v0.0.0-20190716064945-2f068394615f/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
+github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e/go.mod h1:zD1mROLANZcx1PVRCS0qkT7pwLkGfwJo4zjcN/Tysno=
github.com/oklog/run v1.1.0 h1:GEenZ1cK0+q0+wsJew9qUg/DyD8k3JzYsZAi5gYi2mA=
github.com/oklog/run v1.1.0/go.mod h1:sVPdnTZT1zYwAJeCMu2Th4T21pA3FPOQRfWjQlk7DVU=
github.com/opencontainers/go-digest v1.0.0 h1:apOUWs51W5PlhuyGyz9FCeeBIOUDA/6nW8Oi/yOhh5U=
@@ -475,9 +477,8 @@ github.com/prometheus/procfs v0.8.0 h1:ODq8ZFEaYeCaZOJlZZdJA2AbQR98dSHSM1KW/You5
github.com/prometheus/procfs v0.8.0/go.mod h1:z7EfXMXOkbkqb9IINtpCn86r/to3BnA0uaxHdg830/4=
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
github.com/rogpeppe/go-internal v1.6.1/go.mod h1:xXDCJY+GAPziupqXw64V24skbSoqbTEfhy4qGm1nDQc=
-github.com/rogpeppe/go-internal v1.9.0/go.mod h1:WtVeX8xhTBvf0smdhujwtBcq4Qrzq/fJaraNFVN+nFs=
-github.com/rogpeppe/go-internal v1.10.0 h1:TMyTOH3F/DB16zRVcYyreMH6GnZZrwQVAoYjRBZyWFQ=
-github.com/rogpeppe/go-internal v1.10.0/go.mod h1:UQnix2H7Ngw/k4C5ijL5+65zddjncjaFoBhdsK/akog=
+github.com/rogpeppe/go-internal v1.8.1 h1:geMPLpDpQOgVyCg5z5GoRwLHepNdb71NXb67XFkP+Eg=
+github.com/rogpeppe/go-internal v1.8.1/go.mod h1:JeRgkft04UBgHMgCIwADu4Pn6Mtm5d4nPKWu0nJ5d+o=
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
github.com/rs/zerolog v1.13.0/go.mod h1:YbFCdg8HfsridGWAh22vktObvhZbQsZXe4/zB0OKkWU=
github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThCjNc=
@@ -531,18 +532,18 @@ go.opencensus.io v0.24.0 h1:y73uSU6J157QMP2kn2r30vwW1A2W2WFwSCGnAVxeaD0=
go.opencensus.io v0.24.0/go.mod h1:vNK8G9p7aAivkbmorf4v+7Hgx+Zs0yY+0fOtgBfjQKo=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0 h1:Xs2Ncz0gNihqu9iosIZ5SkBbWo5T8JhhLJFMQL1qmLI=
go.opentelemetry.io/contrib/instrumentation/net/http/otelhttp v0.51.0/go.mod h1:vy+2G/6NvVMpwGX/NyLqcC41fxepnuKHk16E6IZUcJc=
-go.opentelemetry.io/otel v1.30.0 h1:F2t8sK4qf1fAmY9ua4ohFS/K+FUuOPemHUIXHtktrts=
-go.opentelemetry.io/otel v1.30.0/go.mod h1:tFw4Br9b7fOS+uEao81PJjVMjW/5fvNCbpsDIXqP0pc=
+go.opentelemetry.io/otel v1.28.0 h1:/SqNcYk+idO0CxKEUOtKQClMK/MimZihKYMruSMViUo=
+go.opentelemetry.io/otel v1.28.0/go.mod h1:q68ijF8Fc8CnMHKyzqL6akLO46ePnjkgfIMIjUIX9z4=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0 h1:Mne5On7VWdx7omSrSSZvM4Kw7cS7NQkOOmLcgscI51U=
go.opentelemetry.io/otel/exporters/otlp/otlptrace v1.19.0/go.mod h1:IPtUMKL4O3tH5y+iXVyAXqpAwMuzC1IrxVS81rummfE=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0 h1:umZgi92IyxfXd/l4kaDhnKgY8rnN/cZcF1LKc6I8OQ8=
-go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.30.0/go.mod h1:4lVs6obhSVRb1EW5FhOuBTyiQhtRtAnnva9vD3yRfq8=
-go.opentelemetry.io/otel/metric v1.30.0 h1:4xNulvn9gjzo4hjg+wzIKG7iNFEaBMX00Qd4QIZs7+w=
-go.opentelemetry.io/otel/metric v1.30.0/go.mod h1:aXTfST94tswhWEb+5QjlSqG+cZlmyXy/u8jFpor3WqQ=
-go.opentelemetry.io/otel/sdk v1.30.0 h1:cHdik6irO49R5IysVhdn8oaiR9m8XluDaJAs4DfOrYE=
-go.opentelemetry.io/otel/sdk v1.30.0/go.mod h1:p14X4Ok8S+sygzblytT1nqG98QG2KYKv++HE0LY/mhg=
-go.opentelemetry.io/otel/trace v1.30.0 h1:7UBkkYzeg3C7kQX8VAidWh2biiQbtAKjyIML8dQ9wmc=
-go.opentelemetry.io/otel/trace v1.30.0/go.mod h1:5EyKqTzzmyqB9bwtCCq6pDLktPK6fmGf/Dph+8VI02o=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0 h1:j9+03ymgYhPKmeXGk5Zu+cIZOlVzd9Zv7QIiyItjFBU=
+go.opentelemetry.io/otel/exporters/otlp/otlptrace/otlptracehttp v1.28.0/go.mod h1:Y5+XiUG4Emn1hTfciPzGPJaSI+RpDts6BnCIir0SLqk=
+go.opentelemetry.io/otel/metric v1.28.0 h1:f0HGvSl1KRAU1DLgLGFjrwVyismPlnuU6JD6bOeuA5Q=
+go.opentelemetry.io/otel/metric v1.28.0/go.mod h1:Fb1eVBFZmLVTMb6PPohq3TO9IIhUisDsbJoL/+uQW4s=
+go.opentelemetry.io/otel/sdk v1.28.0 h1:b9d7hIry8yZsgtbmM0DKyPWMMUMlK9NEKuIG4aBqWyE=
+go.opentelemetry.io/otel/sdk v1.28.0/go.mod h1:oYj7ClPUA7Iw3m+r7GeEjz0qckQRJK2B8zjcZEfu7Pg=
+go.opentelemetry.io/otel/trace v1.28.0 h1:GhQ9cUuQGmNDd5BTCP2dAvv75RdMxEfTmYejp+lkx9g=
+go.opentelemetry.io/otel/trace v1.28.0/go.mod h1:jPyXzNPg6da9+38HEwElrQiHlVMTnVfM3/yv2OlIHaI=
go.opentelemetry.io/proto/otlp v1.0.0 h1:T0TX0tmXU8a3CbNXzEKGeU5mIVOdf0oykP+u2lIVU/I=
go.opentelemetry.io/proto/otlp v1.0.0/go.mod h1:Sy6pihPLfYHkr3NkUbEhGHFhINUSI/v80hjKIs5JXpM=
go.uber.org/atomic v1.3.2/go.mod h1:gD2HeocX3+yG+ygLZcrzQJaqmWj9AIm7n08wl/qW/PE=
@@ -571,8 +572,8 @@ golang.org/x/crypto v0.0.0-20210616213533-5ff15b29337e/go.mod h1:GvvjBRRGRdwPK5y
golang.org/x/crypto v0.0.0-20210711020723-a769d52b0f97/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.0.0-20210921155107-089bfa567519/go.mod h1:GvvjBRRGRdwPK5ydBHafDWAxML/pGHZbMvKqRZ5+Abc=
golang.org/x/crypto v0.13.0/go.mod h1:y6Z2r+Rw4iayiXXAIxJIDAJ1zMW4yaTpebo8fPOliYc=
-golang.org/x/crypto v0.27.0 h1:GXm2NjJrPaiv/h1tb2UH8QfgC/hOf/+z0p6PT8o1w7A=
-golang.org/x/crypto v0.27.0/go.mod h1:1Xngt8kV6Dvbssa53Ziq6Eqn0HqbZi5Z6R0ZpwQzt70=
+golang.org/x/crypto v0.26.0 h1:RrRspgV4mU+YwB4FYnuBoKsUapNIL5cohGAmSH3azsw=
+golang.org/x/crypto v0.26.0/go.mod h1:GY7jblb9wI+FOo5y8/S2oY4zWP07AkOJ4+jxCqdqn54=
golang.org/x/exp v0.0.0-20190121172915-509febef88a4/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL+f9T1iT9GByDxfZKAQTCR3kQA=
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
@@ -647,8 +648,8 @@ golang.org/x/net v0.0.0-20220225172249-27dd8689420f/go.mod h1:CfG3xpIq0wQ8r1q4Su
golang.org/x/net v0.0.0-20220722155237-a158d28d115b/go.mod h1:XRhObCWvk6IyKnWLug+ECip1KBveYUHfp+8e9klMJ9c=
golang.org/x/net v0.6.0/go.mod h1:2Tu9+aMcznHK/AK1HMvgo6xiTLG5rD5rZLDS+rp2Bjs=
golang.org/x/net v0.10.0/go.mod h1:0qNGK6F8kojg2nk9dLZ2mShWaEBan6FAoqfSigmmuDg=
-golang.org/x/net v0.29.0 h1:5ORfpBpCs4HzDYoodCDBbwHzdR5UrLBZ3sOnUJmFoHo=
-golang.org/x/net v0.29.0/go.mod h1:gLkgy8jTGERgjzMic6DS9+SP0ajcu6Xu3Orq/SpETg0=
+golang.org/x/net v0.28.0 h1:a9JDOJc5GMUJ0+UDqmLT86WiEy7iWyIhz8gz8E4e5hE=
+golang.org/x/net v0.28.0/go.mod h1:yqtgsTWOOnlGLG9GFRrK3++bGOUEkNBoHZc8MEDWPNg=
golang.org/x/oauth2 v0.0.0-20180821212333-d2e6202438be/go.mod h1:N/0e6XlmueqKjAGxoOufVs8QHGRruUQn6yWY3a++T0U=
golang.org/x/oauth2 v0.0.0-20190226205417-e64efc72b421/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20190604053449-0f29369cfe45/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
@@ -656,8 +657,8 @@ golang.org/x/oauth2 v0.0.0-20191202225959-858c2ad4c8b6/go.mod h1:gOpvHmFTYa4Iltr
golang.org/x/oauth2 v0.0.0-20200107190931-bf48bf16ab8d/go.mod h1:gOpvHmFTYa4IltrdGE7lF6nIHvwfUNPOp7c8zoXwtLw=
golang.org/x/oauth2 v0.0.0-20210514164344-f6687ab2804c/go.mod h1:KelEdhl1UZF7XfJ4dDtk6s++YSgaE7mD/BuKKDLBl4A=
golang.org/x/oauth2 v0.0.0-20220223155221-ee480838109b/go.mod h1:DAh4E804XQdzx2j+YRIaUnCqCV2RuMz24cGBJ5QYIrc=
-golang.org/x/oauth2 v0.21.0 h1:tsimM75w1tF/uws5rbeHzIWxEqElMehnc+iW793zsZs=
-golang.org/x/oauth2 v0.21.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
+golang.org/x/oauth2 v0.20.0 h1:4mQdhULixXKP1rwYBW0vAijoXnkTG0BLCDRzfe1idMo=
+golang.org/x/oauth2 v0.20.0/go.mod h1:XYTD2NtWslqkgxebSiOHnXEap4TF09sJSc7H1sXbhtI=
golang.org/x/sync v0.0.0-20180314180146-1d60e4601c6f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181108010431-42b317875d0f/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
golang.org/x/sync v0.0.0-20181221193216-37e7f081c4d4/go.mod h1:RxMgew5VJxzue5/jJTE5uejpjVlOe/izrB70Jof72aM=
@@ -728,16 +729,16 @@ golang.org/x/sys v0.5.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.6.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.8.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
golang.org/x/sys v0.12.0/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
-golang.org/x/sys v0.25.0 h1:r+8e+loiHxRqhXVl6ML1nO3l1+oFoWbnlu2Ehimmi34=
-golang.org/x/sys v0.25.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
+golang.org/x/sys v0.24.0 h1:Twjiwq9dn6R1fQcyiK+wQyHWfaz/BJB+YIpzU/Cv3Xg=
+golang.org/x/sys v0.24.0/go.mod h1:/VUhepiaJMQUp4+oa/7Zr1D23ma6VTLIYjOOTFZPUcA=
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
golang.org/x/term v0.0.0-20210927222741-03fcf44c2211/go.mod h1:jbD1KX2456YbFQfuXm/mYQcufACuNUgVhRMnK/tPxf8=
golang.org/x/term v0.5.0/go.mod h1:jMB1sMXY+tzblOD4FWmEbocvup2/aLOaQEp7JmGp78k=
golang.org/x/term v0.8.0/go.mod h1:xPskH00ivmX89bAKVGSKKtLOWNx2+17Eiy94tnKShWo=
golang.org/x/term v0.12.0/go.mod h1:owVbMEjm3cBLCHdkQu9b1opXd4ETQWc3BhuQGKgXgvU=
-golang.org/x/term v0.24.0 h1:Mh5cbb+Zk2hqqXNO7S1iTjEphVL+jb8ZWaqh/g+JWkM=
-golang.org/x/term v0.24.0/go.mod h1:lOBK/LVxemqiMij05LGJ0tzNr8xlmwBRJ81PX6wVLH8=
+golang.org/x/term v0.23.0 h1:F6D4vR+EHoL9/sWAWgAR1H2DcHr4PareCbAaCo1RpuU=
+golang.org/x/term v0.23.0/go.mod h1:DgV24QBUrK6jhZXl+20l6UWznPlwAHm1Q1mGHtydmSk=
golang.org/x/text v0.0.0-20170915032832-14c0d48ead0c/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.0/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
golang.org/x/text v0.3.1-0.20180807135948-17ff2d5776d2/go.mod h1:NqM8EUOU14njkJ3fqMW+pc6Ldnwhi/IjpwHt7yyuwOQ=
@@ -749,8 +750,8 @@ golang.org/x/text v0.3.7/go.mod h1:u+2+/6zg+i71rQMx5EYifcz6MCKuco9NR6JIITiCfzQ=
golang.org/x/text v0.7.0/go.mod h1:mrYo+phRRbMaCq/xk9113O4dZlRixOauAjOtrjsXDZ8=
golang.org/x/text v0.9.0/go.mod h1:e1OnstbJyHTd6l/uOt8jFFHp6TRDWZR/bV3emEE/zU8=
golang.org/x/text v0.13.0/go.mod h1:TvPlkZtksWOMsz7fbANvkp4WM8x/WCo/om8BMLbz+aE=
-golang.org/x/text v0.18.0 h1:XvMDiNzPAl0jr17s6W9lcaIhGUfUORdGCNsuLmPG224=
-golang.org/x/text v0.18.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
+golang.org/x/text v0.17.0 h1:XtiM5bkSOt+ewxlOE/aE/AKEHibwj/6gvWMl9Rsh0Qc=
+golang.org/x/text v0.17.0/go.mod h1:BuEKDfySbSR4drPmRPG/7iBdf8hvFMuRexcpahXilzY=
golang.org/x/time v0.0.0-20181108054448-85acf8d2951c/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20190308202827-9d24e82272b4/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
golang.org/x/time v0.0.0-20191024005414-555d28b269f0/go.mod h1:tRJNPiyCQ0inRvYxbN9jk5I+vvW/OXSQhTDSoE431IQ=
@@ -867,10 +868,10 @@ google.golang.org/genproto v0.0.0-20200729003335-053ba62fc06f/go.mod h1:FWY/as6D
google.golang.org/genproto v0.0.0-20200804131852-c06518451d9c/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20200825200019-8632dd797987/go.mod h1:FWY/as6DDZQgahTzZj3fqbO1CbirC29ZNUFHwi0/+no=
google.golang.org/genproto v0.0.0-20240205150955-31a09d347014 h1:g/4bk7P6TPMkAUbUhquq98xey1slwvuVJPosdBqYJlU=
-google.golang.org/genproto/googleapis/api v0.0.0-20240604185151-ef581f913117 h1:+rdxYoE3E5htTEWIe15GlN6IfvbURM//Jt0mmkmm6ZU=
-google.golang.org/genproto/googleapis/api v0.0.0-20240604185151-ef581f913117/go.mod h1:OimBR/bc1wPO9iV4NC2bpyjy3VnAwZh5EBPQdtaE5oo=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1 h1:pPJltXNxVzT4pK9yD8vR9X75DaWYYmLGMsEvBfFQZzQ=
-google.golang.org/genproto/googleapis/rpc v0.0.0-20240903143218-8af14fe29dc1/go.mod h1:UqMtugtsSgubUsoxbuAoiCXvqvErP7Gf0so0mK9tHxU=
+google.golang.org/genproto/googleapis/api v0.0.0-20240528184218-531527333157 h1:7whR9kGa5LUwFtpLm2ArCEejtnxlGeLbAyjFY8sGNFw=
+google.golang.org/genproto/googleapis/api v0.0.0-20240528184218-531527333157/go.mod h1:99sLkeliLXfdj2J75X3Ho+rrVCaJze0uwN7zDDkjPVU=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240812133136-8ffd90a71988 h1:V71AcdLZr2p8dC9dbOIMCpqi4EmRl8wUwnJzXXLmbmc=
+google.golang.org/genproto/googleapis/rpc v0.0.0-20240812133136-8ffd90a71988/go.mod h1:Ue6ibwXGpU+dqIcODieyLOcgj7z8+IcskoNIgZxtrFY=
google.golang.org/grpc v1.19.0/go.mod h1:mqu4LbDTu4XGKhr4mRzUsmM4RtVoemTSY81AxZiDr8c=
google.golang.org/grpc v1.20.1/go.mod h1:10oTOabMzJvdu6/UiuZezV6QK5dSlG84ov/aaiqXj38=
google.golang.org/grpc v1.21.1/go.mod h1:oYelfM1adQP15Ek0mdvEgi9Df8B9CZIaU1084ijfRaM=
@@ -884,8 +885,8 @@ google.golang.org/grpc v1.29.1/go.mod h1:itym6AZVZYACWQqET3MqgPpjcuV5QH3BxFS3Iji
google.golang.org/grpc v1.30.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.31.0/go.mod h1:N36X2cJ7JwdamYAgDz+s+rVMFjt3numwzf/HckM8pak=
google.golang.org/grpc v1.33.2/go.mod h1:JMHMWHQWaTccqQQlmk3MJZS+GWXOdAesneDmEnv2fbc=
-google.golang.org/grpc v1.66.1 h1:hO5qAXR19+/Z44hmvIM4dQFMSYX9XcWsByfoxutBpAM=
-google.golang.org/grpc v1.66.1/go.mod h1:s3/l6xSSCURdVfAnL+TqCNMyTDAGN6+lZeVxnZR128Y=
+google.golang.org/grpc v1.65.0 h1:bs/cUb4lp1G5iImFFd3u5ixQzweKizoZJAwBNLR42lc=
+google.golang.org/grpc v1.65.0/go.mod h1:WgYC2ypjlB0EiQi6wdKixMqukr6lBc0Vo+oOgjrM5ZQ=
google.golang.org/protobuf v0.0.0-20200109180630-ec00e32a8dfd/go.mod h1:DFci5gLYBciE7Vtevhsrf46CRTquxDuWsQurQQe4oz8=
google.golang.org/protobuf v0.0.0-20200221191635-4d8936d0db64/go.mod h1:kwYJMbMJ01Woi6D6+Kah6886xMZcty6N08ah7+eCXa0=
google.golang.org/protobuf v0.0.0-20200228230310-ab0ca4ff8a60/go.mod h1:cfTl7dwQJ+fmap5saPgwCLgHXTUD7jkjRqWcaiX5VyM=
@@ -904,8 +905,8 @@ gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLks
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/check.v1 v1.0.0-20190902080502-41f04d3bba15/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
-gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c h1:Hei/4ADfdWqJk1ZMxUNpqntNwaWcugrBjAiHlqqRiVk=
-gopkg.in/check.v1 v1.0.0-20201130134442-10cb98267c6c/go.mod h1:JHkPIbrfpd72SG/EVd6muEfDQjcINNoR0C8j2r3qZ4Q=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
+gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
gopkg.in/inconshreveable/log15.v2 v2.0.0-20180818164646-67afb5ed74ec/go.mod h1:aPpfJ7XW+gOuirDoZ8gHhLh3kZ1B08FtV2bbmy7Jv3s=
gopkg.in/yaml.v2 v2.2.1/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
diff --git a/sdk/helper/clientcountutil/clientcountutil.go b/sdk/helper/clientcountutil/clientcountutil.go
index d09c5be13d33..7d0be5526e1b 100644
--- a/sdk/helper/clientcountutil/clientcountutil.go
+++ b/sdk/helper/clientcountutil/clientcountutil.go
@@ -282,53 +282,33 @@ func (d *ActivityLogDataGenerator) ToProto() *generation.ActivityLogMockInput {
// Write writes the data to the API with the given write options. The method
// returns the new paths that have been written. Note that the API endpoint will
// only be present when Vault has been compiled with the "testonly" flag.
-func (d *ActivityLogDataGenerator) Write(ctx context.Context, writeOptions ...generation.WriteOptions) ([]string, []string, []string, error) {
+func (d *ActivityLogDataGenerator) Write(ctx context.Context, writeOptions ...generation.WriteOptions) ([]string, error) {
d.data.Write = writeOptions
err := VerifyInput(d.data)
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
data, err := d.ToJSON()
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
resp, err := d.client.Logical().WriteWithContext(ctx, "sys/internal/counters/activity/write", map[string]interface{}{"input": string(data)})
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
if resp.Data == nil {
- return nil, nil, nil, fmt.Errorf("received no data")
+ return nil, fmt.Errorf("received no data")
}
paths := resp.Data["paths"]
castedPaths, ok := paths.([]interface{})
if !ok {
- return nil, nil, nil, fmt.Errorf("invalid paths data: %v", paths)
+ return nil, fmt.Errorf("invalid paths data: %v", paths)
}
returnPaths := make([]string, 0, len(castedPaths))
for _, path := range castedPaths {
returnPaths = append(returnPaths, path.(string))
}
-
- localPaths := resp.Data["local_paths"]
- localCastedPaths, ok := localPaths.([]interface{})
- if !ok {
- return nil, nil, nil, fmt.Errorf("invalid local paths data: %v", localPaths)
- }
- returnLocalPaths := make([]string, 0, len(localCastedPaths))
- for _, path := range localCastedPaths {
- returnLocalPaths = append(returnLocalPaths, path.(string))
- }
-
- globalPaths := resp.Data["global_paths"]
- globalCastedPaths, ok := globalPaths.([]interface{})
- if !ok {
- return nil, nil, nil, fmt.Errorf("invalid global paths data: %v", globalPaths)
- }
- returnGlobalPaths := make([]string, 0, len(globalCastedPaths))
- for _, path := range globalCastedPaths {
- returnGlobalPaths = append(returnGlobalPaths, path.(string))
- }
- return returnPaths, returnLocalPaths, returnGlobalPaths, nil
+ return returnPaths, nil
}
// VerifyInput checks that the input data is valid
diff --git a/sdk/helper/clientcountutil/clientcountutil_test.go b/sdk/helper/clientcountutil/clientcountutil_test.go
index 4ea987fed025..6a5b224bc675 100644
--- a/sdk/helper/clientcountutil/clientcountutil_test.go
+++ b/sdk/helper/clientcountutil/clientcountutil_test.go
@@ -116,7 +116,7 @@ func TestNewCurrentMonthData_AddClients(t *testing.T) {
// sent to the server is correct.
func TestWrite(t *testing.T) {
ts := httptest.NewServer(http.HandlerFunc(func(w http.ResponseWriter, r *http.Request) {
- _, err := io.WriteString(w, `{"data":{"paths":["path1","path2"],"global_paths":["path2","path3"], "local_paths":["path3","path4"]}}`)
+ _, err := io.WriteString(w, `{"data":{"paths":["path1","path2"]}}`)
require.NoError(t, err)
body, err := io.ReadAll(r.Body)
require.NoError(t, err)
@@ -131,7 +131,7 @@ func TestWrite(t *testing.T) {
Address: ts.URL,
})
require.NoError(t, err)
- paths, localPaths, globalPaths, err := NewActivityLogData(client).
+ paths, err := NewActivityLogData(client).
NewPreviousMonthData(3).
NewClientSeen().
NewPreviousMonthData(2).
@@ -141,8 +141,6 @@ func TestWrite(t *testing.T) {
require.NoError(t, err)
require.Equal(t, []string{"path1", "path2"}, paths)
- require.Equal(t, []string{"path2", "path3"}, globalPaths)
- require.Equal(t, []string{"path3", "path4"}, localPaths)
}
func testAddClients(t *testing.T, makeGenerator func() *ActivityLogDataGenerator, getClient func(data *ActivityLogDataGenerator) *generation.Client) {
diff --git a/sdk/helper/clientcountutil/generation/generate_data.pb.go b/sdk/helper/clientcountutil/generation/generate_data.pb.go
index 02f621b173d6..29267282e90f 100644
--- a/sdk/helper/clientcountutil/generation/generate_data.pb.go
+++ b/sdk/helper/clientcountutil/generation/generate_data.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/helper/clientcountutil/generation/generate_data.proto
@@ -92,9 +92,11 @@ type ActivityLogMockInput struct {
func (x *ActivityLogMockInput) Reset() {
*x = ActivityLogMockInput{}
- mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ActivityLogMockInput) String() string {
@@ -105,7 +107,7 @@ func (*ActivityLogMockInput) ProtoMessage() {}
func (x *ActivityLogMockInput) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -156,9 +158,11 @@ type Data struct {
func (x *Data) Reset() {
*x = Data{}
- mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Data) String() string {
@@ -169,7 +173,7 @@ func (*Data) ProtoMessage() {}
func (x *Data) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -289,9 +293,11 @@ type Segments struct {
func (x *Segments) Reset() {
*x = Segments{}
- mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Segments) String() string {
@@ -302,7 +308,7 @@ func (*Segments) ProtoMessage() {}
func (x *Segments) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -335,9 +341,11 @@ type Segment struct {
func (x *Segment) Reset() {
*x = Segment{}
- mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Segment) String() string {
@@ -348,7 +356,7 @@ func (*Segment) ProtoMessage() {}
func (x *Segment) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -387,9 +395,11 @@ type Clients struct {
func (x *Clients) Reset() {
*x = Clients{}
- mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Clients) String() string {
@@ -400,7 +410,7 @@ func (*Clients) ProtoMessage() {}
func (x *Clients) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -438,9 +448,11 @@ type Client struct {
func (x *Client) Reset() {
*x = Client{}
- mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Client) String() string {
@@ -451,7 +463,7 @@ func (*Client) ProtoMessage() {}
func (x *Client) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -638,6 +650,80 @@ func file_sdk_helper_clientcountutil_generation_generate_data_proto_init() {
if File_sdk_helper_clientcountutil_generation_generate_data_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*ActivityLogMockInput); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*Data); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*Segments); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*Segment); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*Clients); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*Client); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
file_sdk_helper_clientcountutil_generation_generate_data_proto_msgTypes[1].OneofWrappers = []any{
(*Data_CurrentMonth)(nil),
(*Data_MonthsAgo)(nil),
diff --git a/sdk/helper/docker/testhelpers.go b/sdk/helper/docker/testhelpers.go
index 6a29f285dbf8..af1f3f72a666 100644
--- a/sdk/helper/docker/testhelpers.go
+++ b/sdk/helper/docker/testhelpers.go
@@ -23,7 +23,6 @@ import (
"github.com/docker/docker/api/types"
"github.com/docker/docker/api/types/container"
"github.com/docker/docker/api/types/filters"
- "github.com/docker/docker/api/types/image"
"github.com/docker/docker/api/types/mount"
"github.com/docker/docker/api/types/network"
"github.com/docker/docker/api/types/strslice"
@@ -401,7 +400,7 @@ func (d *Runner) Start(ctx context.Context, addSuffix, forceLocalAddr bool) (*St
}
// best-effort pull
- var opts image.CreateOptions
+ var opts types.ImageCreateOptions
if d.RunOptions.AuthUsername != "" && d.RunOptions.AuthPassword != "" {
var buf bytes.Buffer
auth := map[string]string{
diff --git a/sdk/helper/keysutil/policy.go b/sdk/helper/keysutil/policy.go
index 0d50775c666d..873f99149eda 100644
--- a/sdk/helper/keysutil/policy.go
+++ b/sdk/helper/keysutil/policy.go
@@ -10,7 +10,6 @@ import (
"crypto/aes"
"crypto/cipher"
"crypto/ecdsa"
- stdlibEd25519 "crypto/ed25519"
"crypto/elliptic"
"crypto/hmac"
"crypto/rand"
@@ -83,44 +82,6 @@ const (
DefaultVersionTemplate = "vault:v{{version}}:"
)
-type PaddingScheme string
-
-const (
- PaddingScheme_OAEP = PaddingScheme("oaep")
- PaddingScheme_PKCS1v15 = PaddingScheme("pkcs1v15")
-)
-
-var genEd25519Options = func(hashAlgorithm HashType, signatureContext string) (*stdlibEd25519.Options, error) {
- if signatureContext != "" {
- return nil, fmt.Errorf("signature context is not supported feature")
- }
-
- if hashAlgorithm == HashTypeSHA2512 {
- return nil, fmt.Errorf("hash algorithm of SHA2 512 is not supported feature")
- }
-
- return &stdlibEd25519.Options{
- Hash: crypto.Hash(0),
- }, nil
-}
-
-func (p PaddingScheme) String() string {
- return string(p)
-}
-
-// ParsePaddingScheme expects a lower case string that can be directly compared to
-// a defined padding scheme or returns an error.
-func ParsePaddingScheme(s string) (PaddingScheme, error) {
- switch s {
- case PaddingScheme_OAEP.String():
- return PaddingScheme_OAEP, nil
- case PaddingScheme_PKCS1v15.String():
- return PaddingScheme_PKCS1v15, nil
- default:
- return "", fmt.Errorf("unknown padding scheme: %s", s)
- }
-}
-
type AEADFactory interface {
GetAEAD(iv []byte) (cipher.AEAD, error)
}
@@ -148,7 +109,6 @@ type SigningOptions struct {
Marshaling MarshalingType
SaltLength int
SigAlgorithm string
- SigContext string // Provide a context for Ed25519ctx signatures
ManagedKeyParams ManagedKeyParameters
}
@@ -239,15 +199,6 @@ func (kt KeyType) ImportPublicKeySupported() bool {
return false
}
-func (kt KeyType) PaddingSchemesSupported() bool {
- switch kt {
- case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096:
- return true
- default:
- return false
- }
-}
-
func (kt KeyType) String() string {
switch kt {
case KeyType_AES128_GCM96:
@@ -988,7 +939,7 @@ func (p *Policy) Decrypt(context, nonce []byte, value string) (string, error) {
return p.DecryptWithFactory(context, nonce, value, nil)
}
-func (p *Policy) DecryptWithFactory(context, nonce []byte, value string, factories ...any) (string, error) {
+func (p *Policy) DecryptWithFactory(context, nonce []byte, value string, factories ...interface{}) (string, error) {
if !p.Type.DecryptionSupported() {
return "", errutil.UserError{Err: fmt.Sprintf("message decryption not supported for key type %v", p.Type)}
}
@@ -1083,24 +1034,15 @@ func (p *Policy) DecryptWithFactory(context, nonce []byte, value string, factori
return "", err
}
case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096:
- paddingScheme, err := getPaddingScheme(factories)
- if err != nil {
- return "", err
- }
keyEntry, err := p.safeGetKeyEntry(ver)
if err != nil {
return "", err
}
key := keyEntry.RSAKey
-
- switch paddingScheme {
- case PaddingScheme_PKCS1v15:
- plain, err = rsa.DecryptPKCS1v15(rand.Reader, key, decoded)
- case PaddingScheme_OAEP:
- plain, err = rsa.DecryptOAEP(sha256.New(), rand.Reader, key, decoded, nil)
- default:
- return "", errutil.InternalError{Err: fmt.Sprintf("unsupported RSA padding scheme %s", paddingScheme)}
+ if key == nil {
+ return "", errutil.InternalError{Err: fmt.Sprintf("cannot decrypt ciphertext, key version does not have a private counterpart")}
}
+ plain, err = rsa.DecryptOAEP(sha256.New(), rand.Reader, key, decoded, nil)
if err != nil {
return "", errutil.InternalError{Err: fmt.Sprintf("failed to RSA decrypt the ciphertext: %v", err)}
}
@@ -1316,12 +1258,9 @@ func (p *Policy) SignWithOptions(ver int, context, input []byte, options *Signin
key = ed25519.PrivateKey(keyParams.Key)
}
- opts, err := genEd25519Options(hashAlgorithm, options.SigContext)
- if err != nil {
- return nil, errutil.UserError{Err: fmt.Sprintf("error generating Ed25519 options: %v", err)}
- }
-
- sig, err = key.Sign(rand.Reader, input, opts)
+ // Per docs, do not pre-hash ed25519; it does two passes and performs
+ // its own hashing
+ sig, err = key.Sign(rand.Reader, input, crypto.Hash(0))
if err != nil {
return nil, err
}
@@ -1517,16 +1456,7 @@ func (p *Policy) VerifySignatureWithOptions(context, input []byte, sig string, o
pub = ed25519.PublicKey(raw)
}
- opts, err := genEd25519Options(hashAlgorithm, options.SigContext)
- if err != nil {
- return false, errutil.UserError{Err: fmt.Sprintf("error generating Ed25519 options: %v", err)}
- }
- if err := stdlibEd25519.VerifyWithOptions(pub, input, sigBytes, opts); err != nil {
- // We drop the error, just report back that we failed signature verification
- return false, nil
- }
-
- return true, nil
+ return ed25519.Verify(pub, input, sigBytes), nil
case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096:
keyEntry, err := p.safeGetKeyEntry(ver)
@@ -2103,7 +2033,7 @@ func (p *Policy) SymmetricDecryptRaw(encKey, ciphertext []byte, opts SymmetricOp
return plain, nil
}
-func (p *Policy) EncryptWithFactory(ver int, context []byte, nonce []byte, value string, factories ...any) (string, error) {
+func (p *Policy) EncryptWithFactory(ver int, context []byte, nonce []byte, value string, factories ...interface{}) (string, error) {
if !p.Type.EncryptionSupported() {
return "", errutil.UserError{Err: fmt.Sprintf("message encryption not supported for key type %v", p.Type)}
}
@@ -2198,10 +2128,6 @@ func (p *Policy) EncryptWithFactory(ver int, context []byte, nonce []byte, value
return "", err
}
case KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096:
- paddingScheme, err := getPaddingScheme(factories)
- if err != nil {
- return "", err
- }
keyEntry, err := p.safeGetKeyEntry(ver)
if err != nil {
return "", err
@@ -2212,15 +2138,7 @@ func (p *Policy) EncryptWithFactory(ver int, context []byte, nonce []byte, value
} else {
publicKey = keyEntry.RSAPublicKey
}
- switch paddingScheme {
- case PaddingScheme_PKCS1v15:
- ciphertext, err = rsa.EncryptPKCS1v15(rand.Reader, publicKey, plaintext)
- case PaddingScheme_OAEP:
- ciphertext, err = rsa.EncryptOAEP(sha256.New(), rand.Reader, publicKey, plaintext, nil)
- default:
- return "", errutil.InternalError{Err: fmt.Sprintf("unsupported RSA padding scheme %s", paddingScheme)}
- }
-
+ ciphertext, err = rsa.EncryptOAEP(sha256.New(), rand.Reader, publicKey, plaintext, nil)
if err != nil {
return "", errutil.InternalError{Err: fmt.Sprintf("failed to RSA encrypt the plaintext: %v", err)}
}
@@ -2266,19 +2184,6 @@ func (p *Policy) EncryptWithFactory(ver int, context []byte, nonce []byte, value
return encoded, nil
}
-func getPaddingScheme(factories []any) (PaddingScheme, error) {
- for _, rawFactory := range factories {
- if rawFactory == nil {
- continue
- }
-
- if p, ok := rawFactory.(PaddingScheme); ok && p != "" {
- return p, nil
- }
- }
- return PaddingScheme_OAEP, nil
-}
-
func (p *Policy) KeyVersionCanBeUpdated(keyVersion int, isPrivateKey bool) error {
keyEntry, err := p.safeGetKeyEntry(keyVersion)
if err != nil {
@@ -2474,7 +2379,7 @@ func (ke *KeyEntry) parseFromKey(PolKeyType KeyType, parsedKey any) error {
return nil
}
-func (p *Policy) WrapKey(ver int, targetKey any, targetKeyType KeyType, hash hash.Hash) (string, error) {
+func (p *Policy) WrapKey(ver int, targetKey interface{}, targetKeyType KeyType, hash hash.Hash) (string, error) {
if !p.Type.SigningSupported() {
return "", fmt.Errorf("message signing not supported for key type %v", p.Type)
}
@@ -2498,7 +2403,7 @@ func (p *Policy) WrapKey(ver int, targetKey any, targetKeyType KeyType, hash has
return keyEntry.WrapKey(targetKey, targetKeyType, hash)
}
-func (ke *KeyEntry) WrapKey(targetKey any, targetKeyType KeyType, hash hash.Hash) (string, error) {
+func (ke *KeyEntry) WrapKey(targetKey interface{}, targetKeyType KeyType, hash hash.Hash) (string, error) {
// Presently this method implements a CKM_RSA_AES_KEY_WRAP-compatible
// wrapping interface and only works on RSA keyEntries as a result.
if ke.RSAPublicKey == nil {
diff --git a/sdk/helper/keysutil/policy_test.go b/sdk/helper/keysutil/policy_test.go
index cd921a52065b..fd753f22ba7e 100644
--- a/sdk/helper/keysutil/policy_test.go
+++ b/sdk/helper/keysutil/policy_test.go
@@ -11,7 +11,6 @@ import (
"crypto/rand"
"crypto/rsa"
"crypto/x509"
- "encoding/base64"
"errors"
"fmt"
mathrand "math/rand"
@@ -934,25 +933,6 @@ func autoVerify(depth int, t *testing.T, p *Policy, input []byte, sig *SigningRe
}
}
-func autoVerifyDecrypt(depth int, t *testing.T, p *Policy, input []byte, ct string, factories ...any) {
- tabs := strings.Repeat("\t", depth)
- t.Log(tabs, "Automatically decrypting with options:", factories)
-
- tabs = strings.Repeat("\t", depth+1)
- ptb64, err := p.DecryptWithFactory(nil, nil, ct, factories...)
- if err != nil {
- t.Fatal(tabs, "❌ Failed to automatically verify signature:", err)
- }
-
- pt, err := base64.StdEncoding.DecodeString(ptb64)
- if err != nil {
- t.Fatal(tabs, "❌ Failed decoding plaintext:", err)
- }
- if !bytes.Equal(input, pt) {
- t.Fatal(tabs, "❌ Failed to automatically decrypt")
- }
-}
-
func Test_RSA_PSS(t *testing.T) {
t.Log("Testing RSA PSS")
mathrand.Seed(time.Now().UnixNano())
@@ -1103,64 +1083,8 @@ func Test_RSA_PSS(t *testing.T) {
}
}
-func Test_RSA_PKCS1Encryption(t *testing.T) {
- t.Log("Testing RSA PKCS#1v1.5 padded encryption")
-
- ctx := context.Background()
- storage := &logical.InmemStorage{}
- // https://crypto.stackexchange.com/a/1222
- pt := []byte("Sphinx of black quartz, judge my vow")
- input := base64.StdEncoding.EncodeToString(pt)
-
- tabs := make(map[int]string)
- for i := 1; i <= 6; i++ {
- tabs[i] = strings.Repeat("\t", i)
- }
-
- test_RSA_PKCS1 := func(t *testing.T, p *Policy, rsaKey *rsa.PrivateKey, padding PaddingScheme) {
- // 1. Make a signature with the given key size and hash algorithm.
- t.Log(tabs[3], "Make an automatic signature")
- ct, err := p.EncryptWithFactory(0, nil, nil, string(input), padding)
- if err != nil {
- t.Fatal(tabs[4], "❌ Failed to automatically encrypt:", err)
- }
-
- // 1.1 Verify this signature using the *inferred* salt length.
- autoVerifyDecrypt(4, t, p, pt, ct, padding)
- }
-
- rsaKeyTypes := []KeyType{KeyType_RSA2048, KeyType_RSA3072, KeyType_RSA4096}
- testKeys, err := generateTestKeys()
- if err != nil {
- t.Fatalf("error generating test keys: %s", err)
- }
-
- // 1. For each standard RSA key size 2048, 3072, and 4096...
- for _, rsaKeyType := range rsaKeyTypes {
- t.Log("Key size: ", rsaKeyType)
- p := &Policy{
- Name: fmt.Sprint(rsaKeyType), // NOTE: crucial to create a new key per key size
- Type: rsaKeyType,
- }
-
- rsaKeyBytes := testKeys[rsaKeyType]
- err := p.Import(ctx, storage, rsaKeyBytes, rand.Reader)
- if err != nil {
- t.Fatal(tabs[1], "❌ Failed to import key:", err)
- }
- rsaKeyAny, err := x509.ParsePKCS8PrivateKey(rsaKeyBytes)
- if err != nil {
- t.Fatalf("error parsing test keys: %s", err)
- }
- rsaKey := rsaKeyAny.(*rsa.PrivateKey)
- for _, padding := range []PaddingScheme{PaddingScheme_OAEP, PaddingScheme_PKCS1v15, ""} {
- t.Run(fmt.Sprintf("%s/%s", rsaKeyType.String(), padding), func(t *testing.T) { test_RSA_PKCS1(t, p, rsaKey, padding) })
- }
- }
-}
-
-func Test_RSA_PKCS1Signing(t *testing.T) {
- t.Log("Testing RSA PKCS#1v1.5 signatures")
+func Test_RSA_PKCS1(t *testing.T) {
+ t.Log("Testing RSA PKCS#1v1.5")
ctx := context.Background()
storage := &logical.InmemStorage{}
diff --git a/sdk/helper/pluginutil/multiplexing.pb.go b/sdk/helper/pluginutil/multiplexing.pb.go
index c6b2fe4942c1..4e6c9f46a2e7 100644
--- a/sdk/helper/pluginutil/multiplexing.pb.go
+++ b/sdk/helper/pluginutil/multiplexing.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/helper/pluginutil/multiplexing.proto
@@ -31,9 +31,11 @@ type MultiplexingSupportRequest struct {
func (x *MultiplexingSupportRequest) Reset() {
*x = MultiplexingSupportRequest{}
- mi := &file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MultiplexingSupportRequest) String() string {
@@ -44,7 +46,7 @@ func (*MultiplexingSupportRequest) ProtoMessage() {}
func (x *MultiplexingSupportRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -69,9 +71,11 @@ type MultiplexingSupportResponse struct {
func (x *MultiplexingSupportResponse) Reset() {
*x = MultiplexingSupportResponse{}
- mi := &file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MultiplexingSupportResponse) String() string {
@@ -82,7 +86,7 @@ func (*MultiplexingSupportResponse) ProtoMessage() {}
func (x *MultiplexingSupportResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -165,6 +169,32 @@ func file_sdk_helper_pluginutil_multiplexing_proto_init() {
if File_sdk_helper_pluginutil_multiplexing_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*MultiplexingSupportRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_helper_pluginutil_multiplexing_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*MultiplexingSupportResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/helper/pluginutil/multiplexing_grpc.pb.go b/sdk/helper/pluginutil/multiplexing_grpc.pb.go
index f66aa9151aa9..0f0df2128ba7 100644
--- a/sdk/helper/pluginutil/multiplexing_grpc.pb.go
+++ b/sdk/helper/pluginutil/multiplexing_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: sdk/helper/pluginutil/multiplexing.proto
@@ -18,8 +18,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
PluginMultiplexing_MultiplexingSupport_FullMethodName = "/pluginutil.multiplexing.PluginMultiplexing/MultiplexingSupport"
@@ -52,24 +52,20 @@ func (c *pluginMultiplexingClient) MultiplexingSupport(ctx context.Context, in *
// PluginMultiplexingServer is the server API for PluginMultiplexing service.
// All implementations must embed UnimplementedPluginMultiplexingServer
-// for forward compatibility.
+// for forward compatibility
type PluginMultiplexingServer interface {
MultiplexingSupport(context.Context, *MultiplexingSupportRequest) (*MultiplexingSupportResponse, error)
mustEmbedUnimplementedPluginMultiplexingServer()
}
-// UnimplementedPluginMultiplexingServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedPluginMultiplexingServer struct{}
+// UnimplementedPluginMultiplexingServer must be embedded to have forward compatible implementations.
+type UnimplementedPluginMultiplexingServer struct {
+}
func (UnimplementedPluginMultiplexingServer) MultiplexingSupport(context.Context, *MultiplexingSupportRequest) (*MultiplexingSupportResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method MultiplexingSupport not implemented")
}
func (UnimplementedPluginMultiplexingServer) mustEmbedUnimplementedPluginMultiplexingServer() {}
-func (UnimplementedPluginMultiplexingServer) testEmbeddedByValue() {}
// UnsafePluginMultiplexingServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to PluginMultiplexingServer will
@@ -79,13 +75,6 @@ type UnsafePluginMultiplexingServer interface {
}
func RegisterPluginMultiplexingServer(s grpc.ServiceRegistrar, srv PluginMultiplexingServer) {
- // If the following call pancis, it indicates UnimplementedPluginMultiplexingServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&PluginMultiplexing_ServiceDesc, srv)
}
diff --git a/sdk/logical/event.pb.go b/sdk/logical/event.pb.go
index 4197adec5fcb..1db6d46dc94d 100644
--- a/sdk/logical/event.pb.go
+++ b/sdk/logical/event.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/logical/event.proto
@@ -46,9 +46,11 @@ type EventPluginInfo struct {
func (x *EventPluginInfo) Reset() {
*x = EventPluginInfo{}
- mi := &file_sdk_logical_event_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_event_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EventPluginInfo) String() string {
@@ -59,7 +61,7 @@ func (*EventPluginInfo) ProtoMessage() {}
func (x *EventPluginInfo) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_event_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -140,9 +142,11 @@ type EventData struct {
func (x *EventData) Reset() {
*x = EventData{}
- mi := &file_sdk_logical_event_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_event_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EventData) String() string {
@@ -153,7 +157,7 @@ func (*EventData) ProtoMessage() {}
func (x *EventData) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_event_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -212,9 +216,11 @@ type EventReceived struct {
func (x *EventReceived) Reset() {
*x = EventReceived{}
- mi := &file_sdk_logical_event_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_event_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EventReceived) String() string {
@@ -225,7 +231,7 @@ func (*EventReceived) ProtoMessage() {}
func (x *EventReceived) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_event_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -348,6 +354,44 @@ func file_sdk_logical_event_proto_init() {
if File_sdk_logical_event_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_logical_event_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*EventPluginInfo); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_event_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*EventData); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_event_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*EventReceived); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/logical/identity.pb.go b/sdk/logical/identity.pb.go
index bccf313866fa..5f08ce168935 100644
--- a/sdk/logical/identity.pb.go
+++ b/sdk/logical/identity.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/logical/identity.proto
@@ -45,9 +45,11 @@ type Entity struct {
func (x *Entity) Reset() {
*x = Entity{}
- mi := &file_sdk_logical_identity_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_identity_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Entity) String() string {
@@ -58,7 +60,7 @@ func (*Entity) ProtoMessage() {}
func (x *Entity) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_identity_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -149,9 +151,11 @@ type Alias struct {
func (x *Alias) Reset() {
*x = Alias{}
- mi := &file_sdk_logical_identity_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_identity_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Alias) String() string {
@@ -162,7 +166,7 @@ func (*Alias) ProtoMessage() {}
func (x *Alias) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_identity_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -251,9 +255,11 @@ type Group struct {
func (x *Group) Reset() {
*x = Group{}
- mi := &file_sdk_logical_identity_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_identity_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Group) String() string {
@@ -264,7 +270,7 @@ func (*Group) ProtoMessage() {}
func (x *Group) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_identity_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -320,9 +326,11 @@ type MFAMethodID struct {
func (x *MFAMethodID) Reset() {
*x = MFAMethodID{}
- mi := &file_sdk_logical_identity_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_identity_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MFAMethodID) String() string {
@@ -333,7 +341,7 @@ func (*MFAMethodID) ProtoMessage() {}
func (x *MFAMethodID) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_identity_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -386,9 +394,11 @@ type MFAConstraintAny struct {
func (x *MFAConstraintAny) Reset() {
*x = MFAConstraintAny{}
- mi := &file_sdk_logical_identity_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_identity_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MFAConstraintAny) String() string {
@@ -399,7 +409,7 @@ func (*MFAConstraintAny) ProtoMessage() {}
func (x *MFAConstraintAny) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_identity_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -432,9 +442,11 @@ type MFARequirement struct {
func (x *MFARequirement) Reset() {
*x = MFARequirement{}
- mi := &file_sdk_logical_identity_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_identity_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MFARequirement) String() string {
@@ -445,7 +457,7 @@ func (*MFARequirement) ProtoMessage() {}
func (x *MFARequirement) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_identity_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -614,6 +626,80 @@ func file_sdk_logical_identity_proto_init() {
if File_sdk_logical_identity_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_logical_identity_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*Entity); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_identity_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*Alias); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_identity_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*Group); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_identity_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*MFAMethodID); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_identity_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*MFAConstraintAny); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_identity_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*MFARequirement); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/logical/plugin.pb.go b/sdk/logical/plugin.pb.go
index 3c6f951c9ed4..7b8fe8ce723a 100644
--- a/sdk/logical/plugin.pb.go
+++ b/sdk/logical/plugin.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/logical/plugin.proto
@@ -38,9 +38,11 @@ type PluginEnvironment struct {
func (x *PluginEnvironment) Reset() {
*x = PluginEnvironment{}
- mi := &file_sdk_logical_plugin_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_plugin_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PluginEnvironment) String() string {
@@ -51,7 +53,7 @@ func (*PluginEnvironment) ProtoMessage() {}
func (x *PluginEnvironment) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_plugin_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -137,6 +139,20 @@ func file_sdk_logical_plugin_proto_init() {
if File_sdk_logical_plugin_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_logical_plugin_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*PluginEnvironment); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/logical/version.pb.go b/sdk/logical/version.pb.go
index 66cd5872124f..abb579096b27 100644
--- a/sdk/logical/version.pb.go
+++ b/sdk/logical/version.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/logical/version.proto
@@ -31,9 +31,11 @@ type Empty struct {
func (x *Empty) Reset() {
*x = Empty{}
- mi := &file_sdk_logical_version_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_version_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Empty) String() string {
@@ -44,7 +46,7 @@ func (*Empty) ProtoMessage() {}
func (x *Empty) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_version_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -70,9 +72,11 @@ type VersionReply struct {
func (x *VersionReply) Reset() {
*x = VersionReply{}
- mi := &file_sdk_logical_version_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_logical_version_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *VersionReply) String() string {
@@ -83,7 +87,7 @@ func (*VersionReply) ProtoMessage() {}
func (x *VersionReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_logical_version_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -156,6 +160,32 @@ func file_sdk_logical_version_proto_init() {
if File_sdk_logical_version_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_logical_version_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*Empty); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_logical_version_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*VersionReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/logical/version_grpc.pb.go b/sdk/logical/version_grpc.pb.go
index 53bc496d8602..9aa110fce98f 100644
--- a/sdk/logical/version_grpc.pb.go
+++ b/sdk/logical/version_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: sdk/logical/version.proto
@@ -18,8 +18,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
PluginVersion_Version_FullMethodName = "/logical.PluginVersion/Version"
@@ -55,7 +55,7 @@ func (c *pluginVersionClient) Version(ctx context.Context, in *Empty, opts ...gr
// PluginVersionServer is the server API for PluginVersion service.
// All implementations must embed UnimplementedPluginVersionServer
-// for forward compatibility.
+// for forward compatibility
//
// PluginVersion is an optional RPC service implemented by plugins.
type PluginVersionServer interface {
@@ -64,18 +64,14 @@ type PluginVersionServer interface {
mustEmbedUnimplementedPluginVersionServer()
}
-// UnimplementedPluginVersionServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedPluginVersionServer struct{}
+// UnimplementedPluginVersionServer must be embedded to have forward compatible implementations.
+type UnimplementedPluginVersionServer struct {
+}
func (UnimplementedPluginVersionServer) Version(context.Context, *Empty) (*VersionReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method Version not implemented")
}
func (UnimplementedPluginVersionServer) mustEmbedUnimplementedPluginVersionServer() {}
-func (UnimplementedPluginVersionServer) testEmbeddedByValue() {}
// UnsafePluginVersionServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to PluginVersionServer will
@@ -85,13 +81,6 @@ type UnsafePluginVersionServer interface {
}
func RegisterPluginVersionServer(s grpc.ServiceRegistrar, srv PluginVersionServer) {
- // If the following call pancis, it indicates UnimplementedPluginVersionServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&PluginVersion_ServiceDesc, srv)
}
diff --git a/sdk/physical/physical.go b/sdk/physical/physical.go
index b20f9ab8d83d..624e580f9a1f 100644
--- a/sdk/physical/physical.go
+++ b/sdk/physical/physical.go
@@ -60,25 +60,6 @@ type HABackend interface {
HAEnabled() bool
}
-// RemovableNodeHABackend is used for HA backends that can remove nodes from
-// their cluster
-type RemovableNodeHABackend interface {
- HABackend
-
- // IsNodeRemoved checks if the node with the given ID has been removed.
- // This will only be called on the active node.
- IsNodeRemoved(ctx context.Context, nodeID string) (bool, error)
-
- // NodeID returns the ID for this node
- NodeID() string
-
- // IsRemoved checks if this node has been removed
- IsRemoved() bool
-
- // RemoveSelf marks this node as being removed
- RemoveSelf() error
-}
-
// FencingHABackend is an HABackend which provides the additional guarantee that
// each Lock it returns from LockWith is also a FencingLock. A FencingLock
// provides a mechanism to retrieve a fencing token that can be included by
diff --git a/sdk/plugin/pb/backend.pb.go b/sdk/plugin/pb/backend.pb.go
index 4813700b0d52..4c28e80b1378 100644
--- a/sdk/plugin/pb/backend.pb.go
+++ b/sdk/plugin/pb/backend.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: sdk/plugin/pb/backend.proto
@@ -33,9 +33,11 @@ type Empty struct {
func (x *Empty) Reset() {
*x = Empty{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Empty) String() string {
@@ -46,7 +48,7 @@ func (*Empty) ProtoMessage() {}
func (x *Empty) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -71,9 +73,11 @@ type Header struct {
func (x *Header) Reset() {
*x = Header{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Header) String() string {
@@ -84,7 +88,7 @@ func (*Header) ProtoMessage() {}
func (x *Header) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -130,9 +134,11 @@ type ProtoError struct {
func (x *ProtoError) Reset() {
*x = ProtoError{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ProtoError) String() string {
@@ -143,7 +149,7 @@ func (*ProtoError) ProtoMessage() {}
func (x *ProtoError) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -214,9 +220,11 @@ type Paths struct {
func (x *Paths) Reset() {
*x = Paths{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Paths) String() string {
@@ -227,7 +235,7 @@ func (*Paths) ProtoMessage() {}
func (x *Paths) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -366,9 +374,11 @@ type Request struct {
func (x *Request) Reset() {
*x = Request{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Request) String() string {
@@ -379,7 +389,7 @@ func (*Request) ProtoMessage() {}
func (x *Request) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -603,9 +613,11 @@ type Auth struct {
func (x *Auth) Reset() {
*x = Auth{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Auth) String() string {
@@ -616,7 +628,7 @@ func (*Auth) ProtoMessage() {}
func (x *Auth) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -788,9 +800,11 @@ type TokenEntry struct {
func (x *TokenEntry) Reset() {
*x = TokenEntry{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[6]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TokenEntry) String() string {
@@ -801,7 +815,7 @@ func (*TokenEntry) ProtoMessage() {}
func (x *TokenEntry) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[6]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -984,9 +998,11 @@ type LeaseOptions struct {
func (x *LeaseOptions) Reset() {
*x = LeaseOptions{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[7]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LeaseOptions) String() string {
@@ -997,7 +1013,7 @@ func (*LeaseOptions) ProtoMessage() {}
func (x *LeaseOptions) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[7]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1065,9 +1081,11 @@ type Secret struct {
func (x *Secret) Reset() {
*x = Secret{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[8]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[8]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Secret) String() string {
@@ -1078,7 +1096,7 @@ func (*Secret) ProtoMessage() {}
func (x *Secret) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[8]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1150,9 +1168,11 @@ type Response struct {
func (x *Response) Reset() {
*x = Response{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[9]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[9]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Response) String() string {
@@ -1163,7 +1183,7 @@ func (*Response) ProtoMessage() {}
func (x *Response) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[9]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1266,9 +1286,11 @@ type ResponseWrapInfo struct {
func (x *ResponseWrapInfo) Reset() {
*x = ResponseWrapInfo{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[10]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[10]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ResponseWrapInfo) String() string {
@@ -1279,7 +1301,7 @@ func (*ResponseWrapInfo) ProtoMessage() {}
func (x *ResponseWrapInfo) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[10]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1375,9 +1397,11 @@ type RequestWrapInfo struct {
func (x *RequestWrapInfo) Reset() {
*x = RequestWrapInfo{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[11]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[11]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RequestWrapInfo) String() string {
@@ -1388,7 +1412,7 @@ func (*RequestWrapInfo) ProtoMessage() {}
func (x *RequestWrapInfo) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[11]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1436,9 +1460,11 @@ type HandleRequestArgs struct {
func (x *HandleRequestArgs) Reset() {
*x = HandleRequestArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[12]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[12]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *HandleRequestArgs) String() string {
@@ -1449,7 +1475,7 @@ func (*HandleRequestArgs) ProtoMessage() {}
func (x *HandleRequestArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[12]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1490,9 +1516,11 @@ type HandleRequestReply struct {
func (x *HandleRequestReply) Reset() {
*x = HandleRequestReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[13]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[13]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *HandleRequestReply) String() string {
@@ -1503,7 +1531,7 @@ func (*HandleRequestReply) ProtoMessage() {}
func (x *HandleRequestReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[13]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1541,9 +1569,11 @@ type InitializeArgs struct {
func (x *InitializeArgs) Reset() {
*x = InitializeArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[14]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[14]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InitializeArgs) String() string {
@@ -1554,7 +1584,7 @@ func (*InitializeArgs) ProtoMessage() {}
func (x *InitializeArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[14]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1580,9 +1610,11 @@ type InitializeReply struct {
func (x *InitializeReply) Reset() {
*x = InitializeReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[15]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[15]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InitializeReply) String() string {
@@ -1593,7 +1625,7 @@ func (*InitializeReply) ProtoMessage() {}
func (x *InitializeReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[15]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1626,9 +1658,11 @@ type SpecialPathsReply struct {
func (x *SpecialPathsReply) Reset() {
*x = SpecialPathsReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[16]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[16]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *SpecialPathsReply) String() string {
@@ -1639,7 +1673,7 @@ func (*SpecialPathsReply) ProtoMessage() {}
func (x *SpecialPathsReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[16]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1673,9 +1707,11 @@ type HandleExistenceCheckArgs struct {
func (x *HandleExistenceCheckArgs) Reset() {
*x = HandleExistenceCheckArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[17]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[17]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *HandleExistenceCheckArgs) String() string {
@@ -1686,7 +1722,7 @@ func (*HandleExistenceCheckArgs) ProtoMessage() {}
func (x *HandleExistenceCheckArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[17]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1728,9 +1764,11 @@ type HandleExistenceCheckReply struct {
func (x *HandleExistenceCheckReply) Reset() {
*x = HandleExistenceCheckReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[18]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[18]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *HandleExistenceCheckReply) String() string {
@@ -1741,7 +1779,7 @@ func (*HandleExistenceCheckReply) ProtoMessage() {}
func (x *HandleExistenceCheckReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[18]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1790,9 +1828,11 @@ type SetupArgs struct {
func (x *SetupArgs) Reset() {
*x = SetupArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[19]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[19]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *SetupArgs) String() string {
@@ -1803,7 +1843,7 @@ func (*SetupArgs) ProtoMessage() {}
func (x *SetupArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[19]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1850,9 +1890,11 @@ type SetupReply struct {
func (x *SetupReply) Reset() {
*x = SetupReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[20]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[20]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *SetupReply) String() string {
@@ -1863,7 +1905,7 @@ func (*SetupReply) ProtoMessage() {}
func (x *SetupReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[20]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1896,9 +1938,11 @@ type TypeReply struct {
func (x *TypeReply) Reset() {
*x = TypeReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[21]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[21]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TypeReply) String() string {
@@ -1909,7 +1953,7 @@ func (*TypeReply) ProtoMessage() {}
func (x *TypeReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[21]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1941,9 +1985,11 @@ type InvalidateKeyArgs struct {
func (x *InvalidateKeyArgs) Reset() {
*x = InvalidateKeyArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[22]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[22]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *InvalidateKeyArgs) String() string {
@@ -1954,7 +2000,7 @@ func (*InvalidateKeyArgs) ProtoMessage() {}
func (x *InvalidateKeyArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[22]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1988,9 +2034,11 @@ type StorageEntry struct {
func (x *StorageEntry) Reset() {
*x = StorageEntry{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[23]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[23]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StorageEntry) String() string {
@@ -2001,7 +2049,7 @@ func (*StorageEntry) ProtoMessage() {}
func (x *StorageEntry) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[23]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2047,9 +2095,11 @@ type StorageListArgs struct {
func (x *StorageListArgs) Reset() {
*x = StorageListArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[24]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[24]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StorageListArgs) String() string {
@@ -2060,7 +2110,7 @@ func (*StorageListArgs) ProtoMessage() {}
func (x *StorageListArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[24]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2093,9 +2143,11 @@ type StorageListReply struct {
func (x *StorageListReply) Reset() {
*x = StorageListReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[25]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[25]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StorageListReply) String() string {
@@ -2106,7 +2158,7 @@ func (*StorageListReply) ProtoMessage() {}
func (x *StorageListReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[25]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2145,9 +2197,11 @@ type StorageGetArgs struct {
func (x *StorageGetArgs) Reset() {
*x = StorageGetArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[26]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[26]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StorageGetArgs) String() string {
@@ -2158,7 +2212,7 @@ func (*StorageGetArgs) ProtoMessage() {}
func (x *StorageGetArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[26]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2191,9 +2245,11 @@ type StorageGetReply struct {
func (x *StorageGetReply) Reset() {
*x = StorageGetReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[27]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[27]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StorageGetReply) String() string {
@@ -2204,7 +2260,7 @@ func (*StorageGetReply) ProtoMessage() {}
func (x *StorageGetReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[27]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2243,9 +2299,11 @@ type StoragePutArgs struct {
func (x *StoragePutArgs) Reset() {
*x = StoragePutArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[28]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[28]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StoragePutArgs) String() string {
@@ -2256,7 +2314,7 @@ func (*StoragePutArgs) ProtoMessage() {}
func (x *StoragePutArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[28]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2288,9 +2346,11 @@ type StoragePutReply struct {
func (x *StoragePutReply) Reset() {
*x = StoragePutReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[29]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[29]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StoragePutReply) String() string {
@@ -2301,7 +2361,7 @@ func (*StoragePutReply) ProtoMessage() {}
func (x *StoragePutReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[29]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2333,9 +2393,11 @@ type StorageDeleteArgs struct {
func (x *StorageDeleteArgs) Reset() {
*x = StorageDeleteArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[30]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[30]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StorageDeleteArgs) String() string {
@@ -2346,7 +2408,7 @@ func (*StorageDeleteArgs) ProtoMessage() {}
func (x *StorageDeleteArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[30]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2378,9 +2440,11 @@ type StorageDeleteReply struct {
func (x *StorageDeleteReply) Reset() {
*x = StorageDeleteReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[31]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[31]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *StorageDeleteReply) String() string {
@@ -2391,7 +2455,7 @@ func (*StorageDeleteReply) ProtoMessage() {}
func (x *StorageDeleteReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[31]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2423,9 +2487,11 @@ type TTLReply struct {
func (x *TTLReply) Reset() {
*x = TTLReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[32]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[32]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TTLReply) String() string {
@@ -2436,7 +2502,7 @@ func (*TTLReply) ProtoMessage() {}
func (x *TTLReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[32]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2468,9 +2534,11 @@ type TaintedReply struct {
func (x *TaintedReply) Reset() {
*x = TaintedReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[33]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[33]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TaintedReply) String() string {
@@ -2481,7 +2549,7 @@ func (*TaintedReply) ProtoMessage() {}
func (x *TaintedReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[33]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2513,9 +2581,11 @@ type CachingDisabledReply struct {
func (x *CachingDisabledReply) Reset() {
*x = CachingDisabledReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[34]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[34]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *CachingDisabledReply) String() string {
@@ -2526,7 +2596,7 @@ func (*CachingDisabledReply) ProtoMessage() {}
func (x *CachingDisabledReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[34]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2558,9 +2628,11 @@ type ReplicationStateReply struct {
func (x *ReplicationStateReply) Reset() {
*x = ReplicationStateReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[35]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[35]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ReplicationStateReply) String() string {
@@ -2571,7 +2643,7 @@ func (*ReplicationStateReply) ProtoMessage() {}
func (x *ReplicationStateReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[35]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2605,9 +2677,11 @@ type ResponseWrapDataArgs struct {
func (x *ResponseWrapDataArgs) Reset() {
*x = ResponseWrapDataArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[36]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[36]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ResponseWrapDataArgs) String() string {
@@ -2618,7 +2692,7 @@ func (*ResponseWrapDataArgs) ProtoMessage() {}
func (x *ResponseWrapDataArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[36]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2665,9 +2739,11 @@ type ResponseWrapDataReply struct {
func (x *ResponseWrapDataReply) Reset() {
*x = ResponseWrapDataReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[37]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[37]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ResponseWrapDataReply) String() string {
@@ -2678,7 +2754,7 @@ func (*ResponseWrapDataReply) ProtoMessage() {}
func (x *ResponseWrapDataReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[37]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2717,9 +2793,11 @@ type MlockEnabledReply struct {
func (x *MlockEnabledReply) Reset() {
*x = MlockEnabledReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[38]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[38]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MlockEnabledReply) String() string {
@@ -2730,7 +2808,7 @@ func (*MlockEnabledReply) ProtoMessage() {}
func (x *MlockEnabledReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[38]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2762,9 +2840,11 @@ type LocalMountReply struct {
func (x *LocalMountReply) Reset() {
*x = LocalMountReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[39]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[39]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LocalMountReply) String() string {
@@ -2775,7 +2855,7 @@ func (*LocalMountReply) ProtoMessage() {}
func (x *LocalMountReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[39]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2807,9 +2887,11 @@ type EntityInfoArgs struct {
func (x *EntityInfoArgs) Reset() {
*x = EntityInfoArgs{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[40]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[40]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EntityInfoArgs) String() string {
@@ -2820,7 +2902,7 @@ func (*EntityInfoArgs) ProtoMessage() {}
func (x *EntityInfoArgs) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[40]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2853,9 +2935,11 @@ type EntityInfoReply struct {
func (x *EntityInfoReply) Reset() {
*x = EntityInfoReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[41]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[41]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EntityInfoReply) String() string {
@@ -2866,7 +2950,7 @@ func (*EntityInfoReply) ProtoMessage() {}
func (x *EntityInfoReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[41]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2906,9 +2990,11 @@ type GroupsForEntityReply struct {
func (x *GroupsForEntityReply) Reset() {
*x = GroupsForEntityReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[42]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[42]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GroupsForEntityReply) String() string {
@@ -2919,7 +3005,7 @@ func (*GroupsForEntityReply) ProtoMessage() {}
func (x *GroupsForEntityReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[42]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -2959,9 +3045,11 @@ type PluginEnvReply struct {
func (x *PluginEnvReply) Reset() {
*x = PluginEnvReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[43]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[43]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PluginEnvReply) String() string {
@@ -2972,7 +3060,7 @@ func (*PluginEnvReply) ProtoMessage() {}
func (x *PluginEnvReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[43]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3011,9 +3099,11 @@ type GeneratePasswordFromPolicyRequest struct {
func (x *GeneratePasswordFromPolicyRequest) Reset() {
*x = GeneratePasswordFromPolicyRequest{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[44]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[44]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GeneratePasswordFromPolicyRequest) String() string {
@@ -3024,7 +3114,7 @@ func (*GeneratePasswordFromPolicyRequest) ProtoMessage() {}
func (x *GeneratePasswordFromPolicyRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[44]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3056,9 +3146,11 @@ type GeneratePasswordFromPolicyReply struct {
func (x *GeneratePasswordFromPolicyReply) Reset() {
*x = GeneratePasswordFromPolicyReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[45]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[45]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GeneratePasswordFromPolicyReply) String() string {
@@ -3069,7 +3161,7 @@ func (*GeneratePasswordFromPolicyReply) ProtoMessage() {}
func (x *GeneratePasswordFromPolicyReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[45]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3103,9 +3195,11 @@ type ClusterInfoReply struct {
func (x *ClusterInfoReply) Reset() {
*x = ClusterInfoReply{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[46]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[46]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ClusterInfoReply) String() string {
@@ -3116,7 +3210,7 @@ func (*ClusterInfoReply) ProtoMessage() {}
func (x *ClusterInfoReply) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[46]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3163,9 +3257,11 @@ type GenerateIdentityTokenRequest struct {
func (x *GenerateIdentityTokenRequest) Reset() {
*x = GenerateIdentityTokenRequest{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[47]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[47]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GenerateIdentityTokenRequest) String() string {
@@ -3176,7 +3272,7 @@ func (*GenerateIdentityTokenRequest) ProtoMessage() {}
func (x *GenerateIdentityTokenRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[47]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3216,9 +3312,11 @@ type GenerateIdentityTokenResponse struct {
func (x *GenerateIdentityTokenResponse) Reset() {
*x = GenerateIdentityTokenResponse{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[48]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[48]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GenerateIdentityTokenResponse) String() string {
@@ -3229,7 +3327,7 @@ func (*GenerateIdentityTokenResponse) ProtoMessage() {}
func (x *GenerateIdentityTokenResponse) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[48]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3274,9 +3372,11 @@ type Connection struct {
func (x *Connection) Reset() {
*x = Connection{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[49]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[49]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Connection) String() string {
@@ -3287,7 +3387,7 @@ func (*Connection) ProtoMessage() {}
func (x *Connection) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[49]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3344,9 +3444,11 @@ type ConnectionState struct {
func (x *ConnectionState) Reset() {
*x = ConnectionState{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[50]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[50]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ConnectionState) String() string {
@@ -3357,7 +3459,7 @@ func (*ConnectionState) ProtoMessage() {}
func (x *ConnectionState) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[50]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3466,9 +3568,11 @@ type Certificate struct {
func (x *Certificate) Reset() {
*x = Certificate{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[51]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[51]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Certificate) String() string {
@@ -3479,7 +3583,7 @@ func (*Certificate) ProtoMessage() {}
func (x *Certificate) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[51]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3511,9 +3615,11 @@ type CertificateChain struct {
func (x *CertificateChain) Reset() {
*x = CertificateChain{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[52]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[52]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *CertificateChain) String() string {
@@ -3524,7 +3630,7 @@ func (*CertificateChain) ProtoMessage() {}
func (x *CertificateChain) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[52]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -3557,9 +3663,11 @@ type SendEventRequest struct {
func (x *SendEventRequest) Reset() {
*x = SendEventRequest{}
- mi := &file_sdk_plugin_pb_backend_proto_msgTypes[53]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_sdk_plugin_pb_backend_proto_msgTypes[53]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *SendEventRequest) String() string {
@@ -3570,7 +3678,7 @@ func (*SendEventRequest) ProtoMessage() {}
func (x *SendEventRequest) ProtoReflect() protoreflect.Message {
mi := &file_sdk_plugin_pb_backend_proto_msgTypes[53]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -4354,6 +4462,656 @@ func file_sdk_plugin_pb_backend_proto_init() {
if File_sdk_plugin_pb_backend_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_sdk_plugin_pb_backend_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*Empty); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*Header); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*ProtoError); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*Paths); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*Request); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*Auth); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[6].Exporter = func(v any, i int) any {
+ switch v := v.(*TokenEntry); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[7].Exporter = func(v any, i int) any {
+ switch v := v.(*LeaseOptions); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[8].Exporter = func(v any, i int) any {
+ switch v := v.(*Secret); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[9].Exporter = func(v any, i int) any {
+ switch v := v.(*Response); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[10].Exporter = func(v any, i int) any {
+ switch v := v.(*ResponseWrapInfo); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[11].Exporter = func(v any, i int) any {
+ switch v := v.(*RequestWrapInfo); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[12].Exporter = func(v any, i int) any {
+ switch v := v.(*HandleRequestArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[13].Exporter = func(v any, i int) any {
+ switch v := v.(*HandleRequestReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[14].Exporter = func(v any, i int) any {
+ switch v := v.(*InitializeArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[15].Exporter = func(v any, i int) any {
+ switch v := v.(*InitializeReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[16].Exporter = func(v any, i int) any {
+ switch v := v.(*SpecialPathsReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[17].Exporter = func(v any, i int) any {
+ switch v := v.(*HandleExistenceCheckArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[18].Exporter = func(v any, i int) any {
+ switch v := v.(*HandleExistenceCheckReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[19].Exporter = func(v any, i int) any {
+ switch v := v.(*SetupArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[20].Exporter = func(v any, i int) any {
+ switch v := v.(*SetupReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[21].Exporter = func(v any, i int) any {
+ switch v := v.(*TypeReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[22].Exporter = func(v any, i int) any {
+ switch v := v.(*InvalidateKeyArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[23].Exporter = func(v any, i int) any {
+ switch v := v.(*StorageEntry); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[24].Exporter = func(v any, i int) any {
+ switch v := v.(*StorageListArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[25].Exporter = func(v any, i int) any {
+ switch v := v.(*StorageListReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[26].Exporter = func(v any, i int) any {
+ switch v := v.(*StorageGetArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[27].Exporter = func(v any, i int) any {
+ switch v := v.(*StorageGetReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[28].Exporter = func(v any, i int) any {
+ switch v := v.(*StoragePutArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[29].Exporter = func(v any, i int) any {
+ switch v := v.(*StoragePutReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[30].Exporter = func(v any, i int) any {
+ switch v := v.(*StorageDeleteArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[31].Exporter = func(v any, i int) any {
+ switch v := v.(*StorageDeleteReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[32].Exporter = func(v any, i int) any {
+ switch v := v.(*TTLReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[33].Exporter = func(v any, i int) any {
+ switch v := v.(*TaintedReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[34].Exporter = func(v any, i int) any {
+ switch v := v.(*CachingDisabledReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[35].Exporter = func(v any, i int) any {
+ switch v := v.(*ReplicationStateReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[36].Exporter = func(v any, i int) any {
+ switch v := v.(*ResponseWrapDataArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[37].Exporter = func(v any, i int) any {
+ switch v := v.(*ResponseWrapDataReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[38].Exporter = func(v any, i int) any {
+ switch v := v.(*MlockEnabledReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[39].Exporter = func(v any, i int) any {
+ switch v := v.(*LocalMountReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[40].Exporter = func(v any, i int) any {
+ switch v := v.(*EntityInfoArgs); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[41].Exporter = func(v any, i int) any {
+ switch v := v.(*EntityInfoReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[42].Exporter = func(v any, i int) any {
+ switch v := v.(*GroupsForEntityReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[43].Exporter = func(v any, i int) any {
+ switch v := v.(*PluginEnvReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[44].Exporter = func(v any, i int) any {
+ switch v := v.(*GeneratePasswordFromPolicyRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[45].Exporter = func(v any, i int) any {
+ switch v := v.(*GeneratePasswordFromPolicyReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[46].Exporter = func(v any, i int) any {
+ switch v := v.(*ClusterInfoReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[47].Exporter = func(v any, i int) any {
+ switch v := v.(*GenerateIdentityTokenRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[48].Exporter = func(v any, i int) any {
+ switch v := v.(*GenerateIdentityTokenResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[49].Exporter = func(v any, i int) any {
+ switch v := v.(*Connection); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[50].Exporter = func(v any, i int) any {
+ switch v := v.(*ConnectionState); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[51].Exporter = func(v any, i int) any {
+ switch v := v.(*Certificate); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[52].Exporter = func(v any, i int) any {
+ switch v := v.(*CertificateChain); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_sdk_plugin_pb_backend_proto_msgTypes[53].Exporter = func(v any, i int) any {
+ switch v := v.(*SendEventRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/sdk/plugin/pb/backend_grpc.pb.go b/sdk/plugin/pb/backend_grpc.pb.go
index 65d59ae77abc..322e723f1560 100644
--- a/sdk/plugin/pb/backend_grpc.pb.go
+++ b/sdk/plugin/pb/backend_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: sdk/plugin/pb/backend.proto
@@ -18,8 +18,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
Backend_HandleRequest_FullMethodName = "/pb.Backend/HandleRequest"
@@ -167,7 +167,7 @@ func (c *backendClient) Type(ctx context.Context, in *Empty, opts ...grpc.CallOp
// BackendServer is the server API for Backend service.
// All implementations must embed UnimplementedBackendServer
-// for forward compatibility.
+// for forward compatibility
//
// Backend is the interface that plugins must satisfy. The plugin should
// implement the server for this service. Requests will first run the
@@ -211,12 +211,9 @@ type BackendServer interface {
mustEmbedUnimplementedBackendServer()
}
-// UnimplementedBackendServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedBackendServer struct{}
+// UnimplementedBackendServer must be embedded to have forward compatible implementations.
+type UnimplementedBackendServer struct {
+}
func (UnimplementedBackendServer) HandleRequest(context.Context, *HandleRequestArgs) (*HandleRequestReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method HandleRequest not implemented")
@@ -243,7 +240,6 @@ func (UnimplementedBackendServer) Type(context.Context, *Empty) (*TypeReply, err
return nil, status.Errorf(codes.Unimplemented, "method Type not implemented")
}
func (UnimplementedBackendServer) mustEmbedUnimplementedBackendServer() {}
-func (UnimplementedBackendServer) testEmbeddedByValue() {}
// UnsafeBackendServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to BackendServer will
@@ -253,13 +249,6 @@ type UnsafeBackendServer interface {
}
func RegisterBackendServer(s grpc.ServiceRegistrar, srv BackendServer) {
- // If the following call pancis, it indicates UnimplementedBackendServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&Backend_ServiceDesc, srv)
}
@@ -521,7 +510,7 @@ func (c *storageClient) Delete(ctx context.Context, in *StorageDeleteArgs, opts
// StorageServer is the server API for Storage service.
// All implementations must embed UnimplementedStorageServer
-// for forward compatibility.
+// for forward compatibility
//
// Storage is the way that plugins are able read/write data. Plugins should
// implement the client for this service.
@@ -533,12 +522,9 @@ type StorageServer interface {
mustEmbedUnimplementedStorageServer()
}
-// UnimplementedStorageServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedStorageServer struct{}
+// UnimplementedStorageServer must be embedded to have forward compatible implementations.
+type UnimplementedStorageServer struct {
+}
func (UnimplementedStorageServer) List(context.Context, *StorageListArgs) (*StorageListReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method List not implemented")
@@ -553,7 +539,6 @@ func (UnimplementedStorageServer) Delete(context.Context, *StorageDeleteArgs) (*
return nil, status.Errorf(codes.Unimplemented, "method Delete not implemented")
}
func (UnimplementedStorageServer) mustEmbedUnimplementedStorageServer() {}
-func (UnimplementedStorageServer) testEmbeddedByValue() {}
// UnsafeStorageServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to StorageServer will
@@ -563,13 +548,6 @@ type UnsafeStorageServer interface {
}
func RegisterStorageServer(s grpc.ServiceRegistrar, srv StorageServer) {
- // If the following call pancis, it indicates UnimplementedStorageServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&Storage_ServiceDesc, srv)
}
@@ -891,7 +869,7 @@ func (c *systemViewClient) GenerateIdentityToken(ctx context.Context, in *Genera
// SystemViewServer is the server API for SystemView service.
// All implementations must embed UnimplementedSystemViewServer
-// for forward compatibility.
+// for forward compatibility
//
// SystemView exposes system configuration information in a safe way for plugins
// to consume. Plugins should implement the client for this service.
@@ -941,12 +919,9 @@ type SystemViewServer interface {
mustEmbedUnimplementedSystemViewServer()
}
-// UnimplementedSystemViewServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedSystemViewServer struct{}
+// UnimplementedSystemViewServer must be embedded to have forward compatible implementations.
+type UnimplementedSystemViewServer struct {
+}
func (UnimplementedSystemViewServer) DefaultLeaseTTL(context.Context, *Empty) (*TTLReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method DefaultLeaseTTL not implemented")
@@ -991,7 +966,6 @@ func (UnimplementedSystemViewServer) GenerateIdentityToken(context.Context, *Gen
return nil, status.Errorf(codes.Unimplemented, "method GenerateIdentityToken not implemented")
}
func (UnimplementedSystemViewServer) mustEmbedUnimplementedSystemViewServer() {}
-func (UnimplementedSystemViewServer) testEmbeddedByValue() {}
// UnsafeSystemViewServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to SystemViewServer will
@@ -1001,13 +975,6 @@ type UnsafeSystemViewServer interface {
}
func RegisterSystemViewServer(s grpc.ServiceRegistrar, srv SystemViewServer) {
- // If the following call pancis, it indicates UnimplementedSystemViewServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&SystemView_ServiceDesc, srv)
}
@@ -1362,24 +1329,20 @@ func (c *eventsClient) SendEvent(ctx context.Context, in *SendEventRequest, opts
// EventsServer is the server API for Events service.
// All implementations must embed UnimplementedEventsServer
-// for forward compatibility.
+// for forward compatibility
type EventsServer interface {
SendEvent(context.Context, *SendEventRequest) (*Empty, error)
mustEmbedUnimplementedEventsServer()
}
-// UnimplementedEventsServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedEventsServer struct{}
+// UnimplementedEventsServer must be embedded to have forward compatible implementations.
+type UnimplementedEventsServer struct {
+}
func (UnimplementedEventsServer) SendEvent(context.Context, *SendEventRequest) (*Empty, error) {
return nil, status.Errorf(codes.Unimplemented, "method SendEvent not implemented")
}
func (UnimplementedEventsServer) mustEmbedUnimplementedEventsServer() {}
-func (UnimplementedEventsServer) testEmbeddedByValue() {}
// UnsafeEventsServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to EventsServer will
@@ -1389,13 +1352,6 @@ type UnsafeEventsServer interface {
}
func RegisterEventsServer(s grpc.ServiceRegistrar, srv EventsServer) {
- // If the following call pancis, it indicates UnimplementedEventsServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&Events_ServiceDesc, srv)
}
diff --git a/tools/tools.sh b/tools/tools.sh
index b4f05dc1ec94..b447bbbae23f 100755
--- a/tools/tools.sh
+++ b/tools/tools.sh
@@ -45,8 +45,8 @@ install_external() {
github.com/loggerhead/enumer@latest
github.com/rinchsan/gosimports/cmd/gosimports@latest
golang.org/x/tools/cmd/goimports@latest
- google.golang.org/protobuf/cmd/protoc-gen-go@latest
- google.golang.org/grpc/cmd/protoc-gen-go-grpc@latest
+ google.golang.org/protobuf/cmd/protoc-gen-go@v1.34.2
+ google.golang.org/grpc/cmd/protoc-gen-go-grpc@v1.4.0
gotest.tools/gotestsum@latest
mvdan.cc/gofumpt@latest
mvdan.cc/sh/v3/cmd/shfmt@latest
diff --git a/ui/README.md b/ui/README.md
index b6d69f492b70..9b732debdeba 100644
--- a/ui/README.md
+++ b/ui/README.md
@@ -24,19 +24,20 @@
This README outlines the details of collaborating on this Ember application.
-## Ember Version Upgrade Matrix
-
-Respective versions for `ember-cli`, `ember-source` and `ember-data` for each version of Vault that contains an upgrade.
-
-| Vault Version | Ember CLI | Ember Source | Ember Data |
-| ------------- | --------- | ------------ | ---------- |
-| 1.19.x | 5.8.0 | 5.8.0 | 5.3.2 |
-| 1.17.x | 5.4.2 | 5.4.0 | 4.12.4 |
-| 1.15.x | 4.12.1 | 4.12.0 | 4.11.3 |
-| 1.13.x | 4.4.0 | 4.4.4 | 4.5.0 |
-| 1.11.x | 3.28.5 | 3.28.10 | 3.28.6 |
-| 1.10.x | 3.24.0 | 3.24.7 | 3.24.0 |
-| 1.9.x | 3.22.0 | 3.22.0 | 3.22.0 |
+## Ember CLI Version Upgrade Matrix
+
+| Vault Version | Ember Version |
+| ------------- | ------------- |
+| 1.17.x | 5.4.2 |
+| 1.15.x | 4.12.0 |
+| 1.14.x | 4.4.0 |
+| 1.13.x | 4.4.0 |
+| 1.12.x | 3.28.5 |
+| 1.11.x | 3.28.5 |
+| 1.10.x | 3.28.5 |
+| 1.9.x | 3.22.0 |
+| 1.8.x | 3.22.0 |
+| 1.7.x | 3.11.0 |
## Prerequisites
@@ -88,13 +89,10 @@ _All of the commands below assume you're in the `ui/` directory._
[Mirage](https://miragejs.com/docs/getting-started/introduction/) can be helpful for mocking backend endpoints.
Look in [mirage/handlers](mirage/handlers/) for existing mocked backends.
-Run yarn with mirage: `export MIRAGE_DEV_HANDLER= && yarn start`
+Run yarn with mirage: `export MIRAGE_DEV_HANDLER= yarn start`
Where `handlername` is one of the options exported in [mirage/handlers/index](mirage/handlers/index.js)
-To stop using the handler, kill the yarn process (Ctrl+c) and then unset the environment variable.
-`unset MIRAGE_DEV_HANDLER`
-
## Building Vault UI into a Vault Binary
We use the [embed](https://golang.org/pkg/embed/) package from Go >1.20 to build
diff --git a/ui/app/adapters/auth-method.js b/ui/app/adapters/auth-method.js
index 418e0e3366f8..f23146de3afb 100644
--- a/ui/app/adapters/auth-method.js
+++ b/ui/app/adapters/auth-method.js
@@ -23,21 +23,26 @@ export default ApplicationAdapter.extend({
const isUnauthenticated = snapshotRecordArray?.adapterOptions?.unauthenticated;
// sys/internal/ui/mounts returns the actual value of the system TTL
// instead of '0' which just indicates the mount is using system defaults
- if (isUnauthenticated) {
+ const useMountsEndpoint = snapshotRecordArray?.adapterOptions?.useMountsEndpoint;
+ if (isUnauthenticated || useMountsEndpoint) {
const url = `/${this.urlPrefix()}/internal/ui/mounts`;
return this.ajax(url, 'GET', {
- unauthenticated: true,
+ unauthenticated: isUnauthenticated,
})
.then((result) => {
return {
data: result.data.auth,
};
})
- .catch(() => {
- return { data: {} };
+ .catch((e) => {
+ if (isUnauthenticated) return { data: {} };
+
+ if (e instanceof AdapterError) {
+ set(e, 'policyPath', 'sys/internal/ui/mounts');
+ }
+ throw e;
});
}
- // if authenticated, findAll will use GET sys/auth instead
return this.ajax(this.url(), 'GET').catch((e) => {
if (e instanceof AdapterError) {
set(e, 'policyPath', 'sys/auth');
@@ -46,25 +51,6 @@ export default ApplicationAdapter.extend({
});
},
- // findAll makes a network request and supplements the ember-data store with what the API returns.
- // after upgrading to ember-data 5.3.2 the store was becoming cluttered with outdated records, so
- // use query to refresh the store with each request. this is ideal for list views
- query() {
- const url = `/${this.urlPrefix()}/internal/ui/mounts`;
- return this.ajax(url, 'GET')
- .then((result) => {
- return {
- data: result.data.auth,
- };
- })
- .catch((e) => {
- if (e instanceof AdapterError) {
- set(e, 'policyPath', 'sys/internal/ui/mounts');
- }
- throw e;
- });
- },
-
createRecord(store, type, snapshot) {
const serializer = store.serializerFor(type.modelName);
const data = serializer.serialize(snapshot);
diff --git a/ui/app/adapters/generated-item-list.js b/ui/app/adapters/generated-item-list.js
index 2d2efd8a9ea7..5c5d3f2e9145 100644
--- a/ui/app/adapters/generated-item-list.js
+++ b/ui/app/adapters/generated-item-list.js
@@ -4,112 +4,44 @@
*/
import ApplicationAdapter from './application';
+import { task } from 'ember-concurrency';
import { service } from '@ember/service';
-import { sanitizePath } from 'core/utils/sanitize-path';
-import { encodePath } from 'vault/utils/path-encoding-helpers';
-import { tracked } from '@glimmer/tracking';
-export default class GeneratedItemListAdapter extends ApplicationAdapter {
- @service store;
- namespace = 'v1';
+export default ApplicationAdapter.extend({
+ store: service(),
+ namespace: 'v1',
+ urlForItem() {},
+ dynamicApiPath: '',
- // these items are set by calling getNewAdapter in the path-help service.
- @tracked apiPath = '';
- paths = {};
+ getDynamicApiPath: task(function* (id) {
+ // TODO: remove yield at some point.
+ const result = yield this.store.peekRecord('auth-method', id);
+ this.dynamicApiPath = result.apiPath;
+ return;
+ }),
- // These are the paths used for the adapter actions
- get getPath() {
- return this.paths.getPath || '';
- }
- get createPath() {
- return this.paths.createPath || '';
- }
- get deletePath() {
- return this.paths.deletePath || '';
- }
-
- getDynamicApiPath(id) {
- const result = this.store.peekRecord('auth-method', id);
- this.apiPath = result.apiPath;
- return result.apiPath;
- }
-
- async fetchByQuery(store, query, isList) {
+ fetchByQuery: task(function* (store, query, isList) {
const { id } = query;
- const payload = {};
+ const data = {};
if (isList) {
- payload.list = true;
+ data.list = true;
+ yield this.getDynamicApiPath.perform(id);
}
- const path = isList ? this.getDynamicApiPath(id) : '';
- const resp = await this.ajax(this.urlForItem(id, isList, path), 'GET', { data: payload });
- const data = {
- id,
- method: id,
- };
- return { ...resp, ...data };
- }
+ return this.ajax(this.urlForItem(id, isList, this.dynamicApiPath), 'GET', { data }).then((resp) => {
+ const data = {
+ id,
+ method: id,
+ };
+ return { ...resp, ...data };
+ });
+ }),
query(store, type, query) {
- return this.fetchByQuery(store, query, true);
- }
+ return this.fetchByQuery.perform(store, query, true);
+ },
queryRecord(store, type, query) {
- return this.fetchByQuery(store, query);
- }
-
- urlForItem(id, isList, dynamicApiPath) {
- const itemType = sanitizePath(this.getPath);
- let url;
- id = encodePath(id);
- // the apiPath changes when you switch between routes but the apiPath variable does not unless the model is reloaded
- // overwrite apiPath if dynamicApiPath exist.
- // dynamicApiPath comes from the model->adapter
- let apiPath = this.apiPath;
- if (dynamicApiPath) {
- apiPath = dynamicApiPath;
- }
- // isList indicates whether we are viewing the list page
- // of a top-level item such as userpass
- if (isList) {
- url = `${this.buildURL()}/${apiPath}${itemType}/`;
- } else {
- // build the URL for the show page of a nested item
- // such as a userpass group
- url = `${this.buildURL()}/${apiPath}${itemType}/${id}`;
- }
-
- return url;
- }
-
- urlForQueryRecord(id, modelName) {
- return this.urlForItem(id, modelName);
- }
-
- urlForUpdateRecord(id) {
- const itemType = this.createPath.slice(1, this.createPath.indexOf('{') - 1);
- return `${this.buildURL()}/${this.apiPath}${itemType}/${id}`;
- }
-
- urlForCreateRecord(modelType, snapshot) {
- const id = snapshot.record.mutableId; // computed property that returns either id or private settable _id value
- const path = this.createPath.slice(1, this.createPath.indexOf('{') - 1);
- return `${this.buildURL()}/${this.apiPath}${path}/${id}`;
- }
-
- urlForDeleteRecord(id) {
- const path = this.deletePath.slice(1, this.deletePath.indexOf('{') - 1);
- return `${this.buildURL()}/${this.apiPath}${path}/${id}`;
- }
-
- createRecord(store, type, snapshot) {
- return super.createRecord(...arguments).then((response) => {
- // if the server does not return an id and one has not been set on the model we need to set it manually from the mutableId value
- if (!response?.id && !snapshot.record.id) {
- snapshot.record.id = snapshot.record.mutableId;
- snapshot.id = snapshot.record.id;
- }
- return response;
- });
- }
-}
+ return this.fetchByQuery.perform(store, query);
+ },
+});
diff --git a/ui/app/adapters/kmip/config.js b/ui/app/adapters/kmip/config.js
index 80e08b775574..2c659c04a6ee 100644
--- a/ui/app/adapters/kmip/config.js
+++ b/ui/app/adapters/kmip/config.js
@@ -16,18 +16,9 @@ export default BaseAdapter.extend({
return this._url(...arguments);
},
urlForCreateRecord(modelName, snapshot) {
- const id = snapshot.record.mutableId;
- return this._url(id, modelName, snapshot);
+ return this._url(snapshot.id, modelName, snapshot);
},
urlForUpdateRecord() {
return this._url(...arguments);
},
-
- createRecord(store, type, snapshot) {
- return this._super(...arguments).then(() => {
- // saving returns a 204, return object with id to please ember-data...
- const id = snapshot.record.mutableId;
- return { id };
- });
- },
});
diff --git a/ui/app/adapters/kmip/role.js b/ui/app/adapters/kmip/role.js
index 4023ed0f7627..9779ad67a564 100644
--- a/ui/app/adapters/kmip/role.js
+++ b/ui/app/adapters/kmip/role.js
@@ -6,11 +6,10 @@
import BaseAdapter from './base';
import { decamelize } from '@ember/string';
import { getProperties } from '@ember/object';
-import { nonOperationFields } from 'vault/utils/model-helpers/kmip-role-fields';
export default BaseAdapter.extend({
createRecord(store, type, snapshot) {
- const name = snapshot.id || snapshot.record.role;
+ const name = snapshot.id || snapshot.attr('name');
const url = this._url(
type.modelName,
{
@@ -19,11 +18,10 @@ export default BaseAdapter.extend({
},
name
);
- const data = this.serialize(snapshot);
- return this.ajax(url, 'POST', { data }).then(() => {
+ return this.ajax(url, 'POST', { data: this.serialize(snapshot) }).then(() => {
return {
id: name,
- role: name,
+ name,
backend: snapshot.record.backend,
scope: snapshot.record.scope,
};
@@ -31,8 +29,7 @@ export default BaseAdapter.extend({
},
deleteRecord(store, type, snapshot) {
- // records must always have IDs
- const name = snapshot.id;
+ const name = snapshot.id || snapshot.attr('name');
const url = this._url(
type.modelName,
{
@@ -44,35 +41,35 @@ export default BaseAdapter.extend({
return this.ajax(url, 'DELETE');
},
- updateRecord() {
- return this.createRecord(...arguments);
- },
-
serialize(snapshot) {
// the endpoint here won't allow sending `operation_all` and `operation_none` at the same time or with
// other operation_ values, so we manually check for them and send an abbreviated object
const json = snapshot.serialize();
- const keys = nonOperationFields(snapshot.record.editableFields).map(decamelize);
- const nonOp = getProperties(json, keys);
- for (const field in nonOp) {
- if (nonOp[field] == null) {
- delete nonOp[field];
+ const keys = snapshot.record.nonOperationFields.map(decamelize);
+ const nonOperationFields = getProperties(json, keys);
+ for (const field in nonOperationFields) {
+ if (nonOperationFields[field] == null) {
+ delete nonOperationFields[field];
}
}
if (json.operation_all) {
return {
operation_all: true,
- ...nonOp,
+ ...nonOperationFields,
};
}
if (json.operation_none) {
return {
operation_none: true,
- ...nonOp,
+ ...nonOperationFields,
};
}
delete json.operation_none;
delete json.operation_all;
return json;
},
+
+ updateRecord() {
+ return this.createRecord(...arguments);
+ },
});
diff --git a/ui/app/adapters/named-path.js b/ui/app/adapters/named-path.js
index 6b8fb7297483..535cf33e3d9a 100644
--- a/ui/app/adapters/named-path.js
+++ b/ui/app/adapters/named-path.js
@@ -30,7 +30,7 @@ export default class NamedPathAdapter extends ApplicationAdapter {
const [store, { modelName }, snapshot] = arguments;
const name = snapshot.attr('name');
// throw error if user attempts to create a record with same name, otherwise POST request silently overrides (updates) the existing model
- if (store.peekRecord({ type: modelName, id: name }) !== null) {
+ if (store.hasRecordForId(modelName, name)) {
throw new Error(`A record already exists with the name: ${name}`);
} else {
return this._saveRecord(...arguments);
diff --git a/ui/app/app.js b/ui/app/app.js
index 2a4cdd68d14c..9409b0cd916f 100644
--- a/ui/app/app.js
+++ b/ui/app/app.js
@@ -15,21 +15,12 @@ export default class App extends Application {
engines = {
'config-ui': {
dependencies: {
- services: [
- 'auth',
- 'flash-messages',
- 'namespace',
- { 'app-router': 'router' },
- 'store',
- 'pagination',
- 'version',
- 'custom-messages',
- ],
+ services: ['auth', 'flash-messages', 'namespace', 'router', 'store', 'version', 'custom-messages'],
},
},
'open-api-explorer': {
dependencies: {
- services: ['auth', 'flash-messages', 'namespace', { 'app-router': 'router' }, 'version'],
+ services: ['auth', 'flash-messages', 'namespace', 'router', 'version'],
},
},
replication: {
@@ -40,7 +31,7 @@ export default class App extends Application {
'flash-messages',
'namespace',
'replication-mode',
- { 'app-router': 'router' },
+ 'router',
'store',
'version',
'-portal',
@@ -59,9 +50,8 @@ export default class App extends Application {
'flash-messages',
'namespace',
'path-help',
- { 'app-router': 'router' },
+ 'router',
'store',
- 'pagination',
'version',
'secret-mount-path',
],
@@ -72,7 +62,7 @@ export default class App extends Application {
},
kubernetes: {
dependencies: {
- services: [{ 'app-router': 'router' }, 'store', 'secret-mount-path', 'flash-messages'],
+ services: ['router', 'store', 'secret-mount-path', 'flash-messages'],
externalRoutes: {
secrets: 'vault.cluster.secrets.backends',
},
@@ -80,14 +70,7 @@ export default class App extends Application {
},
ldap: {
dependencies: {
- services: [
- { 'app-router': 'router' },
- 'store',
- 'pagination',
- 'secret-mount-path',
- 'flash-messages',
- 'auth',
- ],
+ services: ['router', 'store', 'secret-mount-path', 'flash-messages', 'auth'],
externalRoutes: {
secrets: 'vault.cluster.secrets.backends',
},
@@ -101,10 +84,9 @@ export default class App extends Application {
'download',
'flash-messages',
'namespace',
- { 'app-router': 'router' },
+ 'router',
'secret-mount-path',
'store',
- 'pagination',
'version',
],
externalRoutes: {
@@ -121,10 +103,9 @@ export default class App extends Application {
'flash-messages',
'namespace',
'path-help',
- { 'app-router': 'router' },
+ 'router',
'secret-mount-path',
'store',
- 'pagination',
'version',
],
externalRoutes: {
@@ -136,7 +117,7 @@ export default class App extends Application {
},
sync: {
dependencies: {
- services: ['flash-messages', 'flags', { 'app-router': 'router' }, 'store', 'pagination', 'version'],
+ services: ['flash-messages', 'flags', 'router', 'store', 'version'],
externalRoutes: {
kvSecretOverview: 'vault.cluster.secrets.backend.kv.secret.index',
clientCountOverview: 'vault.cluster.clients',
diff --git a/ui/app/components/alphabet-edit.hbs b/ui/app/components/alphabet-edit.hbs
index 0318ee17c5ea..f35121b10caf 100644
--- a/ui/app/components/alphabet-edit.hbs
+++ b/ui/app/components/alphabet-edit.hbs
@@ -30,7 +30,7 @@
{{#if (eq this.mode "show")}}
- {{#if this.model.updatePath.canDelete}}
+ {{#if this.capabilities.canDelete}}
{{/if}}
- {{#if this.model.updatePath.canUpdate}}
+ {{#if this.capabilities.canUpdate}}
{
this.flashMessages.success(`Successfully distributed key ${key} to ${provider}`);
// update keys on provider model
- this.pagination.clearDataset('keymgmt/key');
+ this.store.clearDataset('keymgmt/key');
const providerModel = this.store.peekRecord('keymgmt/provider', provider);
providerModel.fetchKeys(providerModel.keys?.meta?.currentPage || 1);
this.args.onClose();
diff --git a/ui/app/components/mfa/mfa-login-enforcement-form.js b/ui/app/components/mfa/mfa-login-enforcement-form.js
index 31450b266780..d16603f823ed 100644
--- a/ui/app/components/mfa/mfa-login-enforcement-form.js
+++ b/ui/app/components/mfa/mfa-login-enforcement-form.js
@@ -51,8 +51,6 @@ export default class MfaLoginEnforcementForm extends Component {
@tracked authMethods = [];
@tracked modelErrors;
- mfaMethods = []; // does not change after initial fetch, thus not tracked
-
constructor() {
super(...arguments);
// aggregate different target array properties on model into flat list
@@ -61,7 +59,6 @@ export default class MfaLoginEnforcementForm extends Component {
this.resetTargetState();
// only auth method types that have mounts can be selected as targets -- fetch from sys/auth and map by type
this.fetchAuthMethods();
- this.fetchMfaMethods();
}
async flattenTargets() {
@@ -94,12 +91,7 @@ export default class MfaLoginEnforcementForm extends Component {
}
async fetchAuthMethods() {
const mounts = await this.store.findAll('auth-method');
- this.authMethods = mounts.map((auth) => auth.type).uniq();
- }
-
- async fetchMfaMethods() {
- // mfa_methods is a hasMany on the model, thus returning a PromiseProxyArray. Before it can be accessed on the template we need to resolve it first.
- this.mfaMethods = await this.args.model.mfa_methods;
+ this.authMethods = mounts.map((auth) => auth.type);
}
get selectedTarget() {
diff --git a/ui/app/components/mount-backend-form.hbs b/ui/app/components/mount-backend-form.hbs
index 2aa615429be3..8cd2b888381e 100644
--- a/ui/app/components/mount-backend-form.hbs
+++ b/ui/app/components/mount-backend-form.hbs
@@ -5,7 +5,7 @@
-
-{{#if (and (not-eq this.model.id "root") (or this.model.canUpdate this.model.canDelete))}}
+{{#if (and (not-eq this.model.id "root") (or this.capabilities.canUpdate this.capabilities.canDelete))}}
- {{#if (and (not-eq this.model.id "default") this.model.canDelete)}}
+ {{#if (and (not-eq this.model.id "default") this.capabilities.canDelete)}}
- {{#if (and (not-eq this.model.id "root") (or this.model.canUpdate this.model.canDelete))}}
+ {{#if (and (not-eq this.model.id "root") (or this.capabilities.canUpdate this.capabilities.canDelete))}}
-
- View configuration
-
+
{{#if (not-eq backend.type "cubbyhole")}}
Disable
+ />
{{/if}}
diff --git a/ui/app/utils/model-helpers/database-helpers.js b/ui/app/utils/database-helpers.js
similarity index 100%
rename from ui/app/utils/model-helpers/database-helpers.js
rename to ui/app/utils/database-helpers.js
diff --git a/ui/app/utils/model-helpers/kmip-role-fields.js b/ui/app/utils/model-helpers/kmip-role-fields.js
deleted file mode 100644
index 3503ad58f449..000000000000
--- a/ui/app/utils/model-helpers/kmip-role-fields.js
+++ /dev/null
@@ -1,27 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { removeManyFromArray } from 'vault/helpers/remove-from-array';
-
-export const operationFields = (fieldNames) => {
- if (!Array.isArray(fieldNames)) {
- throw new Error('fieldNames must be an array');
- }
- return fieldNames.filter((key) => key.startsWith('operation'));
-};
-
-export const operationFieldsWithoutSpecial = (fieldNames) => {
- const opFields = operationFields(fieldNames);
- return removeManyFromArray(opFields, ['operationAll', 'operationNone']);
-};
-
-export const nonOperationFields = (fieldNames) => {
- const opFields = operationFields(fieldNames);
- return removeManyFromArray(fieldNames, opFields);
-};
-
-export const tlsFields = () => {
- return ['tlsClientKeyBits', 'tlsClientKeyType', 'tlsClientTtl'];
-};
diff --git a/ui/app/utils/openapi-helpers.ts b/ui/app/utils/openapi-helpers.ts
index 91675bf580da..115548c394b5 100644
--- a/ui/app/utils/openapi-helpers.ts
+++ b/ui/app/utils/openapi-helpers.ts
@@ -3,10 +3,11 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { debug } from '@ember/debug';
-import { camelize, capitalize, dasherize } from '@ember/string';
+import { dasherize } from '@ember/string';
import { singularize } from 'ember-inflector';
+// TODO: Consolidate with openapi-to-attrs once it's typescript
+
interface Path {
path: string;
itemType: string;
@@ -166,200 +167,3 @@ export function getHelpUrlForModel(modelType: string, backend: string) {
if (!urlFn) return null;
return urlFn(backend);
}
-
-interface Attribute {
- name: string;
- type: string | undefined;
- options: {
- editType?: string;
- fieldGroup?: string;
- fieldValue?: string;
- label?: string;
- readonly?: boolean;
- };
-}
-
-interface OpenApiProp {
- description: string;
- type: string;
- 'x-vault-displayAttrs': {
- name: string;
- value: string | number;
- group: string;
- sensitive: boolean;
- editType?: string;
- description?: string;
- };
- items?: { type: string };
- format?: string;
- isId?: boolean;
- deprecated?: boolean;
- enum?: string[];
-}
-interface MixedAttr {
- type?: string;
- helpText?: string;
- editType?: string;
- fieldGroup: string;
- fieldValue?: string;
- label?: string;
- readonly?: boolean;
- possibleValues?: string[];
- defaultValue?: string | number | (() => string | number);
- sensitive?: boolean;
- readOnly?: boolean;
- [key: string]: unknown;
-}
-
-export const expandOpenApiProps = function (props: Record): Record {
- const attrs: Record = {};
- // expand all attributes
- for (const propName in props) {
- const prop = props[propName];
- if (!prop) continue;
- let { description, items, type, format, isId, deprecated } = prop;
- if (deprecated === true) {
- continue;
- }
- let {
- name,
- value,
- group,
- sensitive,
- editType,
- description: displayDescription,
- } = prop['x-vault-displayAttrs'] || {};
-
- if (type === 'integer') {
- type = 'number';
- }
-
- if (displayDescription) {
- description = displayDescription;
- }
-
- editType = editType || type;
-
- if (format === 'seconds' || format === 'duration') {
- editType = 'ttl';
- } else if (items) {
- editType = items.type + capitalize(type);
- }
-
- const attrDefn: MixedAttr = {
- editType,
- helpText: description,
- possibleValues: prop['enum'],
- fieldValue: isId ? 'mutableId' : undefined,
- fieldGroup: group || 'default',
- readOnly: isId,
- defaultValue: value || undefined,
- };
-
- if (type === 'object' && !!value) {
- attrDefn.defaultValue = () => {
- return value;
- };
- }
-
- if (sensitive) {
- attrDefn.sensitive = true;
- }
-
- // only set a label if we have one from OpenAPI
- // otherwise the propName will be humanized by the form-field component
- if (name) {
- attrDefn.label = name;
- }
-
- // ttls write as a string and read as a number
- // so setting type on them runs the wrong transform
- if (editType !== 'ttl' && type !== 'array') {
- attrDefn.type = type;
- }
-
- // loop to remove empty vals
- for (const attrProp in attrDefn) {
- if (attrDefn[attrProp] == null) {
- delete attrDefn[attrProp];
- }
- }
- attrs[camelize(propName)] = attrDefn;
- }
- return attrs;
-};
-
-/**
- * combineOpenApiAttrs takes attributes defined on an existing models
- * and adds in the attributes found on an OpenAPI response. The values
- * defined on the model should take precedence so we can overwrite
- * attributes from OpenAPI.
- */
-export const combineOpenApiAttrs = function (
- oldAttrs: Map,
- openApiProps: Record
-) {
- const allAttrs: Record = {};
- const attrsArray: Attribute[] = [];
- const newFields: string[] = [];
-
- // First iterate over all the existing attrs and combine with recieved props, if they exist
- oldAttrs.forEach(function (oldAttr, name) {
- const attr: Attribute = { name, type: oldAttr.type, options: oldAttr.options };
- const openApiProp = openApiProps[name];
- if (openApiProp) {
- const { type, ...options } = openApiProp;
- // TODO: previous behavior took the openApi type no matter what
- attr.type = oldAttr.type ?? type;
- if (oldAttr.type && type && type !== oldAttr.type) {
- debug(`mismatched type for ${name} -- ${type} vs ${oldAttr.type}`);
- }
- attr.options = { ...options, ...oldAttr.options };
- }
- attrsArray.push(attr);
- // add to all attrs so we skip in the next part
- allAttrs[name] = true;
- });
-
- // then iterate over all the new props and add them if they haven't already been accounted for
- for (const name in openApiProps) {
- // iterate over each
- if (allAttrs[name]) {
- continue;
- } else {
- const prop = openApiProps[name];
- if (prop) {
- const { type, ...options } = prop;
- newFields.push(name);
- attrsArray.push({ name, type, options });
- }
- }
- }
- return { attrs: attrsArray, newFields };
-};
-
-// interface FieldGroups {
-// default: string[];
-// [key: string]: string[];
-// }
-
-// export const combineFieldGroups = function (
-// currentGroups: Array>,
-// newFields: string[],
-// excludedFields: string[]
-// ) {
-// console.log({ currentGroups, newFields, excludedFields });
-// let allFields: string[] = [];
-// for (const group of currentGroups) {
-// const fields = Object.values(group)[0] || [];
-// allFields = allFields.concat(fields);
-// }
-// const otherFields = newFields.filter((field) => {
-// return !allFields.includes(field) && !excludedFields.includes(field);
-// });
-// if (otherFields.length) {
-// currentGroups[0].default = currentGroups[0].default.concat(otherFields);
-// }
-
-// return currentGroups;
-// };
diff --git a/ui/app/utils/openapi-to-attrs.js b/ui/app/utils/openapi-to-attrs.js
index 899a8946d18e..31406b47e117 100644
--- a/ui/app/utils/openapi-to-attrs.js
+++ b/ui/app/utils/openapi-to-attrs.js
@@ -3,14 +3,119 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-/**
- * combineFieldGroups takes the newFields returned from OpenAPI and adds them to the default field group
- * if they are not already accounted for in other field groups
- * @param {Record[]} currentGroups Field groups, as an array of objects like: [{ default: [] }, { 'TLS options': [] }]
- * @param {string[]} newFields
- * @param {string[]} excludedFields
- * @returns modified currentGroups
- */
+import { attr } from '@ember-data/model';
+import { camelize, capitalize } from '@ember/string';
+
+export const expandOpenApiProps = function (props) {
+ const attrs = {};
+ // expand all attributes
+ for (const propName in props) {
+ const prop = props[propName];
+ let { description, items, type, format, isId, deprecated } = prop;
+ if (deprecated === true) {
+ continue;
+ }
+ let {
+ name,
+ value,
+ group,
+ sensitive,
+ editType,
+ description: displayDescription,
+ } = prop['x-vault-displayAttrs'] || {};
+
+ if (type === 'integer') {
+ type = 'number';
+ }
+
+ if (displayDescription) {
+ description = displayDescription;
+ }
+
+ editType = editType || type;
+
+ if (format === 'seconds' || format === 'duration') {
+ editType = 'ttl';
+ } else if (items) {
+ editType = items.type + capitalize(type);
+ }
+
+ const attrDefn = {
+ editType,
+ helpText: description,
+ possibleValues: prop['enum'],
+ fieldValue: isId ? 'mutableId' : null,
+ fieldGroup: group || 'default',
+ readOnly: isId,
+ defaultValue: value || null,
+ };
+
+ if (type === 'object' && !!value) {
+ attrDefn.defaultValue = () => {
+ return value;
+ };
+ }
+
+ if (sensitive) {
+ attrDefn.sensitive = true;
+ }
+
+ // only set a label if we have one from OpenAPI
+ // otherwise the propName will be humanized by the form-field component
+ if (name) {
+ attrDefn.label = name;
+ }
+
+ // ttls write as a string and read as a number
+ // so setting type on them runs the wrong transform
+ if (editType !== 'ttl' && type !== 'array') {
+ attrDefn.type = type;
+ }
+
+ // loop to remove empty vals
+ for (const attrProp in attrDefn) {
+ if (attrDefn[attrProp] == null) {
+ delete attrDefn[attrProp];
+ }
+ }
+ attrs[camelize(propName)] = attrDefn;
+ }
+ return attrs;
+};
+
+export const combineAttributes = function (oldAttrs, newProps) {
+ const newAttrs = {};
+ const newFields = [];
+ if (oldAttrs) {
+ oldAttrs.forEach(function (value, name) {
+ if (newProps[name]) {
+ newAttrs[name] = attr(newProps[name].type, { ...newProps[name], ...value.options });
+ } else {
+ newAttrs[name] = attr(value.type, value.options);
+ }
+ });
+ }
+ for (const prop in newProps) {
+ if (newAttrs[prop]) {
+ continue;
+ } else {
+ newAttrs[prop] = attr(newProps[prop].type, newProps[prop]);
+ newFields.push(prop);
+ }
+ }
+ return { attrs: newAttrs, newFields };
+};
+
+export const combineFields = function (currentFields, newFields, excludedFields) {
+ const otherFields = newFields.filter((field) => {
+ return !currentFields.includes(field) && !excludedFields.includes(field);
+ });
+ if (otherFields.length) {
+ currentFields = currentFields.concat(otherFields);
+ }
+ return currentFields;
+};
+
export const combineFieldGroups = function (currentGroups, newFields, excludedFields) {
let allFields = [];
for (const group of currentGroups) {
diff --git a/ui/app/utils/model-helpers/validators.js b/ui/app/utils/validators.js
similarity index 100%
rename from ui/app/utils/model-helpers/validators.js
rename to ui/app/utils/validators.js
diff --git a/ui/config/deprecation-workflow.js b/ui/config/deprecation-workflow.js
index 90516f7ad170..cf343738e691 100644
--- a/ui/config/deprecation-workflow.js
+++ b/ui/config/deprecation-workflow.js
@@ -5,11 +5,20 @@
/* global self */
self.deprecationWorkflow = self.deprecationWorkflow || {};
-self.deprecationWorkflow.config = {
- throwOnUnhandled: false,
-};
-
+//self.deprecationWorkflow.config = {
+//throwOnUnhandled: true
+//}
self.deprecationWorkflow.config = {
// current output from deprecationWorkflow.flushDeprecations();
- workflow: [],
+ // deprecations that will not be removed until 5.0.0 are filtered by deprecation-filter initializer rather than silencing below
+ workflow: [
+ { handler: 'silence', matchId: 'ember-data:model-save-promise' },
+ { handler: 'silence', matchId: 'ember-engines.deprecation-camelized-engine-names' },
+ { handler: 'silence', matchId: 'ember-engines.deprecation-router-service-from-host' },
+ { handler: 'silence', matchId: 'ember-modifier.use-modify' },
+ { handler: 'silence', matchId: 'ember-modifier.no-element-property' },
+ { handler: 'silence', matchId: 'ember-modifier.no-args-property' },
+ { handler: 'silence', matchId: 'ember-cli-mirage-config-routes-only-export' },
+ { handler: 'silence', matchId: 'setting-on-hash' },
+ ],
};
diff --git a/ui/docs/client-pagination.md b/ui/docs/client-pagination.md
index 44d425f5932c..7fd0197c6c16 100644
--- a/ui/docs/client-pagination.md
+++ b/ui/docs/client-pagination.md
@@ -1,10 +1,10 @@
# Client-side pagination
-Our custom `pagination` service allows us to paginate LIST responses while maintaining good performance, particularly when the LIST response includes tens of thousands of keys in the data response. It does this by caching the entire response, and then filtering the full response into the datastore for the client. It was originally a custom method in our `store` service that extended the ember-data `store` but now is it's own `pagination` service.
+Our custom extended `store` service allows us to paginate LIST responses while maintaining good performance, particularly when the LIST response includes tens of thousands of keys in the data response. It does this by caching the entire response, and then filtering the full response into the datastore for the client.
## Using pagination
-Rather than use `store.query`, use `pagination.lazyPaginatedQuery`. It generally uses the same inputs, but accepts additional keys in the query object `size`, `page`, `responsePath`, `pageFilter`
+Rather than use `store.query`, use `store.lazyPaginatedQuery`. It generally uses the same inputs, but accepts additional keys in the query object `size`, `page`, `responsePath`, `pageFilter`
### Before
@@ -23,12 +23,12 @@ export default class ExampleRoute extends Route {
```js
export default class ExampleRoute extends Route {
- @service pagination;
+ @service store;
model(params) {
const { page, pageFilter, secret } = params;
const { backend } = this.paramsFor('vault.cluster.secrets.backend');
- return this.pagination.lazyPaginatedQuery('secret', {
+ return this.store.lazyPaginatedQuery('secret', {
backend,
id: secret,
size,
@@ -47,11 +47,11 @@ In order to interrupt the regular serialization when using `lazyPaginatedData`,
## Gotchas
-The data is cached from whenever the original API call is made, which means that if a user views a list and then creates or deletes an item, viewing the list page again will show outdated information unless the cache for the item is cleared first. For this reason, it is best practice to clear the dataset with `pagination.clearDataset(modelName)` after successfully deleting or creating an item.
+The data is cached from whenever the original API call is made, which means that if a user views a list and then creates or deletes an item, viewing the list page again will show outdated information unless the cache for the item is cleared first. For this reason, it is best practice to clear the dataset with `store.clearDataset(modelName)` after successfully deleting or creating an item.
## How it works
-When using the `lazyPaginatedQuery` method, the full response is cached in a [tracked Map](https://github.com/tracked-tools/tracked-built-ins/tree/master) within the service. `pagination.lazyCaches` is actually a Map of [Maps](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map), keyed first on the normalized modelType and then on a stringified version of the base query (all keys except ones related to pagination). So, at the top level `pagination.lazyCaches` looks like this:
+When using the `lazyPaginatedQuery` method, the full response is cached in a [tracked Map](https://github.com/tracked-tools/tracked-built-ins/tree/master) within the service. `store.lazyCaches` is actually a Map of [Maps](https://developer.mozilla.org/en-US/docs/Web/JavaScript/Reference/Global_Objects/Map), keyed first on the normalized modelType and then on a stringified version of the base query (all keys except ones related to pagination). So, at the top level `store.lazyCaches` looks like this:
```
lazyCaches = new Map({
@@ -61,7 +61,7 @@ lazyCaches = new Map({
})
```
-Within each top-level modelType, we need to separate cached responses based on the details of the query. Typically (but not always) this includes the backend name. In list items that can be nested (see KV V2 secrets or namespaces for example) `id` is also provided, so that the keys nested under the given ID is returned. The pagination.lazyCaches may look something like the following after a user navigates to a couple different KV v2 lists, and clicks into the `app/` item:
+Within each top-level modelType, we need to separate cached responses based on the details of the query. Typically (but not always) this includes the backend name. In list items that can be nested (see KV V2 secrets or namespaces for example) `id` is also provided, so that the keys nested under the given ID is returned. The store.lazyCaches may look something like the following after a user navigates to a couple different KV v2 lists, and clicks into the `app/` item:
```
lazyCaches = new Map({
diff --git a/ui/docs/ember-engines.md b/ui/docs/ember-engines.md
index 471e9f0d3f6b..56cd5841e394 100644
--- a/ui/docs/ember-engines.md
+++ b/ui/docs/ember-engines.md
@@ -96,7 +96,7 @@ export default class Engine extends Engine {
modulePrefix = modulePrefix;
Resolver = Resolver;
dependencies = {
- services: ['app-router', 'store', 'secret-mount-path', 'flash-messages'],
+ services: ['router', 'store', 'secret-mount-path', 'flash-messages'],
externalRoutes: ['secrets'],
};
}
@@ -128,7 +128,7 @@ The external route dependencies allow you to link to a route outside of your eng
## Register your engine with our main application:
-In our `app/app.js` file in the engines object, add your engine’s name and dependencies. The `router` service must be referenced via an alias within engines. The pattern is to use `app-router` as the alias, see example below.
+In our `app/app.js` file in the engines object, add your engine’s name and dependencies.
```js
/**
@@ -146,7 +146,7 @@ export default class App extends Application {
engines = {
: {
dependencies: {
- services: [{ 'app-router': 'router' }, 'store', 'secret-mount-path', 'flash-messages', ],
+ services: ['router', 'store', 'secret-mount-path', 'flash-messages', ],
externalRoutes: {
secrets: 'vault.cluster.secrets.backends',
},
diff --git a/ui/docs/model-validations.md b/ui/docs/model-validations.md
index 622aa7debb75..ad0ce0dace6a 100644
--- a/ui/docs/model-validations.md
+++ b/ui/docs/model-validations.md
@@ -1,33 +1,34 @@
# Model Validations Decorator
-
+
The model-validations decorator provides a method on a model class which may be used for validating properties based on a provided rule set.
## API
The decorator expects a validations object as the only argument with the following shape:
-```js
+``` js
const validations = {
- [propertyKeyName]: [{ type, options, message, level, validator }],
+ [propertyKeyName]: [
+ { type, options, message, level, validator }
+ ]
};
```
-
**propertyKeyName** [string] - each key in the validations object should refer to the property on the class to apply the validation to.
-
-**type** [string] - the type of validation to apply. These must be exported from the [validators util](../app/utils/model-helpers/validators.js) for lookup. Type is required if a _validator_ function is not provided.
+
+**type** [string] - the type of validation to apply. These must be exported from the [validators util](../app/utils/validators.js) for lookup. Type is required if a *validator* function is not provided.
**options** [object] - an optional object for the given validator -- min, max, nullable etc.
**message** [string | function] - string added to the errors array and returned in the state object from the validate method if validation fails. A function may also be provided with the model as the lone argument that returns a string. Since this value is typically displayed to the user it should be a complete sentence with proper punctuation.
-**level** [string] _optional_ - string that defaults to 'error'. Currently the only other accepted value is 'warn'.
+**level** [string] *optional* - string that defaults to 'error'. Currently the only other accepted value is 'warn'.
-**validator** [function] _optional_ - a function that may be used in place of type that is invoked in the validate method. This is useful when specific validations are needed which may be dependent on other class properties.
+**validator** [function] *optional* - a function that may be used in place of type that is invoked in the validate method. This is useful when specific validations are needed which may be dependent on other class properties.
This function takes the class context (this) as the only argument and returns true or false.
## Usage
-Each property defined in the validations object supports multiple validations provided as an array. For example, _presence_ and _containsWhiteSpace_ can both be added as validations for a string property.
+Each property defined in the validations object supports multiple validations provided as an array. For example, *presence* and *containsWhiteSpace* can both be added as validations for a string property.
```js
const validations = {
@@ -40,7 +41,6 @@ const validations = {
],
};
```
-
Decorate the model class and pass the validations object as the argument
```js
@@ -48,7 +48,9 @@ import Model, { attr } from '@ember-data/model';
import withModelValidations from 'vault/decorators/model-validations';
const validations = {
- name: [{ type: 'presence', message: 'Name is required.' }],
+ name: [
+ { type: 'presence', message: 'Name is required.' },
+ ],
};
@withModelValidations(validations)
@@ -70,13 +72,12 @@ if (isValid) {
this.errors = state;
}
```
-
**isValid** [boolean] - the validity of the full class. If no properties provided in the validations object are invalid this will be true.
-**state** [object] - the error state of the properties defined in the validations object. This object is keyed by the property names from the validations object and each property contains an _isValid_ and _errors_ value. The _errors_ array will be populated with messages defined in the validations object when validations fail. Since a property can have multiple validations, errors is always returned as an array.
+**state** [object] - the error state of the properties defined in the validations object. This object is keyed by the property names from the validations object and each property contains an *isValid* and *errors* value. The *errors* array will be populated with messages defined in the validations object when validations fail. Since a property can have multiple validations, errors is always returned as an array.
**invalidFormMessage** [string] - message describing the number of errors currently present on the model class.
-
+
```js
const { state } = model.validate();
const { isValid, errors } = state[propertyKeyName];
@@ -86,18 +87,17 @@ if (!isValid) {
```
## Examples
-
### Basic
```js
const validations = {
- foo: [{ type: 'presence', message: 'foo is a required field.' }],
+ foo: [
+ { type: 'presence', message: 'foo is a required field.' }
+ ],
};
@withModelValidations(validations)
-class SomeModel extends Model {
- foo = null;
-}
+class SomeModel extends Model { foo = null; }
const model = new SomeModel();
const { isValid, state } = model.validate();
@@ -106,17 +106,14 @@ console.log(isValid); // false
console.log(state.foo.isValid); // false
console.log(state.foo.errors); // ['foo is a required field']
```
-
### Custom validator
```js
const validations = {
- foo: [
- {
- validator: (model) => (model.bar.includes('test') ? model.foo : false),
- message: 'foo is required if bar includes test.',
- },
- ],
+ foo: [{
+ validator: (model) => model.bar.includes('test') ? model.foo : false,
+ message: 'foo is required if bar includes test.'
+ }],
};
@withModelValidations(validations)
@@ -142,7 +139,7 @@ console.log(state.foo.errors); // []
### Adding class in template based on validation state
-All form validation errors must have a red border around them. Add this by adding a conditional class _has-error-border_ to the element.
+All form validation errors must have a red border around them. Add this by adding a conditional class *has-error-border* to the element.
```js
@action
@@ -158,5 +155,5 @@ async save() {
```
```hbs
-
-```
+
+```
\ No newline at end of file
diff --git a/ui/docs/models.md b/ui/docs/models.md
index e6c94f74bef8..a13a56cbcf2c 100644
--- a/ui/docs/models.md
+++ b/ui/docs/models.md
@@ -4,252 +4,17 @@
- [Models](#models)
- - [Intro](#intro)
- - [Model patterns overview](#model-patterns-overview)
- - [Patterns](#patterns)
- - [Attributes \& field groups](#attributes--field-groups)
- - [Attributes \& field groups example](#attributes--field-groups-example)
- - [Validations](#validations)
- - [@withModelValidations()](#withmodelvalidations)
- - [Capabilities](#capabilities)
- - [Examples](#examples)
- - [Models hydrated by OpenAPI](#models-hydrated-by-openapi)
- - [Using Decorators](#using-decorators)
+ - [Capabilities](#capabilities)
+ - [Decorators](#decorators)
- [@withFormFields()](#withformfields)
+ - [@withModelValidations()](#withmodelvalidations)
-## Intro
+## Capabilities
-We use models primarily as the backing data layer for our forms and for our list/show views. As Ember-Data has matured, our patterns of usage have become outdated. This document serves to outline our current best-practices, since examples within the codebase are often out of date and do not always reflect our best practices or ambitions.
-
-## Model patterns overview
-
-Models can be thought of as the shape of data that an instance of that Model -- a Record -- will have. Models should be as "thin" as possible, holding only data directly relevant to the Record itself. For example, if we have a Model `user` with attributes `firstName` and `lastName`, it _is_ appropriate to have a getter on the Model called `fullName`, because its attributes can be calculated directly from the record's values, and is relevant to the Record itself. However it is _not_ appropriate to store data like which fields are shown on the edit form, because that has no bearing on the Record itself. Field values are a display concern, not related to the values of the record.
-
-Other patterns and where they belong in relation to the Model:
-
-- **Attribute metadata** - this is referring to information defined on a Model's attributes, such as label, edit type, and other information relevant to both forms and the given attribute. We use these heavily in the `FormField` component to show the correct label, help text, and input type. Conceptually, this does not belong on a Model (because the information is not directly related to the data in a Record) but, since we leverage OpenAPI heavily to populate both attributes and their metadata, we are going to keep attribute metadata defined on the attribute in the Model. **TL;DR: Lives on Model**
-
-- **Form and show fields** - the grouping and order of fields that should display on both show routes and create/edit forms, while conceptually related to the Model, is not related to an individual record. Therefore, this information should not be defined on the Model (which has been our previous pattern). To support migration, we have a few helpful decorators and patterns. **TL;DR: Lives in component or model-helper util files**
-
-- **Validations** - While an argument can go either way about this one, we are going to continue defining these on the Model using our handy [withModelValidation decorator](#withmodelvalidations). The state of validation is directly correlated to a given Record which is a strong reason to keep it on the Model. **TL;DR: Lives on Model**
-
-- **[Capabilities](#capabilities)** - Capabilities are calculated by fetching permissions by path -- often multiple paths, based on the same information we need to fetch the Record data (eg. backend, ID). When using `lazyCapabilities` on the model we kick off one API request for each of the paths we need, while using the capabilities service `fetchMultiplePaths` method we can make one request with all the required paths included. Our best practice is to fetch capabilities outside of the Model (perhaps as part of a route model, or on a user action such as dropdown click open). A downside to this approach is that the API may get re-requested per page we check the capability (eg. list view dropdown and then detail view) -- but we can optimize this in the future by first checking the store for `capabilities` of matching path/ID before sending the API request. **TL;DR: Lives in route or component where they are used**
-
-## Patterns
-
-### Attributes & field groups
-
-We use attributes defined on the Model to determine input concerns (label, input type, help text) and field groups to determine the order of the attribute data on the form and detail pages, and are defined in the component they are used in or in a `utils/model-helpers/*` file.
-
-#### Attributes & field groups example
-
-In this example, we have a Model `simple-timer` with a few attributes defined. The `withExpandedAttributes` helper adds a couple items to the Model it's applied to:
-
-- allByKey - a getter which returns all the attributes as keys of an object, and the value is the metadata of the attribute including anything returned from OpenAPI if the model is included in `OPENAPI_POWERED_MODELS`.
-- \_expandGroups - takes an array of group objects and expands the attribute keys into the metadata
-
-In the component where we pass a Record of this Model, we can see how we use it to populate either a flat array of attributes for use in the show view, or to populate groups of fields for rendering on a form.
-
-```js
-// models/simple-timer.js
-@withExpandedAttributes()
-export default class SimpleTimer extends Model {
- @attr('string', {
- editType: 'ttl',
- defaultValue: '3600s',
- label: 'TTL',
- helpText: 'Here is some help text',
- })
- ttl;
-
- @attr('string') name;
- @attr('boolean') restartable; // enterprise only
-}
-```
-
-```js
-// components/simple-timer-display.ts
-export default class SimpleTimerDisplay extends Component {
- @service declare readonly version: VersionService;
-
- // these fields are shown flat in the show mode, iterated over
- // and used in InfoTableRow
- get showFields() {
- let fields = ['name', 'ttl'];
- if (this.version.isEnterprise) {
- fields.push('restartable');
- }
- return fields.map((field) => this.args.model.allByKey[field]);
- }
-
- // these fields are shown grouped in edit mode and is formatted
- // to be used in something like FormFieldGroups
- get fieldGroups() {
- let groups = [{ default: ['name', 'ttl'] }];
- if (this.version.isEnterprise) {
- groups[{ 'Custom options': ['restartable'] }];
- }
- return this.args.model._expandGroups(groups);
- }
-}
-```
-
-### Validations
-
-Validations on used on forms, to present the user with feedback about their form answers before sending the payload to the API. Our best practices are:
-
-- define the validations using the `withModelValidations` decorator
-- trigger the `validate()` method added by the decorator on form submit
-- if there are validation errors:
- - show a message at the bottom of the form saying there were errors with the form
- - add inline-alert next to the inputs that have incorrect data
- - exit the form submit function early
- - do not disable the submit button
-- if there are no validation errors, continue saving as normal
-
-#### [@withModelValidations()](../app/decorators/model-validations.js)
-
-This decorator:
-
-- Adds `validate()` method on model to check attributes are valid before making an API request
-- Provides option to write a custom validation, or use validation method from the [validators util](../app/utils/model-helpers/validators.js) which is referenced by the `type` key
-- Option to add `level: 'warn'` to draw user attention to the input, without preventing form submission
-- Component example [here](../lib/pki/addon/components/pki-generate-root.ts)
-
-```js
-import { withModelValidations } from 'vault/decorators/model-validations';
-
-const validations = {
- // object key is the model's attribute name
- password: [{ type: 'presence', message: 'Password is required' }],
- keyName: [
- {
- validator(model) {
- return model.keyName === 'default' ? false : true;
- },
- message: `Key name cannot be the reserved value 'default'`,
- },
- ],
-};
-
-@withModelValidations(validations)
-export default class FooModel extends Model {
- @attr() password;
- @attr() keyName;
-}
-```
-
-```js
-// form-component.js
-export default class FormComponent extends Component {
- @tracked modelValidations = null;
- @tracked invalidFormAlert = '';
-
- checkFormValidity() {
- interface Validity {
- // only true if all of the state's isValid are also true
- isValid: boolean;
- state: {
- // state is keyed by the attribute names
- [key: string]: {
- errors: string[];
- warnings: string[];
- isValid: boolean;
- }
- }
- invalidFormMessage: string; // eg "There are 2 errors with this form"
- }
- // calling validate() returns Validity
- const { isValid, state, invalidFormMessage } = this.args.model.validate();
- this.modelValidations = state;
- this.invalidFormAlert = invalidFormMessage;
- return isValid;
- }
-
- @action
- submit() {
- // clear errors
- this.modelValidations = null;
- this.invalidFormAlert = null;
-
- // check validity
- const continueSave = this.checkFormValidity();
- if (!continueSave) return;
-
- // continue save ...
- }
-}
-```
-
-### Capabilities
-
-- The API will prevent users from performing disallowed actions, so adding capabilities is purely to improve UX by hiding actions we know the user cannot take. Because of this, we default to showing items if we cannot determine the capabilities for an endpoint.
-- Always test the capability works as expected (never assume the API path 🙂) -- the extra string interpolation can lead to sneaky typos and incorrect returns from the getters
-- Capabilities are checked via the `capabilities-self` endpoint, and registered in the store as a [capabilities Model](../app/models/capabilities.js), with the path as the Record's ID.
-- The path IDs on the capabilities Records should never include the namespace, but when operating within a namespace the paths in the API request payload must be prepended with the namespace so the API will return the proper capabilities (eg. for `kv/data/foo` in the `admin` namespace instead of root)
-- In general we want to check capabilities outside of the Model, but we have a patterns for both ways.
-
-#### Examples
-
-**Single capability check within a component**
-In [this example](../app/components/clients/page-header.js), we have an action that some users can take within the page header. Honestly this capability check could just have easily lived in the route's Model (since the PageHeader always renders on the relevant routes), but here it provides a good example of a check happening on component instantiation, using the args passed to the component:
-
-```js
-// clients/page-header.js
-constructor() {
- super(...arguments);
- this.getExportCapabilities(this.args.namespace);
-}
-
-async getExportCapabilities(ns = '') {
- try {
- const url = ns
- ? `${sanitizePath(ns)}/sys/internal/counters/activity/export`
- : 'sys/internal/counters/activity/export';
- const cap = await this.store.findRecord('capabilities', url);
- this.canDownload = cap.canSudo;
- } catch (e) {
- // if we can't read capabilities, default to show
- this.canDownload = true;
- }
-}
-```
-
-**Multiple capabilities checked at once**
-When there are multiple capabilities paths to check, the recommended approach is to use the [capabilities service's](../app/services/capabilities.ts) `fetchMultiplePaths` method. It will pass all the paths in a single API request instead of making a capabilities-self call for each path as the other techniques do. In [this example](../lib/kv/addon/routes/secret.js), we get the capabilities as part of the route's model hook and then return the relevant `can*` values:
-
-```js
-async fetchCapabilities(backend, path) {
- const metadataPath = `${backend}/metadata/${path}`;
- const dataPath = `${backend}/data/${path}`;
- const subkeysPath = `${backend}/subkeys/${path}`;
- const perms = await this.capabilities.fetchMultiplePaths([metadataPath, dataPath, subkeysPath]);
- // returns values keyed at the path
- return {
- metadata: perms[metadataPath],
- data: perms[dataPath],
- subkeys: perms[subkeysPath],
- };
-}
-
-async model() {
- const backend = this.secretMountPath.currentPath;
- const { name: path } = this.paramsFor('secret');
- const capabilities = await this.fetchCapabilities(backend, path);
- return hash({
- // ...
- canUpdateData: capabilities.data.canUpdate,
- canReadData: capabilities.data.canRead,
- canReadMetadata: capabilities.metadata.canRead,
- canDeleteMetadata: capabilities.metadata.canDelete,
- canUpdateMetadata: capabilities.metadata.canUpdate,
- });
-}
-```
-
-Lastly, we have an example that is common but a pattern that we want to move away from: using `lazyCapabilities` on a Model. The `lazyCapabilities` macro only fetches the capabilities when the attribute is invoked -- so in the example below, only when `canRead` is rendered on the template will the capablities-self call be kicked off.
+- The API will prevent users from performing disallowed actions, so adding capabilities is purely to improve UX
+- Always test the capability works as expected (never assume the API path 🙂) — the extra string interpolation can lead to sneaky typos and incorrect returns from the getters
```js
import lazyCapabilities, { apiPath } from 'vault/macros/lazy-capabilities';
@@ -272,20 +37,13 @@ export default class FooModel extends Model {
}
```
-### Models hydrated by OpenAPI
-
-In a Model which is hydrated by OpenAPI, it can be cumbersome to keep up with all the changes made by the backend. One pattern available to us is the [`combineFieldGroups`](../app/utils/openapi-to-attrs.js) method, which
-
----
-
-## Using Decorators
+## Decorators
### [@withFormFields()](../app/decorators/model-form-fields.js)
- Sets `allFields`, `formFields` and/or `formFieldGroups` properties on a model class
- `allFields` includes every model attribute (regardless of args passed to decorator)
- `formFields` and `formFieldGroups` only exist if the relevant arg is passed to the decorator
-- `type` of validator should match the keys in [model-helpers/validators.js](../app/utils/model-helpers/validators.js)
```js
import { withFormFields } from 'vault/decorators/model-form-fields';
@@ -348,3 +106,48 @@ model.formFieldGroups = [
},
];
```
+
+### [@withModelValidations()](../app/decorators/model-validations.js)
+
+- Adds `validate()` method on model to check attributes are valid before making an API request
+- Option to write a custom validation, or use validation method from the [validators util](../app/utils/validators.js) which is referenced by the `type` key
+- Option to add `level: 'warn'` to draw user attention to the input, without preventing form submission
+- Component example [here](../lib/pki/addon/components/pki-generate-root.ts)
+
+```js
+import { withModelValidations } from 'vault/decorators/model-validations';
+
+const validations = {
+ // object key is the model's attribute name
+ password: [{ type: 'presence', message: 'Password is required' }],
+ keyName: [
+ {
+ validator(model) {
+ return model.keyName === 'default' ? false : true;
+ },
+ message: `Key name cannot be the reserved value 'default'`,
+ },
+ ],
+};
+
+@withModelValidations(validations)
+export default class FooModel extends Model {}
+
+// calling validate() returns an object:
+model.validate() = {
+ isValid: false,
+ state: {
+ password: {
+ errors: ['Password is required.'],
+ warnings: [],
+ isValid: false,
+ },
+ keyName: {
+ errors: ["Key name cannot be the reserved value 'default'"],
+ warnings: [],
+ isValid: false,
+ },
+ },
+ invalidFormMessage: 'There are 2 errors with this form.',
+};
+```
diff --git a/ui/lib/config-ui/addon/components/messages/page/create-and-edit.js b/ui/lib/config-ui/addon/components/messages/page/create-and-edit.js
index 8ba4c827a75e..b71ce31ba180 100644
--- a/ui/lib/config-ui/addon/components/messages/page/create-and-edit.js
+++ b/ui/lib/config-ui/addon/components/messages/page/create-and-edit.js
@@ -23,9 +23,8 @@ import { isAfter } from 'date-fns';
*/
export default class MessagesList extends Component {
- @service('app-router') router;
+ @service router;
@service store;
- @service pagination;
@service flashMessages;
@service customMessages;
@service namespace;
@@ -76,7 +75,7 @@ export default class MessagesList extends Component {
const { isNew } = this.args.message;
const { id, title } = yield this.args.message.save();
this.flashMessages.success(`Successfully ${isNew ? 'created' : 'updated'} ${title} message.`);
- this.pagination.clearDataset('config-ui/message');
+ this.store.clearDataset('config-ui/message');
this.customMessages.fetchMessages(this.namespace.path);
this.router.transitionTo('vault.cluster.config-ui.messages.message.details', id);
}
diff --git a/ui/lib/config-ui/addon/components/messages/page/details.js b/ui/lib/config-ui/addon/components/messages/page/details.js
index b09ad182dcf5..60d71ca78f7f 100644
--- a/ui/lib/config-ui/addon/components/messages/page/details.js
+++ b/ui/lib/config-ui/addon/components/messages/page/details.js
@@ -19,17 +19,17 @@ import errorMessage from 'vault/utils/error-message';
*/
export default class MessageDetails extends Component {
- @service('app-router') router;
+ @service store;
+ @service router;
@service flashMessages;
@service customMessages;
@service namespace;
- @service pagination;
@action
async deleteMessage() {
try {
+ this.store.clearDataset('config-ui/message');
await this.args.message.destroyRecord(this.args.message.id);
- this.pagination.clearDataset('config-ui/message');
this.router.transitionTo('vault.cluster.config-ui.messages');
this.customMessages.fetchMessages(this.namespace.path);
this.flashMessages.success(`Successfully deleted ${this.args.message.title}.`);
diff --git a/ui/lib/config-ui/addon/components/messages/page/list.hbs b/ui/lib/config-ui/addon/components/messages/page/list.hbs
index 9ec8b4b285df..9b7a7c99447e 100644
--- a/ui/lib/config-ui/addon/components/messages/page/list.hbs
+++ b/ui/lib/config-ui/addon/components/messages/page/list.hbs
@@ -113,13 +113,10 @@
data-test-popup-menu-trigger
/>
{{#if message.canEditCustomMessages}}
- Edit
+
{{/if}}
{{#if message.canDeleteCustomMessages}}
- Delete
+
{{/if}}
{{/if}}
diff --git a/ui/lib/config-ui/addon/components/messages/page/list.js b/ui/lib/config-ui/addon/components/messages/page/list.js
index 5361ee909efc..5b476d6afbf1 100644
--- a/ui/lib/config-ui/addon/components/messages/page/list.js
+++ b/ui/lib/config-ui/addon/components/messages/page/list.js
@@ -23,11 +23,11 @@ import errorMessage from 'vault/utils/error-message';
*/
export default class MessagesList extends Component {
- @service customMessages;
+ @service store;
+ @service router;
@service flashMessages;
@service namespace;
- @service pagination;
- @service('app-router') router;
+ @service customMessages;
@tracked showMaxMessageModal = false;
@tracked messageToDelete = null;
@@ -90,8 +90,8 @@ export default class MessagesList extends Component {
@task
*deleteMessage(message) {
try {
+ this.store.clearDataset('config-ui/message');
yield message.destroyRecord(message.id);
- this.pagination.clearDataset('config-ui/message');
this.router.transitionTo('vault.cluster.config-ui.messages');
this.customMessages.fetchMessages(this.namespace.path);
this.flashMessages.success(`Successfully deleted ${message.title}.`);
diff --git a/ui/lib/config-ui/addon/engine.js b/ui/lib/config-ui/addon/engine.js
index f8b1a0dbccdc..cd9cfb604c66 100644
--- a/ui/lib/config-ui/addon/engine.js
+++ b/ui/lib/config-ui/addon/engine.js
@@ -16,16 +16,7 @@ export default class ConfigUiEngine extends Engine {
modulePrefix = modulePrefix;
Resolver = Resolver;
dependencies = {
- services: [
- 'auth',
- 'store',
- 'pagination',
- 'flash-messages',
- 'namespace',
- 'app-router',
- 'version',
- 'custom-messages',
- ],
+ services: ['auth', 'store', 'flash-messages', 'namespace', 'router', 'version', 'custom-messages'],
};
}
diff --git a/ui/lib/config-ui/addon/routes/messages/index.js b/ui/lib/config-ui/addon/routes/messages/index.js
index de3a6e4be841..ee882b6c3d83 100644
--- a/ui/lib/config-ui/addon/routes/messages/index.js
+++ b/ui/lib/config-ui/addon/routes/messages/index.js
@@ -8,7 +8,7 @@ import { service } from '@ember/service';
import { hash } from 'rsvp';
export default class MessagesRoute extends Route {
- @service pagination;
+ @service store;
queryParams = {
page: {
@@ -38,7 +38,7 @@ export default class MessagesRoute extends Route {
if (status === 'active') active = true;
if (status === 'inactive') active = false;
- const messages = this.pagination
+ const messages = this.store
.lazyPaginatedQuery('config-ui/message', {
authenticated,
pageFilter: filter,
diff --git a/ui/lib/core/addon/components/confirm-action.hbs b/ui/lib/core/addon/components/confirm-action.hbs
index e66ee1eecb90..019d1246fce7 100644
--- a/ui/lib/core/addon/components/confirm-action.hbs
+++ b/ui/lib/core/addon/components/confirm-action.hbs
@@ -7,14 +7,13 @@
{{! Hds component renders
and
-
+
{{metadata.path}}
@@ -100,25 +100,35 @@
/>
{{#if metadata.pathIsDirectory}}
Content
+ />
{{else}}
Overview
- Secret data
+ />
+
{{#if metadata.canReadMetadata}}
-
- View version history
+
{{/if}}
{{#if metadata.canDeleteMetadata}}
Permanently delete
+ />
{{/if}}
{{/if}}
diff --git a/ui/lib/kv/addon/components/page/list.js b/ui/lib/kv/addon/components/page/list.js
index 327ab1f64a94..fe16726e1b41 100644
--- a/ui/lib/kv/addon/components/page/list.js
+++ b/ui/lib/kv/addon/components/page/list.js
@@ -26,8 +26,8 @@ import { pathIsDirectory } from 'kv/utils/kv-breadcrumbs';
export default class KvListPageComponent extends Component {
@service flashMessages;
- @service('app-router') router;
- @service pagination;
+ @service router;
+ @service store;
@tracked secretPath;
@tracked metadataToDelete = null; // set to the metadata intended to delete
@@ -57,7 +57,7 @@ export default class KvListPageComponent extends Component {
try {
// The model passed in is a kv/metadata model
await model.destroyRecord();
- this.pagination.clearDataset('kv/metadata'); // Clear out the pagination cache so that the metadata/list view is updated.
+ this.store.clearDataset('kv/metadata'); // Clear out the store cache so that the metadata/list view is updated.
const message = `Successfully deleted the metadata and all version data of the secret ${model.fullSecretPath}.`;
this.flashMessages.success(message);
// if you've deleted a secret from within a directory, transition to its parent directory.
diff --git a/ui/lib/kv/addon/components/page/secret/details.js b/ui/lib/kv/addon/components/page/secret/details.js
index 404a30bc9ec6..12adabd3506c 100644
--- a/ui/lib/kv/addon/components/page/secret/details.js
+++ b/ui/lib/kv/addon/components/page/secret/details.js
@@ -41,7 +41,7 @@ import { isAdvancedSecret } from 'core/utils/advanced-secret';
export default class KvSecretDetails extends Component {
@service flashMessages;
- @service('app-router') router;
+ @service router;
@service store;
@tracked showJsonView = false;
diff --git a/ui/lib/kv/addon/components/page/secret/edit.js b/ui/lib/kv/addon/components/page/secret/edit.js
index 01e2c66f4b97..c1549f9b458a 100644
--- a/ui/lib/kv/addon/components/page/secret/edit.js
+++ b/ui/lib/kv/addon/components/page/secret/edit.js
@@ -31,7 +31,7 @@ import { isAdvancedSecret } from 'core/utils/advanced-secret';
export default class KvSecretEdit extends Component {
@service controlGroup;
@service flashMessages;
- @service('app-router') router;
+ @service router;
@tracked showJsonView = false;
@tracked showDiff = false;
diff --git a/ui/lib/kv/addon/components/page/secret/metadata/details.js b/ui/lib/kv/addon/components/page/secret/metadata/details.js
index 48a16b725354..2769ae946ad2 100644
--- a/ui/lib/kv/addon/components/page/secret/metadata/details.js
+++ b/ui/lib/kv/addon/components/page/secret/metadata/details.js
@@ -37,9 +37,8 @@ import errorMessage from 'vault/utils/error-message';
export default class KvSecretMetadataDetails extends Component {
@service controlGroup;
@service flashMessages;
- @service('app-router') router;
+ @service router;
@service store;
- @service pagination;
@tracked error = null;
@tracked customMetadataFromData = null;
@@ -55,7 +54,7 @@ export default class KvSecretMetadataDetails extends Component {
const adapter = this.store.adapterFor('kv/metadata');
try {
await adapter.deleteMetadata(backend, path);
- this.pagination.clearDataset('kv/metadata'); // Clear out the store cache so that the metadata/list view is updated.
+ this.store.clearDataset('kv/metadata'); // Clear out the store cache so that the metadata/list view is updated.
this.flashMessages.success(
`Successfully deleted the metadata and all version data for the secret ${path}.`
);
diff --git a/ui/lib/kv/addon/components/page/secret/metadata/version-history.hbs b/ui/lib/kv/addon/components/page/secret/metadata/version-history.hbs
index 92481eafc723..93359dfada56 100644
--- a/ui/lib/kv/addon/components/page/secret/metadata/version-history.hbs
+++ b/ui/lib/kv/addon/components/page/secret/metadata/version-history.hbs
@@ -98,17 +98,19 @@
data-test-popup-menu-trigger
/>
View version {{versionData.version}}
+ />
{{#if (and @metadata.canCreateVersionData (not versionData.destroyed) (not versionData.isSecretDeleted))}}
Create new version from {{versionData.version}}
+ />
{{/if}}
diff --git a/ui/lib/kv/addon/components/page/secret/patch.js b/ui/lib/kv/addon/components/page/secret/patch.js
index bb61f96e5cd3..fb1a4db15218 100644
--- a/ui/lib/kv/addon/components/page/secret/patch.js
+++ b/ui/lib/kv/addon/components/page/secret/patch.js
@@ -37,7 +37,7 @@ import errorMessage from 'vault/utils/error-message';
export default class KvSecretPatch extends Component {
@service controlGroup;
@service flashMessages;
- @service('app-router') router;
+ @service router;
@service store;
@tracked controlGroupError;
diff --git a/ui/lib/kv/addon/components/page/secrets/create.js b/ui/lib/kv/addon/components/page/secrets/create.js
index 33e35cae4048..c4375f300a95 100644
--- a/ui/lib/kv/addon/components/page/secrets/create.js
+++ b/ui/lib/kv/addon/components/page/secrets/create.js
@@ -28,8 +28,8 @@ import errorMessage from 'vault/utils/error-message';
export default class KvSecretCreate extends Component {
@service controlGroup;
@service flashMessages;
- @service('app-router') router;
- @service pagination;
+ @service router;
+ @service store;
@tracked showJsonView = false;
@tracked errorMessage;
@@ -60,7 +60,7 @@ export default class KvSecretCreate extends Component {
try {
// try saving secret data first
yield secret.save();
- this.pagination.clearDataset('kv/metadata'); // Clear out the pagination cache so that the metadata/list view is updated.
+ this.store.clearDataset('kv/metadata'); // Clear out the store cache so that the metadata/list view is updated.
this.flashMessages.success(`Successfully saved secret data for: ${secret.path}.`);
} catch (error) {
let message = errorMessage(error);
diff --git a/ui/lib/kv/addon/engine.js b/ui/lib/kv/addon/engine.js
index 3e2457d0d6d7..bfd19ebefe7f 100644
--- a/ui/lib/kv/addon/engine.js
+++ b/ui/lib/kv/addon/engine.js
@@ -22,10 +22,9 @@ export default class KvEngine extends Engine {
'download',
'flash-messages',
'namespace',
- 'app-router',
+ 'router',
'secret-mount-path',
'store',
- 'pagination',
'version',
],
externalRoutes: ['secrets', 'syncDestination'],
diff --git a/ui/lib/kv/addon/routes/index.js b/ui/lib/kv/addon/routes/index.js
index 904b8cb3912a..28a4566751ed 100644
--- a/ui/lib/kv/addon/routes/index.js
+++ b/ui/lib/kv/addon/routes/index.js
@@ -7,7 +7,7 @@ import Route from '@ember/routing/route';
import { service } from '@ember/service';
export default class KvRoute extends Route {
- @service('app-router') router;
+ @service router;
redirect() {
this.router.transitionTo('vault.cluster.secrets.backend.kv.list');
diff --git a/ui/lib/kv/addon/routes/list-directory.js b/ui/lib/kv/addon/routes/list-directory.js
index 0b6e787f6f66..c0a30a1d4be9 100644
--- a/ui/lib/kv/addon/routes/list-directory.js
+++ b/ui/lib/kv/addon/routes/list-directory.js
@@ -9,8 +9,8 @@ import { hash } from 'rsvp';
import { pathIsDirectory, breadcrumbsForSecret } from 'kv/utils/kv-breadcrumbs';
export default class KvSecretsListRoute extends Route {
- @service pagination;
- @service('app-router') router;
+ @service store;
+ @service router;
@service secretMountPath;
queryParams = {
@@ -23,7 +23,7 @@ export default class KvSecretsListRoute extends Route {
};
async fetchMetadata(backend, pathToSecret, params) {
- return await this.pagination
+ return await this.store
.lazyPaginatedQuery('kv/metadata', {
backend,
responsePath: 'data.keys',
diff --git a/ui/lib/kv/addon/routes/secret/index.js b/ui/lib/kv/addon/routes/secret/index.js
index dc1356810e3c..f048281b2dbd 100644
--- a/ui/lib/kv/addon/routes/secret/index.js
+++ b/ui/lib/kv/addon/routes/secret/index.js
@@ -8,7 +8,7 @@ import { service } from '@ember/service';
import { breadcrumbsForSecret } from 'kv/utils/kv-breadcrumbs';
export default class SecretIndex extends Route {
- @service('app-router') router;
+ @service router;
setupController(controller, resolvedModel) {
super.setupController(controller, resolvedModel);
diff --git a/ui/lib/kv/addon/routes/secret/patch.js b/ui/lib/kv/addon/routes/secret/patch.js
index 210a54a9b0cb..3cb88d1b10f0 100644
--- a/ui/lib/kv/addon/routes/secret/patch.js
+++ b/ui/lib/kv/addon/routes/secret/patch.js
@@ -8,7 +8,7 @@ import { breadcrumbsForSecret } from 'kv/utils/kv-breadcrumbs';
import { service } from '@ember/service';
export default class SecretPatch extends Route {
- @service('app-router') router;
+ @service router;
setupController(controller, resolvedModel) {
super.setupController(controller, resolvedModel);
diff --git a/ui/lib/ldap/addon/components/page/configure.ts b/ui/lib/ldap/addon/components/page/configure.ts
index 6deea648f07e..303659df5367 100644
--- a/ui/lib/ldap/addon/components/page/configure.ts
+++ b/ui/lib/ldap/addon/components/page/configure.ts
@@ -29,7 +29,7 @@ interface SchemaOption {
export default class LdapConfigurePageComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked showRotatePrompt = false;
@tracked modelValidations: ValidationMap | null = null;
diff --git a/ui/lib/ldap/addon/components/page/libraries.hbs b/ui/lib/ldap/addon/components/page/libraries.hbs
index b881dff58a4a..01f41696e8b2 100644
--- a/ui/lib/ldap/addon/components/page/libraries.hbs
+++ b/ui/lib/ldap/addon/components/page/libraries.hbs
@@ -58,21 +58,18 @@
data-test-popup-menu-trigger
/>
{{#if library.canEdit}}
- Edit
+
{{/if}}
{{#if library.canRead}}
- Details
+
{{/if}}
{{#if library.canDelete}}
Delete
+ />
{{/if}}
{{/if}}
diff --git a/ui/lib/ldap/addon/components/page/library/create-and-edit.ts b/ui/lib/ldap/addon/components/page/library/create-and-edit.ts
index 7e483ed9e771..0724a1031765 100644
--- a/ui/lib/ldap/addon/components/page/library/create-and-edit.ts
+++ b/ui/lib/ldap/addon/components/page/library/create-and-edit.ts
@@ -23,7 +23,7 @@ interface Args {
export default class LdapCreateAndEditLibraryPageComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked modelValidations: ValidationMap | null = null;
@tracked invalidFormMessage = '';
diff --git a/ui/lib/ldap/addon/components/page/library/details.ts b/ui/lib/ldap/addon/components/page/library/details.ts
index e35bb2f56022..c96962821cba 100644
--- a/ui/lib/ldap/addon/components/page/library/details.ts
+++ b/ui/lib/ldap/addon/components/page/library/details.ts
@@ -20,7 +20,7 @@ interface Args {
export default class LdapLibraryDetailsPageComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@action
async delete() {
diff --git a/ui/lib/ldap/addon/components/page/library/details/accounts.hbs b/ui/lib/ldap/addon/components/page/library/details/accounts.hbs
index 045f9db5e6f4..c00504a1d718 100644
--- a/ui/lib/ldap/addon/components/page/library/details/accounts.hbs
+++ b/ui/lib/ldap/addon/components/page/library/details/accounts.hbs
@@ -55,7 +55,7 @@
<:content>
{
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked showCheckOutPrompt = false;
@tracked checkOutTtl: string | null = null;
get cliCommand() {
- return `vault lease renew ${this.args.library.backend}/library/${this.args.library.name}/check-out/:lease_id`;
+ return `vault lease renew ad/library/${this.args.library.name}/check-out/:lease_id`;
}
@action
setTtl(data: TtlEvent) {
diff --git a/ui/lib/ldap/addon/components/page/overview.ts b/ui/lib/ldap/addon/components/page/overview.ts
index 1dbfd9a2f6e0..494b98ae49e0 100644
--- a/ui/lib/ldap/addon/components/page/overview.ts
+++ b/ui/lib/ldap/addon/components/page/overview.ts
@@ -31,7 +31,7 @@ interface Option {
}
export default class LdapLibrariesPageComponent extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked selectedRole: LdapRoleModel | undefined;
diff --git a/ui/lib/ldap/addon/components/page/role/create-and-edit.ts b/ui/lib/ldap/addon/components/page/role/create-and-edit.ts
index c2a5aa139ed9..80fb5d164e5a 100644
--- a/ui/lib/ldap/addon/components/page/role/create-and-edit.ts
+++ b/ui/lib/ldap/addon/components/page/role/create-and-edit.ts
@@ -15,7 +15,7 @@ import type LdapRoleModel from 'vault/models/ldap/role';
import { Breadcrumb, ValidationMap } from 'vault/vault/app-types';
import type FlashMessageService from 'vault/services/flash-messages';
import type RouterService from '@ember/routing/router-service';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
interface Args {
model: LdapRoleModel;
@@ -30,8 +30,8 @@ interface RoleTypeOption {
export default class LdapCreateAndEditRolePageComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
- @service declare readonly pagination: PaginationService;
+ @service declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
@tracked modelValidations: ValidationMap | null = null;
@tracked invalidFormMessage = '';
@@ -71,7 +71,7 @@ export default class LdapCreateAndEditRolePageComponent extends Component
yield model.save();
this.flashMessages.success(`Successfully ${action} the role ${model.name}`);
if (action === 'created') {
- this.pagination.clearDataset('ldap/role');
+ this.store.clearDataset('ldap/role');
}
this.router.transitionTo(
'vault.cluster.secrets.backend.ldap.roles.role.details',
diff --git a/ui/lib/ldap/addon/components/page/role/details.ts b/ui/lib/ldap/addon/components/page/role/details.ts
index be574b16b162..cc6e9f08560b 100644
--- a/ui/lib/ldap/addon/components/page/role/details.ts
+++ b/ui/lib/ldap/addon/components/page/role/details.ts
@@ -14,7 +14,7 @@ import type LdapRoleModel from 'vault/models/ldap/role';
import { Breadcrumb } from 'vault/vault/app-types';
import type FlashMessageService from 'vault/services/flash-messages';
import type RouterService from '@ember/routing/router-service';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
interface Args {
model: LdapRoleModel;
@@ -23,15 +23,15 @@ interface Args {
export default class LdapRoleDetailsPageComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
- @service declare readonly pagination: PaginationService;
+ @service declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
@action
async delete() {
try {
await this.args.model.destroyRecord();
this.flashMessages.success('Role deleted successfully.');
- this.pagination.clearDataset('ldap/role');
+ this.store.clearDataset('ldap/role');
this.router.transitionTo('vault.cluster.secrets.backend.ldap.roles');
} catch (error) {
const message = errorMessage(error, 'Unable to delete role. Please try again or contact support.');
diff --git a/ui/lib/ldap/addon/components/page/roles.hbs b/ui/lib/ldap/addon/components/page/roles.hbs
index cf9747a5dff4..7bf88cd7916c 100644
--- a/ui/lib/ldap/addon/components/page/roles.hbs
+++ b/ui/lib/ldap/addon/components/page/roles.hbs
@@ -59,42 +59,45 @@
/>
{{#if (this.isHierarchical role.name)}}
Content
+ />
{{else}}
{{#if role.canEdit}}
- Edit
+
{{/if}}
{{#if role.canReadCreds}}
-
- Get credentials
-
+
{{/if}}
{{#if role.canRotateStaticCreds}}
Rotate credentials
+ />
{{/if}}
Details
+ />
{{#if role.canDelete}}
Delete
+ />
{{/if}}
{{/if}}
diff --git a/ui/lib/ldap/addon/components/page/roles.ts b/ui/lib/ldap/addon/components/page/roles.ts
index b990e5d6c06a..891069546bc2 100644
--- a/ui/lib/ldap/addon/components/page/roles.ts
+++ b/ui/lib/ldap/addon/components/page/roles.ts
@@ -15,7 +15,7 @@ import type SecretEngineModel from 'vault/models/secret-engine';
import type FlashMessageService from 'vault/services/flash-messages';
import type { Breadcrumb, EngineOwner } from 'vault/vault/app-types';
import type RouterService from '@ember/routing/router-service';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
interface Args {
roles: Array;
@@ -27,9 +27,8 @@ interface Args {
export default class LdapRolesPageComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
- @service declare readonly pagination: PaginationService;
-
+ @service declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
@tracked credsToRotate: LdapRoleModel | null = null;
@tracked roleToDelete: LdapRoleModel | null = null;
@@ -76,7 +75,7 @@ export default class LdapRolesPageComponent extends Component {
try {
const message = `Successfully deleted role ${model.name}.`;
await model.destroyRecord();
- this.pagination.clearDataset('ldap/role');
+ this.store.clearDataset('ldap/role');
this.router.transitionTo('vault.cluster.secrets.backend.ldap.roles');
this.flashMessages.success(message);
} catch (error) {
diff --git a/ui/lib/ldap/addon/engine.js b/ui/lib/ldap/addon/engine.js
index daf761acb2ee..22a4c7caa64d 100644
--- a/ui/lib/ldap/addon/engine.js
+++ b/ui/lib/ldap/addon/engine.js
@@ -14,7 +14,7 @@ export default class LdapEngine extends Engine {
modulePrefix = modulePrefix;
Resolver = Resolver;
dependencies = {
- services: ['app-router', 'store', 'pagination', 'secret-mount-path', 'flash-messages', 'auth'],
+ services: ['router', 'store', 'secret-mount-path', 'flash-messages', 'auth'],
externalRoutes: ['secrets'],
};
}
diff --git a/ui/lib/ldap/addon/routes/libraries/library/check-out.ts b/ui/lib/ldap/addon/routes/libraries/library/check-out.ts
index 174c60034ae6..72701dba11e7 100644
--- a/ui/lib/ldap/addon/routes/libraries/library/check-out.ts
+++ b/ui/lib/ldap/addon/routes/libraries/library/check-out.ts
@@ -24,7 +24,7 @@ interface LdapLibraryCheckOutController extends Controller {
export default class LdapLibraryCheckOutRoute extends Route {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
accountsRoute = 'vault.cluster.secrets.backend.ldap.libraries.library.details.accounts';
diff --git a/ui/lib/ldap/addon/routes/libraries/library/details/index.ts b/ui/lib/ldap/addon/routes/libraries/library/details/index.ts
index 2b60dd68e6b8..77024023f489 100644
--- a/ui/lib/ldap/addon/routes/libraries/library/details/index.ts
+++ b/ui/lib/ldap/addon/routes/libraries/library/details/index.ts
@@ -9,7 +9,7 @@ import { service } from '@ember/service';
import type RouterService from '@ember/routing/router-service';
export default class LdapLibraryRoute extends Route {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
redirect() {
this.router.transitionTo('vault.cluster.secrets.backend.ldap.libraries.library.details.accounts');
diff --git a/ui/lib/ldap/addon/routes/libraries/library/index.ts b/ui/lib/ldap/addon/routes/libraries/library/index.ts
index 316516913b7d..eb4fbd897f07 100644
--- a/ui/lib/ldap/addon/routes/libraries/library/index.ts
+++ b/ui/lib/ldap/addon/routes/libraries/library/index.ts
@@ -9,7 +9,7 @@ import { service } from '@ember/service';
import type RouterService from '@ember/routing/router-service';
export default class LdapLibraryRoute extends Route {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
redirect() {
this.router.transitionTo('vault.cluster.secrets.backend.ldap.libraries.library.details');
diff --git a/ui/lib/ldap/addon/routes/roles.ts b/ui/lib/ldap/addon/routes/roles.ts
index 7d6edef17942..a37295333296 100644
--- a/ui/lib/ldap/addon/routes/roles.ts
+++ b/ui/lib/ldap/addon/routes/roles.ts
@@ -6,17 +6,17 @@
import Route from '@ember/routing/route';
import { service } from '@ember/service';
-import type PaginationService from 'vault/services/pagination';
import type SecretMountPath from 'vault/services/secret-mount-path';
+import type StoreService from 'vault/services/store';
// Base class for roles/index and roles/subdirectory routes
export default class LdapRolesRoute extends Route {
- @service declare readonly pagination: PaginationService;
@service declare readonly secretMountPath: SecretMountPath;
+ @service declare readonly store: StoreService;
lazyQuery(backendId: string, params: { page?: string; pageFilter: string }, adapterOptions: object) {
const page = Number(params.page) || 1;
- return this.pagination.lazyPaginatedQuery(
+ return this.store.lazyPaginatedQuery(
'ldap/role',
{
backend: backendId,
diff --git a/ui/lib/ldap/addon/routes/roles/role/index.ts b/ui/lib/ldap/addon/routes/roles/role/index.ts
index e2442f8fc362..852289daf1ba 100644
--- a/ui/lib/ldap/addon/routes/roles/role/index.ts
+++ b/ui/lib/ldap/addon/routes/roles/role/index.ts
@@ -9,7 +9,7 @@ import { service } from '@ember/service';
import type RouterService from '@ember/routing/router-service';
export default class LdapRoleRoute extends Route {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
redirect() {
this.router.transitionTo('vault.cluster.secrets.backend.ldap.roles.role.details');
diff --git a/ui/lib/open-api-explorer/addon/engine.js b/ui/lib/open-api-explorer/addon/engine.js
index 400b5e4f86c6..b16e623c86ea 100644
--- a/ui/lib/open-api-explorer/addon/engine.js
+++ b/ui/lib/open-api-explorer/addon/engine.js
@@ -14,7 +14,7 @@ const Eng = Engine.extend({
modulePrefix,
Resolver,
dependencies: {
- services: ['auth', 'flash-messages', 'namespace', 'app-router', 'version'],
+ services: ['auth', 'flash-messages', 'namespace', 'router', 'version'],
},
});
diff --git a/ui/lib/pki/addon/components/page/pki-configuration-details.ts b/ui/lib/pki/addon/components/page/pki-configuration-details.ts
index 78ef13da0d13..149d7b450392 100644
--- a/ui/lib/pki/addon/components/page/pki-configuration-details.ts
+++ b/ui/lib/pki/addon/components/page/pki-configuration-details.ts
@@ -19,7 +19,7 @@ interface Args {
export default class PkiConfigurationDetails extends Component {
@service declare readonly store: Store;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly flashMessages: FlashMessageService;
@service declare readonly version: VersionService;
@tracked showDeleteAllIssuers = false;
diff --git a/ui/lib/pki/addon/components/page/pki-configuration-edit.ts b/ui/lib/pki/addon/components/page/pki-configuration-edit.ts
index 0f8e1de30c57..3545006208f5 100644
--- a/ui/lib/pki/addon/components/page/pki-configuration-edit.ts
+++ b/ui/lib/pki/addon/components/page/pki-configuration-edit.ts
@@ -44,7 +44,7 @@ interface ErrorObject {
message: string;
}
export default class PkiConfigurationEditComponent extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly flashMessages: FlashMessageService;
@service declare readonly version: VersionService;
diff --git a/ui/lib/pki/addon/components/page/pki-configure-create.ts b/ui/lib/pki/addon/components/page/pki-configure-create.ts
index 8bcb723c650c..fba02ee780d9 100644
--- a/ui/lib/pki/addon/components/page/pki-configure-create.ts
+++ b/ui/lib/pki/addon/components/page/pki-configure-create.ts
@@ -7,7 +7,7 @@ import Component from '@glimmer/component';
import { service } from '@ember/service';
import { tracked } from '@glimmer/tracking';
import type Store from '@ember-data/store';
-import type RouterService from '@ember/routing/router';
+import type Router from '@ember/routing/router';
import type FlashMessageService from 'vault/services/flash-messages';
import type PkiActionModel from 'vault/models/pki/action';
import type { Breadcrumb } from 'vault/vault/app-types';
@@ -26,9 +26,9 @@ interface Args {
* and form submission and cancel actions.
*/
export default class PkiConfigureCreate extends Component {
- @service declare readonly flashMessages: FlashMessageService;
@service declare readonly store: Store;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: Router;
+ @service declare readonly flashMessages: FlashMessageService;
@tracked title = 'Configure PKI';
diff --git a/ui/lib/pki/addon/components/page/pki-issuer-edit.ts b/ui/lib/pki/addon/components/page/pki-issuer-edit.ts
index 6f2576e773c5..1f456efb09c1 100644
--- a/ui/lib/pki/addon/components/page/pki-issuer-edit.ts
+++ b/ui/lib/pki/addon/components/page/pki-issuer-edit.ts
@@ -21,7 +21,7 @@ interface Args {
}
export default class PkiIssuerEditComponent extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly flashMessages: FlashMessageService;
@tracked usageValues: Array = [];
diff --git a/ui/lib/pki/addon/components/page/pki-issuer-list.hbs b/ui/lib/pki/addon/components/page/pki-issuer-list.hbs
index aa376895527e..2e80e195ccd0 100644
--- a/ui/lib/pki/addon/components/page/pki-issuer-list.hbs
+++ b/ui/lib/pki/addon/components/page/pki-issuer-list.hbs
@@ -108,11 +108,12 @@
data-test-popup-menu-trigger
/>
Details
- Edit
+ />
+
diff --git a/ui/lib/pki/addon/components/page/pki-issuer-rotate-root.ts b/ui/lib/pki/addon/components/page/pki-issuer-rotate-root.ts
index 7b74640a3f0b..693573f23492 100644
--- a/ui/lib/pki/addon/components/page/pki-issuer-rotate-root.ts
+++ b/ui/lib/pki/addon/components/page/pki-issuer-rotate-root.ts
@@ -11,7 +11,7 @@ import { waitFor } from '@ember/test-waiters';
import { task } from 'ember-concurrency';
import errorMessage from 'vault/utils/error-message';
import type Store from '@ember-data/store';
-import type RouterService from '@ember/routing/router';
+import type Router from '@ember/routing/router';
import type FlashMessageService from 'vault/services/flash-messages';
import type SecretMountPath from 'vault/services/secret-mount-path';
import type PkiIssuerModel from 'vault/models/pki/issuer';
@@ -31,10 +31,10 @@ const RADIO_BUTTON_KEY = {
};
export default class PagePkiIssuerRotateRootComponent extends Component {
+ @service declare readonly store: Store;
+ @service declare readonly router: Router;
@service declare readonly flashMessages: FlashMessageService;
@service declare readonly secretMountPath: SecretMountPath;
- @service declare readonly store: Store;
- @service('app-router') declare readonly router: RouterService;
@tracked displayedForm = RADIO_BUTTON_KEY.oldSettings;
@tracked showOldSettings = false;
diff --git a/ui/lib/pki/addon/components/page/pki-key-details.ts b/ui/lib/pki/addon/components/page/pki-key-details.ts
index 756236f3e0f4..d48ef94e4e49 100644
--- a/ui/lib/pki/addon/components/page/pki-key-details.ts
+++ b/ui/lib/pki/addon/components/page/pki-key-details.ts
@@ -15,7 +15,7 @@ interface Args {
}
export default class PkiKeyDetails extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly flashMessages: FlashMessageService;
@action
diff --git a/ui/lib/pki/addon/components/page/pki-key-list.hbs b/ui/lib/pki/addon/components/page/pki-key-list.hbs
index b071c02612f5..a01596416b31 100644
--- a/ui/lib/pki/addon/components/page/pki-key-list.hbs
+++ b/ui/lib/pki/addon/components/page/pki-key-list.hbs
@@ -54,17 +54,19 @@
/>
{{#if @canRead}}
Details
+ />
{{/if}}
{{#if @canEdit}}
Edit
+ />
{{/if}}
{{/if}}
diff --git a/ui/lib/pki/addon/components/page/pki-overview.ts b/ui/lib/pki/addon/components/page/pki-overview.ts
index daf879d0cca7..508c84eefad0 100644
--- a/ui/lib/pki/addon/components/page/pki-overview.ts
+++ b/ui/lib/pki/addon/components/page/pki-overview.ts
@@ -19,7 +19,7 @@ interface Args {
}
export default class PkiOverview extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly store: Store;
@tracked rolesValue = '';
diff --git a/ui/lib/pki/addon/components/page/pki-role-details.ts b/ui/lib/pki/addon/components/page/pki-role-details.ts
index e8f3865dbfc7..c48a5ecd996a 100644
--- a/ui/lib/pki/addon/components/page/pki-role-details.ts
+++ b/ui/lib/pki/addon/components/page/pki-role-details.ts
@@ -17,7 +17,7 @@ interface Args {
}
export default class DetailsPage extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly flashMessages: FlashMessageService;
@service declare readonly secretMountPath: SecretMountPath;
diff --git a/ui/lib/pki/addon/components/page/pki-tidy-status.ts b/ui/lib/pki/addon/components/page/pki-tidy-status.ts
index 8172b5e846ff..c8a0f07f9a08 100644
--- a/ui/lib/pki/addon/components/page/pki-tidy-status.ts
+++ b/ui/lib/pki/addon/components/page/pki-tidy-status.ts
@@ -52,7 +52,7 @@ export default class PkiTidyStatusComponent extends Component {
@service declare readonly secretMountPath: SecretMountPath;
@service declare readonly flashMessages: FlashMessageService;
@service declare readonly version: VersionService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked tidyOptionsModal = false;
@tracked confirmCancelTidy = false;
diff --git a/ui/lib/pki/addon/components/pki-generate-root.ts b/ui/lib/pki/addon/components/pki-generate-root.ts
index 2b3d8a959381..d8bab62adce1 100644
--- a/ui/lib/pki/addon/components/pki-generate-root.ts
+++ b/ui/lib/pki/addon/components/pki-generate-root.ts
@@ -51,7 +51,7 @@ interface Args {
*/
export default class PkiGenerateRootComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked modelValidations: ValidationMap | null = null;
@tracked errorBanner = '';
diff --git a/ui/lib/pki/addon/components/pki-key-form.hbs b/ui/lib/pki/addon/components/pki-key-form.hbs
index b2d45e20121c..d70b538a867e 100644
--- a/ui/lib/pki/addon/components/pki-key-form.hbs
+++ b/ui/lib/pki/addon/components/pki-key-form.hbs
@@ -3,71 +3,62 @@
SPDX-License-Identifier: BUSL-1.1
~}}
-{{! private_key is only available after initial save }}
-{{#if this.generatedKey.privateKey}}
-
-{{else}}
-
-
-
-
- {{#if @model.isNew}}
- {{#each @model.formFieldGroups as |fieldGroup|}}
- {{#each-in fieldGroup as |group fields|}}
- {{#if (eq group "Key parameters")}}
-
- {{else}}
- {{#each fields as |attr|}}
-
- {{/each}}
- {{/if}}
- {{/each-in}}
- {{/each}}
- {{else}}
- {{! only key name is edit-able }}
- {{#let (find-by "name" "keyName" @model.formFields) as |keyName|}}
-
- {{/let}}
- {{#let (find-by "name" "keyType" @model.formFields) as |keyType|}}
-
- {{/let}}
- {{/if}}
-
-
-
-
-
- {{#if this.invalidFormAlert}}
-
-
-
+
+
+
+
+ {{#if @model.isNew}}
+ {{#each @model.formFieldGroups as |fieldGroup|}}
+ {{#each-in fieldGroup as |group fields|}}
+ {{#if (eq group "Key parameters")}}
+
+ {{else}}
+ {{#each fields as |attr|}}
+
+ {{/each}}
+ {{/if}}
+ {{/each-in}}
+ {{/each}}
+ {{else}}
+ {{! only key name is edit-able }}
+ {{#let (find-by "name" "keyName" @model.formFields) as |keyName|}}
+
+ {{/let}}
+ {{#let (find-by "name" "keyType" @model.formFields) as |keyType|}}
+
+ {{/let}}
{{/if}}
-
-{{/if}}
\ No newline at end of file
+
+
+
+
+
+ {{#if this.invalidFormAlert}}
+
+
+
+ {{/if}}
+
\ No newline at end of file
diff --git a/ui/lib/pki/addon/components/pki-key-form.ts b/ui/lib/pki/addon/components/pki-key-form.ts
index 45e48182645b..ce66110e2f15 100644
--- a/ui/lib/pki/addon/components/pki-key-form.ts
+++ b/ui/lib/pki/addon/components/pki-key-form.ts
@@ -39,8 +39,6 @@ export default class PkiKeyForm extends Component {
@tracked invalidFormAlert = '';
@tracked modelValidations: ValidationMap | null = null;
- @tracked generatedKey: PkiKeyModel | null = null;
-
@task
@waitFor
*save(event: Event) {
@@ -53,15 +51,11 @@ export default class PkiKeyForm extends Component {
this.invalidFormAlert = invalidFormMessage;
}
if (!isValid && isNew) return;
- this.generatedKey = yield this.args.model.save({ adapterOptions: { import: false } });
+ yield this.args.model.save({ adapterOptions: { import: false } });
this.flashMessages.success(
`Successfully ${isNew ? 'generated' : 'updated'} key${keyName ? ` ${keyName}.` : '.'}`
);
-
- // only transition to details if there is no private_key data to display
- if (!this.generatedKey?.privateKey) {
- this.args.onSave();
- }
+ this.args.onSave();
} catch (error) {
this.errorBanner = errorMessage(error);
this.invalidFormAlert = 'There was an error submitting this form.';
diff --git a/ui/lib/pki/addon/components/pki-role-generate.ts b/ui/lib/pki/addon/components/pki-role-generate.ts
index 56c327c8f450..3c008b4196de 100644
--- a/ui/lib/pki/addon/components/pki-role-generate.ts
+++ b/ui/lib/pki/addon/components/pki-role-generate.ts
@@ -9,7 +9,7 @@ import { task } from 'ember-concurrency';
import { service } from '@ember/service';
import { tracked } from '@glimmer/tracking';
import errorMessage from 'vault/utils/error-message';
-import type RouterService from '@ember/routing/router';
+import type Router from '@ember/routing/router';
import type Store from '@ember-data/store';
import type FlashMessageService from 'vault/services/flash-messages';
import type DownloadService from 'vault/services/download';
@@ -23,10 +23,10 @@ interface Args {
}
export default class PkiRoleGenerate extends Component {
- @service declare readonly download: DownloadService;
- @service declare readonly flashMessages: FlashMessageService;
+ @service declare readonly router: Router;
@service declare readonly store: Store;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly flashMessages: FlashMessageService;
+ @service declare readonly download: DownloadService;
@tracked errorBanner = '';
@tracked invalidFormAlert = '';
diff --git a/ui/lib/pki/addon/components/pki-tidy-form.ts b/ui/lib/pki/addon/components/pki-tidy-form.ts
index 84c313ca3a5d..1b388b77eade 100644
--- a/ui/lib/pki/addon/components/pki-tidy-form.ts
+++ b/ui/lib/pki/addon/components/pki-tidy-form.ts
@@ -30,7 +30,7 @@ interface PkiTidyBooleans {
}
export default class PkiTidyForm extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked errorBanner = '';
@tracked invalidFormAlert = '';
diff --git a/ui/lib/pki/addon/engine.js b/ui/lib/pki/addon/engine.js
index 7c0bc3bb556f..803bc5071788 100644
--- a/ui/lib/pki/addon/engine.js
+++ b/ui/lib/pki/addon/engine.js
@@ -22,10 +22,9 @@ export default class PkiEngine extends Engine {
'flash-messages',
'namespace',
'path-help',
- 'app-router',
+ 'router',
'secret-mount-path',
'store',
- 'pagination',
'version',
],
externalRoutes: ['secrets', 'secretsListRootConfiguration', 'externalMountIssuer'],
diff --git a/ui/lib/pki/addon/routes/certificates/index.js b/ui/lib/pki/addon/routes/certificates/index.js
index a0d621687d34..0d21c887e4e3 100644
--- a/ui/lib/pki/addon/routes/certificates/index.js
+++ b/ui/lib/pki/addon/routes/certificates/index.js
@@ -11,9 +11,8 @@ import { getCliMessage } from 'pki/routes/overview';
@withConfig()
export default class PkiCertificatesIndexRoute extends Route {
- @service pagination;
+ @service store;
@service secretMountPath;
- @service store; // used by @withConfig decorator
queryParams = {
page: {
@@ -24,7 +23,7 @@ export default class PkiCertificatesIndexRoute extends Route {
async fetchCertificates(params) {
try {
const page = Number(params.page) || 1;
- return await this.pagination.lazyPaginatedQuery('pki/certificate/base', {
+ return await this.store.lazyPaginatedQuery('pki/certificate/base', {
backend: this.secretMountPath.currentPath,
responsePath: 'data.keys',
page,
diff --git a/ui/lib/pki/addon/routes/index.js b/ui/lib/pki/addon/routes/index.js
index 6dfb959f80af..38c03207a00a 100644
--- a/ui/lib/pki/addon/routes/index.js
+++ b/ui/lib/pki/addon/routes/index.js
@@ -7,7 +7,7 @@ import Route from '@ember/routing/route';
import { service } from '@ember/service';
export default class PkiRoute extends Route {
- @service('app-router') router;
+ @service router;
redirect() {
this.router.transitionTo('vault.cluster.secrets.backend.pki.overview');
diff --git a/ui/lib/pki/addon/routes/issuers/index.js b/ui/lib/pki/addon/routes/issuers/index.js
index 77cdd1f7c434..c63804885093 100644
--- a/ui/lib/pki/addon/routes/issuers/index.js
+++ b/ui/lib/pki/addon/routes/issuers/index.js
@@ -7,12 +7,12 @@ import Route from '@ember/routing/route';
import { service } from '@ember/service';
export default class PkiIssuersListRoute extends Route {
- @service pagination;
+ @service store;
@service secretMountPath;
model(params) {
const page = Number(params.page) || 1;
- return this.pagination
+ return this.store
.lazyPaginatedQuery('pki/issuer', {
backend: this.secretMountPath.currentPath,
responsePath: 'data.keys',
diff --git a/ui/lib/pki/addon/routes/keys/index.js b/ui/lib/pki/addon/routes/keys/index.js
index 432438223a1b..5c669d63f8a2 100644
--- a/ui/lib/pki/addon/routes/keys/index.js
+++ b/ui/lib/pki/addon/routes/keys/index.js
@@ -11,9 +11,8 @@ import { PKI_DEFAULT_EMPTY_STATE_MSG } from 'pki/routes/overview';
@withConfig()
export default class PkiKeysIndexRoute extends Route {
- @service pagination;
@service secretMountPath;
- @service store; // used by @withConfig decorator
+ @service store;
queryParams = {
page: {
@@ -26,7 +25,7 @@ export default class PkiKeysIndexRoute extends Route {
return hash({
hasConfig: this.pkiMountHasConfig,
parentModel: this.modelFor('keys'),
- keyModels: this.pagination
+ keyModels: this.store
.lazyPaginatedQuery('pki/key', {
backend: this.secretMountPath.currentPath,
responsePath: 'data.keys',
diff --git a/ui/lib/pki/addon/routes/roles/create.js b/ui/lib/pki/addon/routes/roles/create.js
index c3a08a5cd58c..b08260fc6bad 100644
--- a/ui/lib/pki/addon/routes/roles/create.js
+++ b/ui/lib/pki/addon/routes/roles/create.js
@@ -36,10 +36,4 @@ export default class PkiRolesCreateRoute extends Route {
{ label: 'create' },
];
}
-
- willTransition() {
- // after upgrading to Ember Data 5.3.2 we saw duplicate records in the store after creating and saving a new role
- // it's unclear why this ghost record is persisting, manually unloading refreshes the store
- this.store.unloadAll('pki/role');
- }
}
diff --git a/ui/lib/pki/addon/routes/roles/index.js b/ui/lib/pki/addon/routes/roles/index.js
index 57f4a0bd7d7f..3dae1186f851 100644
--- a/ui/lib/pki/addon/routes/roles/index.js
+++ b/ui/lib/pki/addon/routes/roles/index.js
@@ -10,9 +10,8 @@ import { hash } from 'rsvp';
import { getCliMessage } from 'pki/routes/overview';
@withConfig()
export default class PkiRolesIndexRoute extends Route {
- @service store; // used by @withConfig decorator
+ @service store;
@service secretMountPath;
- @service pagination;
queryParams = {
page: {
@@ -23,7 +22,7 @@ export default class PkiRolesIndexRoute extends Route {
async fetchRoles(params) {
try {
const page = Number(params.page) || 1;
- return await this.pagination.lazyPaginatedQuery('pki/role', {
+ return await this.store.lazyPaginatedQuery('pki/role', {
backend: this.secretMountPath.currentPath,
responsePath: 'data.keys',
page,
diff --git a/ui/lib/pki/addon/templates/certificates/index.hbs b/ui/lib/pki/addon/templates/certificates/index.hbs
index 60ea533e4786..2866d1ccdd32 100644
--- a/ui/lib/pki/addon/templates/certificates/index.hbs
+++ b/ui/lib/pki/addon/templates/certificates/index.hbs
@@ -38,10 +38,7 @@
@hasChevron={{false}}
data-test-popup-menu-trigger
/>
- Details
+
diff --git a/ui/lib/pki/addon/templates/roles/index.hbs b/ui/lib/pki/addon/templates/roles/index.hbs
index 8f54244a38bc..fc73331abfb2 100644
--- a/ui/lib/pki/addon/templates/roles/index.hbs
+++ b/ui/lib/pki/addon/templates/roles/index.hbs
@@ -44,13 +44,15 @@
data-test-popup-menu-trigger
/>
Details
+ />
Edit
+ />
diff --git a/ui/lib/replication/addon/components/replication-overview-mode.hbs b/ui/lib/replication/addon/components/replication-overview-mode.hbs
deleted file mode 100644
index 424b895fecaf..000000000000
--- a/ui/lib/replication/addon/components/replication-overview-mode.hbs
+++ /dev/null
@@ -1,49 +0,0 @@
-{{!
- Copyright (c) HashiCorp, Inc.
- SPDX-License-Identifier: BUSL-1.1
-~}}
-
-
-
+ {{/if}}
+ {{/if}}
+{{else}}
+ {{#if (eq this.attrsForCurrentMode.mode "initializing")}}
+ The cluster is initializing replication. This may take some time.
+ {{else}}
+
+ {{/if}}
{{/if}}
\ No newline at end of file
diff --git a/ui/lib/replication/addon/templates/index.hbs b/ui/lib/replication/addon/templates/index.hbs
index 9b2c9c326ebc..accee0a95a05 100644
--- a/ui/lib/replication/addon/templates/index.hbs
+++ b/ui/lib/replication/addon/templates/index.hbs
@@ -6,7 +6,6 @@
{{#if (eq this.model.mode "unsupported")}}
- {{! Replication is unsupported in non-enterprise or when using non-transactional storage (eg inmem) }}
@@ -99,36 +98,8 @@
@onSuccess={{this.onEnableSuccess}}
@doTransition={{true}}
/>
- {{else if (not (has-feature "DR Replication"))}}
-
- {{else if (and (eq this.model.dr.mode "primary") (eq this.model.performance.mode "primary"))}}
- {{! Renders when cluster is primary for both replication modes }}
-
-
-
-
-
-
-
{{else}}
- {{! Renders when at least one mode is not enabled }}
-
-
-
- Replication
-
-
-
-
-
-
-
-
+
{{/if}}
\ No newline at end of file
diff --git a/ui/lib/replication/addon/templates/mode/secondaries/index.hbs b/ui/lib/replication/addon/templates/mode/secondaries/index.hbs
index f47edb6433af..5f5b63cf30e8 100644
--- a/ui/lib/replication/addon/templates/mode/secondaries/index.hbs
+++ b/ui/lib/replication/addon/templates/mode/secondaries/index.hbs
@@ -38,16 +38,18 @@
/>
{{#if (eq this.replicationMode "performance")}}
Path filter config
+ />
{{/if}}
{{#if this.model.canRevokeSecondary}}
Revoke
+ />
{{/if}}
{{/if}}
diff --git a/ui/lib/sync/addon/components/secrets/destination-header.ts b/ui/lib/sync/addon/components/secrets/destination-header.ts
index e3293f968ea9..6da771e6155a 100644
--- a/ui/lib/sync/addon/components/secrets/destination-header.ts
+++ b/ui/lib/sync/addon/components/secrets/destination-header.ts
@@ -10,7 +10,7 @@ import errorMessage from 'vault/utils/error-message';
import type SyncDestinationModel from 'vault/models/sync/destination';
import type RouterService from '@ember/routing/router-service';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
import type FlashMessageService from 'vault/services/flash-messages';
interface Args {
@@ -18,8 +18,8 @@ interface Args {
}
export default class DestinationsTabsToolbar extends Component {
- @service('app-router') declare readonly router: RouterService;
- @service declare readonly pagination: PaginationService;
+ @service declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
@service declare readonly flashMessages: FlashMessageService;
@action
@@ -28,7 +28,7 @@ export default class DestinationsTabsToolbar extends Component {
const { destination } = this.args;
const message = `Destination ${destination.name} has been queued for deletion.`;
await destination.destroyRecord();
- this.pagination.clearDataset('sync/destination');
+ this.store.clearDataset('sync/destination');
this.router.transitionTo('vault.cluster.sync.secrets.overview');
this.flashMessages.success(message);
} catch (error) {
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations.hbs b/ui/lib/sync/addon/components/secrets/page/destinations.hbs
index 4cee4d329dd3..472716c98033 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations.hbs
+++ b/ui/lib/sync/addon/components/secrets/page/destinations.hbs
@@ -83,24 +83,27 @@
{{else}}
Details
+ />
{{#if destination.canEdit}}
Edit
+ />
{{/if}}
{{#if destination.canDelete}}
Delete
+ />
{{/if}}
{{/if}}
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations.ts b/ui/lib/sync/addon/components/secrets/page/destinations.ts
index d85cc1577f17..9f8423a7f510 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations.ts
+++ b/ui/lib/sync/addon/components/secrets/page/destinations.ts
@@ -14,7 +14,7 @@ import { next } from '@ember/runloop';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
import type RouterService from '@ember/routing/router-service';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
import type FlashMessageService from 'vault/services/flash-messages';
import type { EngineOwner } from 'vault/vault/app-types';
import type { SyncDestinationName, SyncDestinationType } from 'vault/vault/helpers/sync-destinations';
@@ -27,8 +27,8 @@ interface Args {
}
export default class SyncSecretsDestinationsPageComponent extends Component {
- @service('app-router') declare readonly router: RouterService;
- @service declare readonly pagination: PaginationService;
+ @service declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
@service declare readonly flashMessages: FlashMessageService;
@tracked destinationToDelete: SyncDestinationModel | null = null;
@@ -98,7 +98,7 @@ export default class SyncSecretsDestinationsPageComponent extends Component {
@service declare readonly flashMessages: FlashMessageService;
- @service('app-router') declare readonly router: RouterService;
- @service declare readonly pagination: PaginationService;
+ @service declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
@tracked modelValidations: ValidationMap | null = null;
@tracked invalidFormMessage = '';
@@ -95,7 +95,7 @@ export default class DestinationsCreateForm extends Component {
// if the user then attempts to update the record the credential will get overwritten with the masked placeholder value
// since the record will be fetched from the details route we can safely unload it to avoid the aforementioned issue
destination.unloadRecord();
- this.pagination.clearDataset('sync/destination');
+ this.store.clearDataset('sync/destination');
}
this.router.transitionTo(
'vault.cluster.sync.secrets.destinations.destination.details',
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.hbs b/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.hbs
index 06bed403ea7f..a950cfb3599e 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.hbs
+++ b/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.hbs
@@ -53,21 +53,26 @@
{{/if}}
{{#if association.canSync}}
-
- Sync now
+
{{/if}}
View secret
+ />
{{#if association.canUnsync}}
Unsync
+ />
{{/if}}
{{/if}}
diff --git a/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.ts b/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.ts
index abc369095fd6..3325083c2470 100644
--- a/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.ts
+++ b/ui/lib/sync/addon/components/secrets/page/destinations/destination/secrets.ts
@@ -13,7 +13,7 @@ import errorMessage from 'vault/utils/error-message';
import SyncDestinationModel from 'vault/vault/models/sync/destination';
import type SyncAssociationModel from 'vault/vault/models/sync/association';
import type RouterService from '@ember/routing/router-service';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
import type FlashMessageService from 'vault/services/flash-messages';
import type { EngineOwner } from 'vault/vault/app-types';
@@ -23,8 +23,8 @@ interface Args {
}
export default class SyncSecretsDestinationsPageComponent extends Component {
- @service('app-router') declare readonly router: RouterService;
- @service declare readonly pagination: PaginationService;
+ @service declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
@service declare readonly flashMessages: FlashMessageService;
@tracked secretToUnsync: SyncAssociationModel | null = null;
@@ -41,7 +41,7 @@ export default class SyncSecretsDestinationsPageComponent extends Component {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly store: StoreService;
@service declare readonly flashMessages: FlashMessageService;
- @service declare readonly pagination: PaginationService;
constructor(owner: unknown, args: Args) {
super(owner, args);
@@ -48,7 +46,7 @@ export default class DestinationSyncPageComponent extends Component {
}
willDestroy(): void {
- this.pagination.clearDataset('sync/association');
+ this.store.clearDataset('sync/association');
super.willDestroy();
}
diff --git a/ui/lib/sync/addon/components/secrets/page/overview.hbs b/ui/lib/sync/addon/components/secrets/page/overview.hbs
index f34072815f38..4433e3d8e94d 100644
--- a/ui/lib/sync/addon/components/secrets/page/overview.hbs
+++ b/ui/lib/sync/addon/components/secrets/page/overview.hbs
@@ -142,13 +142,15 @@
Sync secrets
+ />
View synced secrets
+ />
diff --git a/ui/lib/sync/addon/components/secrets/sync-activation-modal.ts b/ui/lib/sync/addon/components/secrets/sync-activation-modal.ts
index 2ccce7463d27..70fee2d750ba 100644
--- a/ui/lib/sync/addon/components/secrets/sync-activation-modal.ts
+++ b/ui/lib/sync/addon/components/secrets/sync-activation-modal.ts
@@ -24,7 +24,7 @@ interface Args {
export default class SyncActivationModal extends Component {
@service declare readonly flashMessages: FlashMessageService;
@service declare readonly store: StoreService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@tracked hasConfirmedDocs = false;
diff --git a/ui/lib/sync/addon/engine.js b/ui/lib/sync/addon/engine.js
index 20b3a173628b..f6cfda3760c2 100644
--- a/ui/lib/sync/addon/engine.js
+++ b/ui/lib/sync/addon/engine.js
@@ -14,7 +14,7 @@ export default class SyncEngine extends Engine {
modulePrefix = modulePrefix;
Resolver = Resolver;
dependencies = {
- services: ['flash-messages', 'flags', 'app-router', 'store', 'pagination', 'version'],
+ services: ['flash-messages', 'flags', 'router', 'store', 'version'],
externalRoutes: ['kvSecretOverview', 'clientCountOverview'],
};
}
diff --git a/ui/lib/sync/addon/routes/index.ts b/ui/lib/sync/addon/routes/index.ts
index 67243509e3e3..e73816a5e3e5 100644
--- a/ui/lib/sync/addon/routes/index.ts
+++ b/ui/lib/sync/addon/routes/index.ts
@@ -9,7 +9,7 @@ import { service } from '@ember/service';
import type RouterService from '@ember/routing/router-service';
export default class SyncIndexRoute extends Route {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
redirect() {
this.router.transitionTo('vault.cluster.sync.secrets.overview');
diff --git a/ui/lib/sync/addon/routes/secrets.ts b/ui/lib/sync/addon/routes/secrets.ts
index 2fbd5a368224..0a1a6ec296db 100644
--- a/ui/lib/sync/addon/routes/secrets.ts
+++ b/ui/lib/sync/addon/routes/secrets.ts
@@ -10,7 +10,7 @@ import type RouterService from '@ember/routing/router-service';
import type FlagService from 'vault/services/flags';
export default class SyncSecretsRoute extends Route {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly flags: FlagService;
model() {
diff --git a/ui/lib/sync/addon/routes/secrets/destinations/destination.ts b/ui/lib/sync/addon/routes/secrets/destinations/destination.ts
index 919b173fa0ce..21cd8a724459 100644
--- a/ui/lib/sync/addon/routes/secrets/destinations/destination.ts
+++ b/ui/lib/sync/addon/routes/secrets/destinations/destination.ts
@@ -19,7 +19,7 @@ interface RouteParams {
export default class SyncSecretsDestinationsDestinationRoute extends Route {
@service declare readonly store: Store;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly flashMessages: FlashMessageService;
model(params: RouteParams) {
diff --git a/ui/lib/sync/addon/routes/secrets/destinations/destination/index.ts b/ui/lib/sync/addon/routes/secrets/destinations/destination/index.ts
index d68f82ded1a4..660e186f4371 100644
--- a/ui/lib/sync/addon/routes/secrets/destinations/destination/index.ts
+++ b/ui/lib/sync/addon/routes/secrets/destinations/destination/index.ts
@@ -9,7 +9,7 @@ import { service } from '@ember/service';
import type RouterService from '@ember/routing/router-service';
export default class SyncSecretsDestinationsDestinationIndexRoute extends Route {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
redirect() {
this.router.transitionTo('vault.cluster.sync.secrets.destinations.destination.details');
diff --git a/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts b/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts
index f2ba3a91bd63..6fcdda8d99d3 100644
--- a/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts
+++ b/ui/lib/sync/addon/routes/secrets/destinations/destination/secrets.ts
@@ -7,7 +7,7 @@ import Route from '@ember/routing/route';
import { service } from '@ember/service';
import { hash } from 'rsvp';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
import type SyncAssociationModel from 'vault/vault/models/sync/association';
import type Controller from '@ember/controller';
@@ -27,7 +27,7 @@ interface SyncDestinationSecretsController extends Controller {
}
export default class SyncDestinationSecretsRoute extends Route {
- @service declare readonly pagination: PaginationService;
+ @service declare readonly store: StoreService;
queryParams = {
page: {
@@ -39,7 +39,7 @@ export default class SyncDestinationSecretsRoute extends Route {
const destination = this.modelFor('secrets.destinations.destination') as SyncDestinationModel;
return hash({
destination,
- associations: this.pagination.lazyPaginatedQuery('sync/association', {
+ associations: this.store.lazyPaginatedQuery('sync/association', {
responsePath: 'data.keys',
page: Number(params.page) || 1,
destinationType: destination.type,
diff --git a/ui/lib/sync/addon/routes/secrets/destinations/index.ts b/ui/lib/sync/addon/routes/secrets/destinations/index.ts
index 5508bd849d77..7634cd5a9db1 100644
--- a/ui/lib/sync/addon/routes/secrets/destinations/index.ts
+++ b/ui/lib/sync/addon/routes/secrets/destinations/index.ts
@@ -7,7 +7,7 @@ import Route from '@ember/routing/route';
import { service } from '@ember/service';
import { hash } from 'rsvp';
-import type PaginationService from 'vault/services/pagination';
+import type StoreService from 'vault/services/store';
import type RouterService from '@ember/routing/router-service';
import type { ModelFrom } from 'vault/vault/route';
import type SyncDestinationModel from 'vault/vault/models/sync/destination';
@@ -33,8 +33,8 @@ interface SyncSecretsDestinationsController extends Controller {
}
export default class SyncSecretsDestinationsIndexRoute extends Route {
- @service declare readonly pagination: PaginationService;
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly store: StoreService;
+ @service declare readonly router: RouterService;
queryParams = {
page: {
@@ -73,7 +73,7 @@ export default class SyncSecretsDestinationsIndexRoute extends Route {
async model(params: SyncSecretsDestinationsIndexRouteParams) {
const { name, type, page } = params;
return hash({
- destinations: this.pagination.lazyPaginatedQuery('sync/destination', {
+ destinations: this.store.lazyPaginatedQuery('sync/destination', {
page: Number(page) || 1,
pageFilter: (dataset: Array) => this.filterData(dataset, name, type),
responsePath: 'data.keys',
diff --git a/ui/lib/sync/addon/routes/secrets/overview.ts b/ui/lib/sync/addon/routes/secrets/overview.ts
index 7ebeffc1977b..75b90ec50263 100644
--- a/ui/lib/sync/addon/routes/secrets/overview.ts
+++ b/ui/lib/sync/addon/routes/secrets/overview.ts
@@ -13,7 +13,7 @@ import type StoreService from 'vault/services/store';
import type VersionService from 'vault/services/version';
export default class SyncSecretsOverviewRoute extends Route {
- @service('app-router') declare readonly router: RouterService;
+ @service declare readonly router: RouterService;
@service declare readonly store: StoreService;
@service declare readonly flags: FlagsService;
@service declare readonly version: VersionService;
diff --git a/ui/mirage/handlers/ldap.js b/ui/mirage/handlers/ldap.js
index 47fb433de204..07d772874c71 100644
--- a/ui/mirage/handlers/ldap.js
+++ b/ui/mirage/handlers/ldap.js
@@ -56,6 +56,17 @@ export default function (server) {
return getRecord(schema, req, 'ldapRoles', type);
};
+ // mount
+ server.post('/sys/mounts/:path', () => new Response(204));
+ server.get('/sys/internal/ui/mounts/:path', () => ({
+ data: {
+ accessor: 'ldap_ade94329',
+ type: 'ldap',
+ path: 'ldap-test/',
+ uuid: '35e9119d-5708-4b6b-58d2-f913e27f242d',
+ config: {},
+ },
+ }));
// config
server.post('/:backend/config', (schema, req) => createOrUpdateRecord(schema, req, 'ldapConfigs'));
server.get('/:backend/config', (schema, req) => getRecord(schema, req, 'ldapConfigs'));
@@ -79,60 +90,8 @@ export default function (server) {
server.post('/:backend/library/:name', (schema, req) => createOrUpdateRecord(schema, req, 'ldapLibraries'));
server.get('/:backend/library/:name', (schema, req) => getRecord(schema, req, 'ldapLibraries'));
server.get('/:backend/library', (schema) => listRecords(schema, 'ldapLibraries'));
- server.get('/:backend/library/:name/status', (schema) => {
- const data = schema.db['ldapAccountStatuses'].reduce((prev, curr) => {
- prev[curr.account] = {
- available: curr.available,
- borrower_client_token: curr.borrower_client_token,
- };
- return prev;
- }, {});
- return { data };
- });
- // check-out / check-in
- server.post('/:backend/library/:set_name/check-in', (schema, req) => {
- // Check-in makes an unavailable account available again
- const { service_account_names } = JSON.parse(req.requestBody);
- const dbCollection = schema.db['ldapAccountStatuses'];
- const updated = dbCollection.find(service_account_names).map((f) => ({
- ...f,
- available: true,
- borrower_client_token: undefined,
- }));
- updated.forEach((u) => {
- dbCollection.update(u.id, u);
- });
- return {
- data: {
- check_ins: service_account_names,
- },
- };
- });
- server.post('/:backend/library/:set_name/check-out', (schema, req) => {
- const { set_name, backend } = req.params;
- const dbCollection = schema.db['ldapAccountStatuses'];
- const available = dbCollection.where({ available: true });
- if (available) {
- return Response(404, {}, { errors: ['no accounts available to check out'] });
- }
- const checkOut = {
- ...available[0],
- available: false,
- borrower_client_token: crypto.randomUUID(),
- };
- dbCollection.update(checkOut.id, checkOut);
- return {
- request_id: '364a17d4-e5ab-998b-ceee-b49929229e0c',
- lease_id: `${backend}/library/${set_name}/check-out/aoBsaBEI4PK96VnukubvYDlZ`,
- renewable: true,
- lease_duration: 36000,
- data: {
- password: crypto.randomUUID(),
- service_account_name: checkOut.account,
- },
- wrap_info: null,
- warnings: null,
- auth: null,
- };
- });
+ server.get('/:backend/library/:name/status', () => ({
+ 'bob.johnson': { available: false, borrower_client_token: '8b80c305eb3a7dbd161ef98f10ea60a116ce0910' },
+ 'mary.smith': { available: true },
+ }));
}
diff --git a/ui/mirage/models/ldap-account-status.js b/ui/mirage/models/ldap-account-status.js
deleted file mode 100644
index 557261f6cd78..000000000000
--- a/ui/mirage/models/ldap-account-status.js
+++ /dev/null
@@ -1,13 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { Model } from 'miragejs';
-
-export default Model.extend({
- account: '', // should match ID
- library: '',
- available: false,
- borrower_client_token: undefined,
-});
diff --git a/ui/mirage/scenarios/ldap.js b/ui/mirage/scenarios/ldap.js
index cff48bd84df1..98d370f91d9e 100644
--- a/ui/mirage/scenarios/ldap.js
+++ b/ui/mirage/scenarios/ldap.js
@@ -4,7 +4,7 @@
*/
export default function (server) {
- server.create('ldap-config', { path: 'kubernetes', backend: 'ldap-test' });
+ server.create('ldap-config', { path: 'kubernetes' });
server.create('ldap-role', 'static', { name: 'static-role' });
server.create('ldap-role', 'dynamic', { name: 'dynamic-role' });
// hierarchical roles
@@ -14,15 +14,4 @@ export default function (server) {
server.create('ldap-role', 'static', { name: 'my-role' });
server.create('ldap-role', 'dynamic', { name: 'my-role' });
server.create('ldap-library', { name: 'test-library' });
- server.create('ldap-account-status', {
- id: 'bob.johnson',
- account: 'bob.johnson',
- available: false,
- borrower_client_token: '8b80c305eb3a7dbd161ef98f10ea60a116ce0910',
- });
- server.create('ldap-account-status', {
- id: 'mary.smith',
- account: 'mary.smith',
- available: true,
- });
}
diff --git a/ui/package.json b/ui/package.json
index f85053656d50..34d2e392e54c 100644
--- a/ui/package.json
+++ b/ui/package.json
@@ -66,6 +66,7 @@
"@babel/preset-env": "^7.24.6",
"@babel/preset-typescript": "^7.24.6",
"@docfy/ember": "^0.8.5",
+ "@ember-data/legacy-compat": "~4.12.4",
"@ember/legacy-built-in-components": "^0.4.1",
"@ember/optional-features": "^2.0.0",
"@ember/render-modifiers": "^1.0.2",
@@ -105,7 +106,7 @@
"dompurify": "^3.0.2",
"ember-a11y-testing": "^7.0.1",
"ember-basic-dropdown": "^8.0.4",
- "ember-cli": "~5.8.0",
+ "ember-cli": "~5.4.2",
"ember-cli-babel": "^8.2.0",
"ember-cli-clean-css": "^3.0.0",
"ember-cli-content-security-policy": "2.0.3",
@@ -122,7 +123,7 @@
"ember-cli-terser": "^4.0.2",
"ember-composable-helpers": "5.0.0",
"ember-concurrency": "^4.0.2",
- "ember-data": "~5.3.2",
+ "ember-data": "~4.12.4",
"ember-engines": "0.8.23",
"ember-exam": "^9.0.0",
"ember-inflector": "4.0.2",
@@ -135,11 +136,12 @@
"ember-qunit": "^8.0.1",
"ember-resolver": "^11.0.1",
"ember-responsive": "5.0.0",
+ "ember-router-helpers": "^0.4.0",
"ember-service-worker": "meirish/ember-service-worker#configurable-scope",
"ember-sinon-qunit": "^7.4.0",
- "ember-source": "~5.8.0",
+ "ember-source": "~5.4.0",
"ember-style-modifier": "^4.1.0",
- "ember-svg-jar": "2.6.0",
+ "ember-svg-jar": "2.4.4",
"ember-template-lint": "^6.0.0",
"ember-template-lint-plugin-prettier": "^5.0.0",
"ember-test-selectors": "6.0.0",
@@ -182,7 +184,7 @@
"tracked-built-ins": "^3.3.0",
"typescript": "^5.4.5",
"walk-sync": "^2.0.2",
- "webpack": "5.94.0"
+ "webpack": "5.89.0"
},
"resolutions": {
"ansi-html": "^0.0.8",
@@ -194,7 +196,6 @@
"https-proxy-agent": "^2.2.3",
"ini": "^1.3.6",
"kind-of": "^6.0.3",
- "micromatch": "~4.0.8",
"node-notifier": "^8.0.1",
"nth-check": "^2.0.1",
"prismjs": "^1.21.0",
@@ -233,7 +234,7 @@
},
"dependencies": {
"@babel/core": "^7.24.0",
- "@hashicorp/design-system-components": "~4.13.0",
+ "@hashicorp/design-system-components": "~4.7.0",
"@hashicorp/ember-flight-icons": "^5.1.3",
"ember-auto-import": "^2.7.2",
"handlebars": "4.7.7",
diff --git a/ui/tests/acceptance/access/identity/_shared-tests.js b/ui/tests/acceptance/access/identity/_shared-tests.js
index 5659db01298d..124ea932d392 100644
--- a/ui/tests/acceptance/access/identity/_shared-tests.js
+++ b/ui/tests/acceptance/access/identity/_shared-tests.js
@@ -13,7 +13,7 @@ import { GENERAL } from 'vault/tests/helpers/general-selectors';
const SELECTORS = {
identityRow: (name) => `[data-test-identity-row="${name}"]`,
popupMenu: '[data-test-popup-menu-trigger]',
- menuDelete: (name) => `[data-test-identity-row="${name}"] [data-test-popup-menu="delete"]`,
+ menuDelete: '[data-test-popup-menu="delete"]',
};
export const testCRUD = async (name, itemType, assert) => {
await page.visit({ item_type: itemType });
@@ -36,8 +36,8 @@ export const testCRUD = async (name, itemType, assert) => {
);
await click(`${SELECTORS.identityRow(name)} ${SELECTORS.popupMenu}`);
- await waitUntil(() => find(SELECTORS.menuDelete(name)));
- await click(SELECTORS.menuDelete(name));
+ await waitUntil(() => find(SELECTORS.menuDelete));
+ await click(SELECTORS.menuDelete);
await indexPage.confirmDelete();
await settled();
assert.dom(GENERAL.latestFlashContent).includesText('Successfully deleted');
@@ -57,7 +57,7 @@ export const testDeleteFromForm = async (name, itemType, assert) => {
await click('[data-test-tab-subnav="policies"]');
assert.dom('.list-item-row').exists({ count: 1 }, 'One item is under policies');
await click('[data-test-tab-subnav="metadata"]');
- assert.dom('.info-table-row').hasTextContaining('hello goodbye', 'Metadata shows on tab');
+ assert.dom('.info-table-row').hasText('hello goodbye', 'Metadata shows on tab');
await showPage.edit();
assert.strictEqual(
currentRouteName(),
diff --git a/ui/tests/acceptance/access/methods-test.js b/ui/tests/acceptance/access/methods-test.js
index 72e31a150966..414d064acc89 100644
--- a/ui/tests/acceptance/access/methods-test.js
+++ b/ui/tests/acceptance/access/methods-test.js
@@ -3,17 +3,22 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { currentRouteName, click, find, findAll, visit } from '@ember/test-helpers';
+import { currentRouteName, click } from '@ember/test-helpers';
import { clickTrigger } from 'ember-power-select/test-support/helpers';
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { setupMirage } from 'ember-cli-mirage/test-support';
+import { create } from 'ember-cli-page-object';
+import page from 'vault/tests/pages/access/methods';
+import authEnable from 'vault/tests/pages/settings/auth/enable';
+import authPage from 'vault/tests/pages/auth';
+import ss from 'vault/tests/pages/components/search-select';
+import consoleClass from 'vault/tests/pages/components/console/ui-panel';
+
import { v4 as uuidv4 } from 'uuid';
-import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { mountAuthCmd, runCmd } from 'vault/tests/helpers/commands';
-import { login } from 'vault/tests/helpers/auth/auth-helpers';
-const { searchSelect } = GENERAL;
+const consoleComponent = create(consoleClass);
+const searchSelect = create(ss);
module('Acceptance | auth-methods list view', function (hooks) {
setupApplicationTest(hooks);
@@ -21,13 +26,14 @@ module('Acceptance | auth-methods list view', function (hooks) {
hooks.beforeEach(function () {
this.uid = uuidv4();
- return login();
+ return authPage.login();
});
test('it navigates to auth method', async function (assert) {
- await visit('/vault/access/');
+ await page.visit();
assert.strictEqual(currentRouteName(), 'vault.cluster.access.methods', 'navigates to the correct route');
- assert.dom('[data-test-sidebar-nav-link="Authentication Methods"]').hasClass('active');
+ assert.ok(page.methodsLink.isActive, 'the first link is active');
+ assert.strictEqual(page.methodsLink.text, 'Authentication Methods');
});
test('it filters by name and auth type', async function (assert) {
@@ -35,52 +41,50 @@ module('Acceptance | auth-methods list view', function (hooks) {
const authPath1 = `userpass-1-${this.uid}`;
const authPath2 = `userpass-2-${this.uid}`;
const type = 'userpass';
- await visit('/vault/settings/auth/enable');
- await runCmd(mountAuthCmd(type, authPath1));
- await visit('/vault/settings/auth/enable');
- await runCmd(mountAuthCmd(type, authPath2));
- await visit('/vault/access/');
-
+ await authEnable.visit();
+ await authEnable.enable(type, authPath1);
+ await authEnable.visit();
+ await authEnable.enable(type, authPath2);
+ await page.visit();
// filter by auth type
+
await clickTrigger('#filter-by-auth-type');
- await click(searchSelect.option(searchSelect.optionIndex(type)));
- let rows = findAll('[data-test-auth-backend-link]');
+ await searchSelect.options.objectAt(0).click();
+ const rows = document.querySelectorAll('[data-test-auth-backend-link]');
const rowsUserpass = Array.from(rows).filter((row) => row.innerText.includes('userpass'));
assert.strictEqual(rows.length, rowsUserpass.length, 'all rows returned are userpass');
// filter by name
await clickTrigger('#filter-by-auth-name');
- await click(searchSelect.option());
- const selectedItem = find(`#filter-by-auth-name ${searchSelect.selectedOption()}`).innerText;
- const singleRow = findAll('[data-test-auth-backend-link]');
+ const firstItemToSelect = searchSelect.options.objectAt(0).text;
+ await searchSelect.options.objectAt(0).click();
+ const singleRow = document.querySelectorAll('[data-test-auth-backend-link]');
assert.strictEqual(singleRow.length, 1, 'returns only one row');
- assert.dom(singleRow[0]).includesText(selectedItem, 'shows the filtered by auth name');
- // clear filter by name
- await click(`#filter-by-auth-name ${searchSelect.removeSelected}`);
- rows = findAll('[data-test-auth-backend-link]');
- assert.true(rows.length > 1, 'filter has been removed');
+ assert.dom(singleRow[0]).includesText(firstItemToSelect, 'shows the filtered by auth name');
+ // clear filter by engine name
+ await searchSelect.deleteButtons.objectAt(1).click();
+ const rowsAgain = document.querySelectorAll('[data-test-auth-backend-link]');
+ assert.ok(rowsAgain.length > 1, 'filter has been removed');
// cleanup
- await runCmd(`delete sys/auth/${authPath1}`);
- await runCmd(`delete sys/auth/${authPath2}`);
+ await consoleComponent.runCommands([`delete sys/auth/${authPath1}`]);
+ await consoleComponent.runCommands([`delete sys/auth/${authPath2}`]);
});
test('it should show all methods in list view', async function (assert) {
- this.server.get('/sys/internal/ui/mounts', () => ({
+ this.server.get('/sys/auth', () => ({
data: {
- auth: {
- 'token/': { accessor: 'auth_token_263b8b4e', type: 'token' },
- 'userpass/': { accessor: 'auth_userpass_87aca1f8', type: 'userpass' },
- },
+ 'token/': { accessor: 'auth_token_263b8b4e', type: 'token' },
+ 'userpass/': { accessor: 'auth_userpass_87aca1f8', type: 'userpass' },
},
}));
- await visit('/vault/access/');
+ await page.visit();
assert.dom('[data-test-auth-backend-link]').exists({ count: 2 }, 'All auth methods appear in list view');
- await visit('/vault/settings/auth/enable');
+ await authEnable.visit();
await click('[data-test-sidebar-nav-link="OIDC Provider"]');
- await visit('/vault/access/');
+ await page.visit();
assert
.dom('[data-test-auth-backend-link]')
.exists({ count: 2 }, 'All auth methods appear in list view after navigating back');
diff --git a/ui/tests/acceptance/auth-list-test.js b/ui/tests/acceptance/auth-list-test.js
index ce0ff82c66e9..eb28ba685451 100644
--- a/ui/tests/acceptance/auth-list-test.js
+++ b/ui/tests/acceptance/auth-list-test.js
@@ -8,7 +8,7 @@ import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { v4 as uuidv4 } from 'uuid';
-import { login, loginNs } from 'vault/tests/helpers/auth/auth-helpers';
+import authPage from 'vault/tests/pages/auth';
import enablePage from 'vault/tests/pages/settings/auth/enable';
import { supportedManagedAuthBackends } from 'vault/helpers/supported-managed-auth-backends';
import { deleteAuthCmd, mountAuthCmd, runCmd, createNS } from 'vault/tests/helpers/commands';
@@ -26,7 +26,7 @@ module('Acceptance | auth backend list', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(async function () {
- await login();
+ await authPage.login();
this.path1 = `userpass-${uuidv4()}`;
this.path2 = `userpass-${uuidv4()}`;
this.user1 = 'user1';
@@ -36,16 +36,16 @@ module('Acceptance | auth backend list', function (hooks) {
});
hooks.afterEach(async function () {
- await login();
+ await authPage.login();
await runCmd([deleteAuthCmd(this.path1), deleteAuthCmd(this.path2)], false);
return;
});
test('userpass secret backend', async function (assert) {
+ assert.expect(5);
// enable a user in first userpass backend
await visit('/vault/access');
await click(SELECTORS.backendLink(this.path1));
- assert.dom(GENERAL.emptyStateTitle).exists('shows empty state');
await click(SELECTORS.createUser);
await fillIn(GENERAL.inputByAttr('username'), this.user1);
await fillIn(GENERAL.inputByAttr('password'), this.user1);
@@ -57,12 +57,12 @@ module('Acceptance | auth backend list', function (hooks) {
// enable a user in second userpass backend
await click(SELECTORS.backendLink(this.path2));
- assert.dom(GENERAL.emptyStateTitle).exists('shows empty state');
await click(SELECTORS.createUser);
await fillIn(GENERAL.inputByAttr('username'), this.user2);
await fillIn(GENERAL.inputByAttr('password'), this.user2);
await click(SELECTORS.saveBtn);
assert.strictEqual(currentURL(), `/vault/access/${this.path2}/item/user`);
+
// Confirm that the user was created. There was a bug where the apiPath was not being updated when toggling between auth routes.
assert.dom(SELECTORS.listItem).hasText(this.user2, 'user2 exists in the list');
@@ -95,7 +95,7 @@ module('Acceptance | auth backend list', function (hooks) {
const itemCount = type === 'token' ? 2 : 3;
await click(`[data-test-auth-backend-link="${path}"] [data-test-popup-menu-trigger]`);
assert
- .dom(`[data-test-auth-backend-link="${path}"] .hds-dropdown-list-item`)
+ .dom('.hds-dropdown-list-item')
.exists({ count: itemCount }, `shows ${itemCount} dropdown items for ${type}`);
// all auth methods should be linkable
@@ -132,7 +132,7 @@ module('Acceptance | auth backend list', function (hooks) {
// Only SAML is enterprise-only for now
const type = 'saml';
const path = `auth-list-${type}-${uid}`;
- await runCmd([mountAuthCmd(type, path), 'refresh']);
+ await enablePage.enable(type, path);
await settled();
await visit('/vault/access');
@@ -150,7 +150,7 @@ module('Acceptance | auth backend list', function (hooks) {
const ns = 'ns-wxyz';
await runCmd(createNS(ns), false);
await settled();
- await loginNs(ns);
+ await authPage.loginNs(ns);
// go directly to token configure route
await visit('/vault/settings/auth/configure/token/options');
await fillIn('[data-test-input="description"]', 'My custom description');
diff --git a/ui/tests/acceptance/auth/test-helper.js b/ui/tests/acceptance/auth/test-helper.js
index 531530d0afe8..dc0c7b8fff45 100644
--- a/ui/tests/acceptance/auth/test-helper.js
+++ b/ui/tests/acceptance/auth/test-helper.js
@@ -3,10 +3,13 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { click, currentURL } from '@ember/test-helpers';
+import { click, currentURL, fillIn } from '@ember/test-helpers';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { MOUNT_BACKEND_FORM } from 'vault/tests/helpers/components/mount-backend-form-selectors';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
+
+const SELECTORS = {
+ mountType: (name) => `[data-test-mount-type="${name}"]`,
+ submit: '[data-test-mount-submit]',
+};
const assertFields = (assert, fields, customSelectors = {}) => {
fields.forEach((param) => {
@@ -19,14 +22,16 @@ const assertFields = (assert, fields, customSelectors = {}) => {
};
export default (test) => {
test('it renders mount fields', async function (assert) {
- await click(MOUNT_BACKEND_FORM.mountType(this.type));
+ await click(SELECTORS.mountType(this.type));
await click(GENERAL.toggleGroup('Method Options'));
assertFields(assert, this.mountFields, this.customSelectors);
});
test('it renders tune fields', async function (assert) {
// enable auth method to check tune fields
- await mountBackend(this.type, this.path);
+ await click(SELECTORS.mountType(this.type));
+ await fillIn(GENERAL.inputByAttr('path'), this.path);
+ await click(SELECTORS.submit);
assert.strictEqual(
currentURL(),
`/vault/settings/auth/configure/${this.path}/configuration`,
diff --git a/ui/tests/acceptance/enterprise-kmip-test.js b/ui/tests/acceptance/enterprise-kmip-test.js
index 8d3eac31a8f5..a2f743227640 100644
--- a/ui/tests/acceptance/enterprise-kmip-test.js
+++ b/ui/tests/acceptance/enterprise-kmip-test.js
@@ -16,13 +16,12 @@ import {
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
-import { login } from 'vault/tests/helpers/auth/auth-helpers';
+import authPage from 'vault/tests/pages/auth';
import scopesPage from 'vault/tests/pages/secrets/backend/kmip/scopes';
import rolesPage from 'vault/tests/pages/secrets/backend/kmip/roles';
import credentialsPage from 'vault/tests/pages/secrets/backend/kmip/credentials';
import mountSecrets from 'vault/tests/pages/settings/mount-secret-backend';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
import { allEngines } from 'vault/helpers/mountable-secret-engines';
import { mountEngineCmd, runCmd } from 'vault/tests/helpers/commands';
import { v4 as uuidv4 } from 'uuid';
@@ -30,9 +29,18 @@ import { v4 as uuidv4 } from 'uuid';
// port has a lower limit of 1024
const getRandomPort = () => Math.floor(Math.random() * 5000 + 1024);
+const mount = async (backend) => {
+ const res = await runCmd(`write sys/mounts/${backend} type=kmip`);
+ await settled();
+ if (res.includes('Error')) {
+ throw new Error(`Error mounting secrets engine: ${res}`);
+ }
+ return backend;
+};
+
const mountWithConfig = async (backend) => {
const addr = `127.0.0.1:${getRandomPort()}`;
- await runCmd(mountEngineCmd('kmip', backend), false);
+ await mount(backend);
const res = await runCmd(`write ${backend}/config listen_addrs=${addr}`);
if (res.includes('Error')) {
throw new Error(`Error configuring KMIP: ${res}`);
@@ -83,7 +91,7 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
hooks.beforeEach(async function () {
this.backend = `kmip-${uuidv4()}`;
- await login();
+ await authPage.login();
return;
});
@@ -97,8 +105,8 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
const engine = allEngines().find((e) => e.type === 'kmip');
await mountSecrets.visit();
- await mountBackend(engine.type, `${engine.type}-${uuidv4()}`);
-
+ await mountSecrets.selectType(engine.type);
+ await mountSecrets.path(this.backend).submit();
assert.strictEqual(
currentRouteName(),
`vault.cluster.secrets.backend.${engine.engineRoute}`,
@@ -108,8 +116,7 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
});
test('it can configure a KMIP secrets engine', async function (assert) {
- await runCmd(mountEngineCmd('kmip', this.backend));
- const backend = this.backend;
+ const backend = await mount(this.backend);
await scopesPage.visit({ backend });
await settled();
await scopesPage.configurationLink();
@@ -214,11 +221,16 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
test('it can create a role', async function (assert) {
// moving create scope here to help with flaky test
- const scope = `scope-for-can-create-role`;
- const role = `role-new-role`;
const backend = await mountWithConfig(this.backend);
await settled();
- await runCmd([`write ${backend}/scope/${scope} -force`], true);
+ const scope = `scope-for-can-create-role`;
+ await settled();
+ const res = await runCmd([`write ${backend}/scope/${scope} -force`]);
+ await settled();
+ if (res.includes('Error')) {
+ throw new Error(`Error creating scope: ${res}`);
+ }
+ const role = `role-new-role`;
await rolesPage.visit({ backend, scope });
await settled();
assert.ok(rolesPage.isEmpty, 'renders the empty role page');
@@ -229,15 +241,11 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
`/vault/secrets/${backend}/kmip/scopes/${scope}/roles/create`,
'links to the role create form'
);
- // check that the role form looks right
- assert.dom(GENERAL.inputByAttr('operationNone')).isChecked('allows role to perform roles by default');
- assert.dom(GENERAL.inputByAttr('operationAll')).isChecked('operationAll is checked by default');
- assert.dom('[data-test-kmip-section]').exists({ count: 2 });
- assert.dom('[data-test-kmip-operations]').exists({ count: 4 });
await rolesPage.roleName(role);
await settled();
- await click(GENERAL.saveButton);
+ await rolesPage.submit();
+ await settled();
assert.strictEqual(
currentURL(),
`/vault/secrets/${backend}/kmip/scopes/${scope}/roles`,
@@ -245,13 +253,6 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
);
assert.strictEqual(rolesPage.listItemLinks.length, 1, 'renders a single role');
- await rolesPage.visitDetail({ backend, scope, role });
- // check that the role details looks right
- assert.dom('h2').exists({ count: 2 }, 'renders correct section headings');
- assert.dom('[data-test-inline-error-message]').hasText('This role allows all KMIP operations');
- ['Managed Cryptographic Objects', 'Object Attributes', 'Server', 'Other'].forEach((title) => {
- assert.dom(`[data-test-row-label="${title}"]`).exists(`Renders allowed operations row for: ${title}`);
- });
});
test('it navigates to kmip roles view using breadcrumbs', async function (assert) {
@@ -303,7 +304,8 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
`/vault/secrets/${backend}/kmip/scopes/${scope}/roles/${role}/edit`,
'navigates to role edit'
);
- await click(GENERAL.cancelButton);
+ await rolesPage.cancelLink();
+ await settled();
assert.strictEqual(
currentURL(),
`/vault/secrets/${backend}/kmip/scopes/${scope}/roles/${role}`,
@@ -362,12 +364,12 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
this.store = this.owner.lookup('service:store');
this.scope = 'my-scope';
this.name = 'my-role';
- await login();
+ await authPage.login();
await runCmd(mountEngineCmd('kmip', this.backend), false);
await runCmd([`write ${this.backend}/scope/${this.scope} -force`]);
await rolesPage.visit({ backend: this.backend, scope: this.scope });
this.setModel = async () => {
- await click(GENERAL.saveButton);
+ await click('[data-test-edit-form-submit]');
await visit(`/vault/secrets/${this.backend}/kmip/scopes/${this.scope}/roles/${this.name}`);
this.model = this.store.peekRecord('kmip/role', this.name);
};
@@ -380,7 +382,7 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
assert.expect(3);
await click('[data-test-role-create]');
- await fillIn(GENERAL.inputByAttr('role'), this.name);
+ await fillIn(GENERAL.inputByAttr('name'), this.name);
assert.dom(GENERAL.inputByAttr('operationAll')).isChecked('operationAll is checked by default');
await this.setModel();
assert.true(this.model.operationAll, 'operationAll is true');
@@ -391,7 +393,7 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
assert.expect(4);
await click('[data-test-role-create]');
- await fillIn(GENERAL.inputByAttr('role'), this.name);
+ await fillIn(GENERAL.inputByAttr('name'), this.name);
await click(GENERAL.inputByAttr('operationNone'));
assert
.dom(GENERAL.inputByAttr('operationNone'))
@@ -408,10 +410,9 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
assert.expect(2);
await click('[data-test-role-create]');
- await fillIn(GENERAL.inputByAttr('role'), this.name);
+ await fillIn(GENERAL.inputByAttr('name'), this.name);
await click(GENERAL.inputByAttr('operationAll'));
await this.setModel();
-
assert.strictEqual(this.model.operationAll, undefined, 'operationAll is unset');
assert.true(this.model.operationNone, 'operationNone is true');
});
@@ -420,7 +421,7 @@ module('Acceptance | Enterprise | KMIP secrets', function (hooks) {
assert.expect(6);
await click('[data-test-role-create]');
- await fillIn(GENERAL.inputByAttr('role'), this.name);
+ await fillIn(GENERAL.inputByAttr('name'), this.name);
await click(GENERAL.inputByAttr('operationAll'));
await click(GENERAL.inputByAttr('operationGet'));
await click(GENERAL.inputByAttr('operationGetAttributes'));
diff --git a/ui/tests/acceptance/enterprise-kmse-test.js b/ui/tests/acceptance/enterprise-kmse-test.js
index 19dc08342870..c1f503b81378 100644
--- a/ui/tests/acceptance/enterprise-kmse-test.js
+++ b/ui/tests/acceptance/enterprise-kmse-test.js
@@ -10,7 +10,6 @@ import authPage from 'vault/tests/pages/auth';
import mountSecrets from 'vault/tests/pages/settings/mount-secret-backend';
import { setupMirage } from 'ember-cli-mirage/test-support';
import { allEngines } from 'vault/helpers/mountable-secret-engines';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
import { runCmd } from '../helpers/commands';
module('Acceptance | Enterprise | keymgmt', function (hooks) {
@@ -28,7 +27,9 @@ module('Acceptance | Enterprise | keymgmt', function (hooks) {
// delete any previous mount with same name
await runCmd([`delete sys/mounts/${engine.type}`]);
await mountSecrets.visit();
- await mountBackend(engine.type, engine.type);
+ await mountSecrets.selectType(engine.type);
+ await mountSecrets.path(engine.type);
+ await mountSecrets.submit();
assert.strictEqual(
currentRouteName(),
diff --git a/ui/tests/acceptance/enterprise-transform-test.js b/ui/tests/acceptance/enterprise-transform-test.js
index 37347a89a981..18d370241455 100644
--- a/ui/tests/acceptance/enterprise-transform-test.js
+++ b/ui/tests/acceptance/enterprise-transform-test.js
@@ -5,7 +5,7 @@
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
-import { currentURL, click, settled, currentRouteName, visit } from '@ember/test-helpers';
+import { currentURL, click, settled, currentRouteName } from '@ember/test-helpers';
import { create } from 'ember-cli-page-object';
import { selectChoose } from 'ember-power-select/test-support';
import { typeInSearch, clickTrigger } from 'ember-power-select/test-support/helpers';
@@ -19,11 +19,16 @@ import alphabetsPage from 'vault/tests/pages/secrets/backend/transform/alphabets
import searchSelect from 'vault/tests/pages/components/search-select';
import { runCmd } from '../helpers/commands';
import { allEngines } from 'vault/helpers/mountable-secret-engines';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
-import { v4 as uuidv4 } from 'uuid';
const searchSelectComponent = create(searchSelect);
+const mount = async () => {
+ const path = `transform-${Date.now()}`;
+ await mountSecrets.enable('transform', path);
+ await settled();
+ return path;
+};
+
const newTransformation = async (backend, name, submit = false) => {
const transformationName = name || 'foo';
await transformationsPage.visitCreate({ backend });
@@ -70,7 +75,9 @@ module('Acceptance | Enterprise | Transform secrets', function (hooks) {
// delete any previous mount with same name
await runCmd([`delete sys/mounts/${engine.type}`]);
await mountSecrets.visit();
- await mountBackend(engine.type, engine.type);
+ await mountSecrets.selectType(engine.type);
+ await mountSecrets.path(engine.type);
+ await mountSecrets.submit();
assert.strictEqual(
currentRouteName(),
@@ -101,9 +108,7 @@ module('Acceptance | Enterprise | Transform secrets', function (hooks) {
});
test('it can create a transformation and add itself to the role attached', async function (assert) {
- await visit('/vault/settings/mount-secret-backend');
- const backend = `transform-${uuidv4()}`;
- await mountBackend('transform', backend);
+ const backend = await mount();
const transformationName = 'foo';
const roleName = 'foo-role';
await settled();
@@ -153,9 +158,7 @@ module('Acceptance | Enterprise | Transform secrets', function (hooks) {
test('it can create a role and add itself to the transformation attached', async function (assert) {
const roleName = 'my-role';
- await visit('/vault/settings/mount-secret-backend');
- const backend = `transform-${uuidv4()}`;
- await mountBackend('transform', backend);
+ const backend = await mount();
// create transformation without role
await newTransformation(backend, 'a-transformation', true);
await click(`[data-test-secret-breadcrumb="${backend}"] a`);
@@ -194,9 +197,7 @@ module('Acceptance | Enterprise | Transform secrets', function (hooks) {
test('it adds a role to a transformation when added to a role', async function (assert) {
const roleName = 'role-test';
- await visit('/vault/settings/mount-secret-backend');
- const backend = `transform-${uuidv4()}`;
- await mountBackend('transform', backend);
+ const backend = await mount();
const transformation = await newTransformation(backend, 'b-transformation', true);
await newRole(backend, roleName);
await transformationsPage.visitShow({ backend, id: transformation });
@@ -206,9 +207,7 @@ module('Acceptance | Enterprise | Transform secrets', function (hooks) {
test('it shows a message if an update fails after save', async function (assert) {
const roleName = 'role-remove';
- await visit('/vault/settings/mount-secret-backend');
- const backend = `transform-${uuidv4()}`;
- await mountBackend('transform', backend);
+ const backend = await mount();
// Create transformation
const transformation = await newTransformation(backend, 'c-transformation', true);
// create role
@@ -245,9 +244,7 @@ module('Acceptance | Enterprise | Transform secrets', function (hooks) {
test('it allows creation and edit of a template', async function (assert) {
const templateName = 'my-template';
- await visit('/vault/settings/mount-secret-backend');
- const backend = `transform-${uuidv4()}`;
- await mountBackend('transform', backend);
+ const backend = await mount();
await click('[data-test-secret-list-tab="Templates"]');
assert.strictEqual(
@@ -289,9 +286,7 @@ module('Acceptance | Enterprise | Transform secrets', function (hooks) {
test('it allows creation and edit of an alphabet', async function (assert) {
const alphabetName = 'vowels-only';
- await visit('/vault/settings/mount-secret-backend');
- const backend = `transform-${uuidv4()}`;
- await mountBackend('transform', backend);
+ const backend = await mount();
await click('[data-test-secret-list-tab="Alphabets"]');
assert.strictEqual(
diff --git a/ui/tests/acceptance/mfa-method-test.js b/ui/tests/acceptance/mfa-method-test.js
index 24be2f4821ae..499b1d028515 100644
--- a/ui/tests/acceptance/mfa-method-test.js
+++ b/ui/tests/acceptance/mfa-method-test.js
@@ -214,12 +214,10 @@ module('Acceptance | mfa-method', function (hooks) {
'Route transitions to method on save'
);
await click('[data-test-tab="enforcements"]');
- assert.dom('[data-test-list-item]').hasTextContaining('bar', 'Enforcement is listed in method view');
+ assert.dom('[data-test-list-item]').hasText('bar', 'Enforcement is listed in method view');
await click('[data-test-sidebar-nav-link="Multi-Factor Authentication"]');
await click('[data-test-tab="enforcements"]');
- assert
- .dom('[data-test-list-item="bar"]')
- .hasTextContaining('bar', 'Enforcement is listed in enforcements view');
+ assert.dom('[data-test-list-item="bar"]').hasText('bar', 'Enforcement is listed in enforcements view');
await click('[data-test-list-item="bar"]');
await click('[data-test-tab="methods"]');
assert
@@ -244,7 +242,7 @@ module('Acceptance | mfa-method', function (hooks) {
'Route transitions to method on save'
);
await click('[data-test-tab="enforcements"]');
- assert.dom('[data-test-list-item]').hasTextContaining(name, 'Enforcement is listed in method view');
+ assert.dom('[data-test-list-item]').hasText(name, 'Enforcement is listed in method view');
});
test('it should edit methods', async function (assert) {
diff --git a/ui/tests/acceptance/oidc-config/clients-test.js b/ui/tests/acceptance/oidc-config/clients-test.js
index adbb0dbdd6ed..775b2780e277 100644
--- a/ui/tests/acceptance/oidc-config/clients-test.js
+++ b/ui/tests/acceptance/oidc-config/clients-test.js
@@ -72,7 +72,7 @@ module('Acceptance | oidc-config clients', function (hooks) {
assert.dom('[data-test-tab="keys"]').hasClass('active', 'keys tab is active');
assert.strictEqual(currentRouteName(), 'vault.cluster.access.oidc.keys.index');
assert
- .dom('[data-test-oidc-key-linked-block="default"] [data-test-item]')
+ .dom('[data-test-oidc-key-linked-block="default"]')
.hasText('default', 'index page lists default key');
// navigate to default key details from pop-up menu
@@ -132,7 +132,7 @@ module('Acceptance | oidc-config clients', function (hooks) {
// edit key and limit applications
await visit(OIDC_BASE_URL + '/keys');
await click('[data-test-oidc-key-linked-block="test-key"] [data-test-popup-menu-trigger]');
- await click('[data-test-oidc-key-linked-block="test-key"] [data-test-oidc-key-menu-link="edit"]');
+ await click('[data-test-oidc-key-menu-link="edit"]');
assert.strictEqual(
currentRouteName(),
'vault.cluster.access.oidc.keys.key.edit',
diff --git a/ui/tests/acceptance/pki/pki-engine-workflow-test.js b/ui/tests/acceptance/pki/pki-engine-workflow-test.js
index 129441e16fb3..c9df281b8f1c 100644
--- a/ui/tests/acceptance/pki/pki-engine-workflow-test.js
+++ b/ui/tests/acceptance/pki/pki-engine-workflow-test.js
@@ -307,15 +307,11 @@ module('Acceptance | pki workflow', function (hooks) {
await visit(`/vault/secrets/${this.mountPath}/pki/keys`);
await click(PKI_KEYS.generateKey);
assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/keys/create`);
- await fillIn(GENERAL.inputByAttr('type'), 'exported'); // exported keys generated private_key data
+ await fillIn(GENERAL.inputByAttr('type'), 'exported');
await fillIn(GENERAL.inputByAttr('keyType'), 'rsa');
await click(GENERAL.saveButton);
keyId = find(GENERAL.infoRowValue('Key ID')).textContent?.trim();
- assert.strictEqual(
- currentURL(),
- `/vault/secrets/${this.mountPath}/pki/keys/create`,
- 'it does not transition to details private_key data exists'
- );
+ assert.strictEqual(currentURL(), `/vault/secrets/${this.mountPath}/pki/keys/${keyId}/details`);
assert
.dom(PKI_KEYS.nextStepsAlert)
diff --git a/ui/tests/acceptance/raft-storage-test.js b/ui/tests/acceptance/raft-storage-test.js
index 96b2abb427dc..2eee76c78e3a 100644
--- a/ui/tests/acceptance/raft-storage-test.js
+++ b/ui/tests/acceptance/raft-storage-test.js
@@ -64,11 +64,10 @@ module('Acceptance | raft storage', function (hooks) {
return {};
});
- const row = '[data-raft-row]:nth-child(2) [data-test-raft-actions]';
await visit('/vault/storage/raft');
assert.dom('[data-raft-row]').exists({ count: 2 }, '2 raft peers render in table');
- await click(`${row} button`);
- await click(`${row} [data-test-confirm-action-trigger]`);
+ await click('[data-raft-row]:nth-child(2) [data-test-raft-actions] button');
+ await click('[data-test-confirm-action-trigger]');
await click('[data-test-confirm-button]');
assert.dom('[data-raft-row]').exists({ count: 1 }, 'Raft peer successfully removed');
});
diff --git a/ui/tests/acceptance/secrets/backend/alicloud/secret-test.js b/ui/tests/acceptance/secrets/backend/alicloud/secret-test.js
index 975948383aa2..e1f5114e7c22 100644
--- a/ui/tests/acceptance/secrets/backend/alicloud/secret-test.js
+++ b/ui/tests/acceptance/secrets/backend/alicloud/secret-test.js
@@ -11,7 +11,6 @@ import { v4 as uuidv4 } from 'uuid';
import mountSecrets from 'vault/tests/pages/settings/mount-secret-backend';
import backendsPage from 'vault/tests/pages/secrets/backends';
import authPage from 'vault/tests/pages/auth';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
module('Acceptance | alicloud/enable', function (hooks) {
setupApplicationTest(hooks);
@@ -25,7 +24,10 @@ module('Acceptance | alicloud/enable', function (hooks) {
const enginePath = `alicloud-${this.uid}`;
await mountSecrets.visit();
await settled();
- await mountBackend('alicloud', enginePath);
+ await mountSecrets.selectType('alicloud');
+ await settled();
+ await mountSecrets.path(enginePath).submit();
+ await settled();
assert.strictEqual(
currentRouteName(),
diff --git a/ui/tests/acceptance/secrets/backend/aws/aws-configuration-test.js b/ui/tests/acceptance/secrets/backend/aws/aws-configuration-test.js
index 1f11e3f84cc8..605bd26b3c1c 100644
--- a/ui/tests/acceptance/secrets/backend/aws/aws-configuration-test.js
+++ b/ui/tests/acceptance/secrets/backend/aws/aws-configuration-test.js
@@ -16,7 +16,6 @@ import { runCmd } from 'vault/tests/helpers/commands';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
import { overrideResponse } from 'vault/tests/helpers/stubs';
import { SECRET_ENGINE_SELECTORS as SES } from 'vault/tests/helpers/secret-engine/secret-engine-selectors';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
import {
createConfig,
expectedConfigKeys,
@@ -47,7 +46,9 @@ module('Acceptance | aws | configuration', function (hooks) {
const path = `aws-${this.uid}`;
// in this test go through the full mount process. Bypass this step in later tests.
await visit('/vault/settings/mount-secret-backend');
- await mountBackend('aws', path);
+ await click(SES.mountType('aws'));
+ await fillIn(GENERAL.inputByAttr('path'), path);
+ await click(SES.mountSubmit);
await click(SES.configTab);
assert.dom(GENERAL.emptyStateTitle).hasText('AWS not configured');
assert.dom(GENERAL.emptyStateActions).hasText('Configure AWS');
diff --git a/ui/tests/acceptance/secrets/backend/engines-test.js b/ui/tests/acceptance/secrets/backend/engines-test.js
index dea147fc539b..79b657b77c93 100644
--- a/ui/tests/acceptance/secrets/backend/engines-test.js
+++ b/ui/tests/acceptance/secrets/backend/engines-test.js
@@ -3,55 +3,69 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { click, find, findAll, currentRouteName, visit } from '@ember/test-helpers';
+import { currentRouteName, settled } from '@ember/test-helpers';
import { clickTrigger } from 'ember-power-select/test-support/helpers';
+import { create } from 'ember-cli-page-object';
import { module, test } from 'qunit';
+import consoleClass from 'vault/tests/pages/components/console/ui-panel';
import { setupApplicationTest } from 'ember-qunit';
import { v4 as uuidv4 } from 'uuid';
-import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { deleteEngineCmd, mountEngineCmd, runCmd } from 'vault/tests/helpers/commands';
-import { login } from 'vault/tests/helpers/auth/auth-helpers';
+import mountSecrets from 'vault/tests/pages/settings/mount-secret-backend';
+import backendsPage from 'vault/tests/pages/secrets/backends';
+import authPage from 'vault/tests/pages/auth';
+import ss from 'vault/tests/pages/components/search-select';
-const SELECTORS = {
- backendLink: (path) =>
- path ? `[data-test-secrets-backend-link="${path}"]` : '[data-test-secrets-backend-link]',
-};
+const consoleComponent = create(consoleClass);
+const searchSelect = create(ss);
module('Acceptance | secret-engine list view', function (hooks) {
setupApplicationTest(hooks);
hooks.beforeEach(function () {
this.uid = uuidv4();
- return login();
+ return authPage.login();
});
test('it allows you to disable an engine', async function (assert) {
// first mount an engine so we can disable it.
const enginePath = `alicloud-disable-${this.uid}`;
- await runCmd(mountEngineCmd('alicloud', enginePath));
- await visit('/vault/secrets');
- assert.dom(SELECTORS.backendLink(enginePath)).exists();
- const row = SELECTORS.backendLink(enginePath);
- await click(`${row} ${GENERAL.menuTrigger}`);
- await click(`${row} ${GENERAL.confirmTrigger}`);
- await click(GENERAL.confirmButton);
+ await mountSecrets.enable('alicloud', enginePath);
+ await settled();
+ assert.ok(backendsPage.rows.filterBy('path', `${enginePath}/`)[0], 'shows the mounted engine');
+
+ await backendsPage.visit();
+ await settled();
+ const row = backendsPage.rows.filterBy('path', `${enginePath}/`)[0];
+ await row.menu();
+ await settled();
+ await backendsPage.disableButton();
+ await settled();
+ await backendsPage.confirmDisable();
+ await settled();
assert.strictEqual(
currentRouteName(),
'vault.cluster.secrets.backends',
'redirects to the backends page'
);
- assert.dom(SELECTORS.backendLink(enginePath)).doesNotExist('does not show the disabled engine');
+ assert.strictEqual(
+ backendsPage.rows.filterBy('path', `${enginePath}/`).length,
+ 0,
+ 'does not show the disabled engine'
+ );
});
test('it adds disabled css styling to unsupported secret engines', async function (assert) {
assert.expect(2);
// first mount engine that is not supported
const enginePath = `nomad-${this.uid}`;
- await runCmd(mountEngineCmd('nomad', enginePath));
- await visit('/vault/secrets');
- const rows = findAll(SELECTORS.backendLink());
+ await mountSecrets.enable('nomad', enginePath);
+ await settled();
+ await backendsPage.visit();
+ await settled();
+
+ const rows = document.querySelectorAll('[data-test-secrets-backend-link]');
const rowUnsupported = Array.from(rows).filter((row) => row.innerText.includes('nomad'));
const rowSupported = Array.from(rows).filter((row) => row.innerText.includes('cubbyhole'));
assert
@@ -63,7 +77,7 @@ module('Acceptance | secret-engine list view', function (hooks) {
assert.dom(rowSupported[0]).hasClass('linked-block', `linked-block class is added to supported engines.`);
// cleanup
- await runCmd(deleteEngineCmd(enginePath));
+ await consoleComponent.runCommands([`delete sys/mounts/${enginePath}`]);
});
test('it filters by name and engine type', async function (assert) {
@@ -71,31 +85,32 @@ module('Acceptance | secret-engine list view', function (hooks) {
const enginePath1 = `aws-1-${this.uid}`;
const enginePath2 = `aws-2-${this.uid}`;
- await await runCmd(mountEngineCmd('aws', enginePath1));
- await await runCmd(mountEngineCmd('aws', enginePath2));
- await visit('/vault/secrets');
+ await mountSecrets.enable('aws', enginePath1);
+ await mountSecrets.enable('aws', enginePath2);
+ await backendsPage.visit();
+ await settled();
// filter by type
await clickTrigger('#filter-by-engine-type');
- await click(GENERAL.searchSelect.option());
+ await searchSelect.options.objectAt(0).click();
- const rows = findAll(SELECTORS.backendLink());
+ const rows = document.querySelectorAll('[data-test-secrets-backend-link]');
const rowsAws = Array.from(rows).filter((row) => row.innerText.includes('aws'));
assert.strictEqual(rows.length, rowsAws.length, 'all rows returned are aws');
// filter by name
await clickTrigger('#filter-by-engine-name');
- const firstItemToSelect = find(GENERAL.searchSelect.option()).innerText;
- await click(GENERAL.searchSelect.option());
+ const firstItemToSelect = searchSelect.options.objectAt(0).text;
+ await searchSelect.options.objectAt(0).click();
const singleRow = document.querySelectorAll('[data-test-secrets-backend-link]');
assert.strictEqual(singleRow.length, 1, 'returns only one row');
assert.dom(singleRow[0]).includesText(firstItemToSelect, 'shows the filtered by name engine');
// clear filter by engine name
- await click(`#filter-by-engine-name ${GENERAL.searchSelect.removeSelected}`);
+ await searchSelect.deleteButtons.objectAt(1).click();
const rowsAgain = document.querySelectorAll('[data-test-secrets-backend-link]');
assert.ok(rowsAgain.length > 1, 'filter has been removed');
// cleanup
- await runCmd(deleteEngineCmd(enginePath1));
- await runCmd(deleteEngineCmd(enginePath2));
+ await consoleComponent.runCommands([`delete sys/mounts/${enginePath1}`]);
+ await consoleComponent.runCommands([`delete sys/mounts/${enginePath2}`]);
});
});
diff --git a/ui/tests/acceptance/secrets/backend/gcpkms/secrets-test.js b/ui/tests/acceptance/secrets/backend/gcpkms/secrets-test.js
index efe0221581e6..fba46e0bdd1a 100644
--- a/ui/tests/acceptance/secrets/backend/gcpkms/secrets-test.js
+++ b/ui/tests/acceptance/secrets/backend/gcpkms/secrets-test.js
@@ -11,7 +11,6 @@ import { v4 as uuidv4 } from 'uuid';
import mountSecrets from 'vault/tests/pages/settings/mount-secret-backend';
import backendsPage from 'vault/tests/pages/secrets/backends';
import authPage from 'vault/tests/pages/auth';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
module('Acceptance | gcpkms/enable', function (hooks) {
setupApplicationTest(hooks);
@@ -26,8 +25,9 @@ module('Acceptance | gcpkms/enable', function (hooks) {
const enginePath = `gcpkms-${this.uid}`;
await mountSecrets.visit();
await settled();
- await mountBackend('gcpkms', enginePath);
-
+ await mountSecrets.selectType('gcpkms');
+ await mountSecrets.path(enginePath).submit();
+ await settled();
assert.strictEqual(
currentRouteName(),
'vault.cluster.secrets.backends',
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js
index 3615445d7390..606267b14fa1 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-create-test.js
@@ -48,17 +48,17 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
const backend = this.backend;
await visit(`/vault/secrets/${backend}/kv/list`);
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'single secret exists on list');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
await click(PAGE.list.createSecret);
await fillIn(FORM.inputByAttr('path'), 'jk');
await click(FORM.cancelBtn);
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'same amount of secrets');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
await click(PAGE.list.createSecret);
await fillIn(FORM.inputByAttr('path'), 'psych');
await click(PAGE.breadcrumbAtIdx(1));
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'same amount of secrets');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
});
test('cancel on new version rolls back model (a)', async function (assert) {
const backend = this.backend;
@@ -500,17 +500,17 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
const backend = this.backend;
await visit(`/vault/secrets/${backend}/kv/list`);
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'single secret exists on list');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
await click(PAGE.list.createSecret);
await fillIn(FORM.inputByAttr('path'), 'jk');
await click(FORM.cancelBtn);
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'same amount of secrets');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
await click(PAGE.list.createSecret);
await fillIn(FORM.inputByAttr('path'), 'psych');
await click(PAGE.breadcrumbAtIdx(1));
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'same amount of secrets');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
});
test('cancel on new version rolls back model (dlr)', async function (assert) {
const backend = this.backend;
@@ -649,17 +649,17 @@ module('Acceptance | kv-v2 workflow | secret and version create', function (hook
const backend = this.backend;
await visit(`/vault/secrets/${backend}/kv/list`);
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'single secret exists on list');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
await click(PAGE.list.createSecret);
await fillIn(FORM.inputByAttr('path'), 'jk');
await click(FORM.cancelBtn);
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'same amount of secrets');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
await click(PAGE.list.createSecret);
await fillIn(FORM.inputByAttr('path'), 'psych');
await click(PAGE.breadcrumbAtIdx(1));
assert.dom(PAGE.list.item()).exists({ count: 1 }, 'same amount of secrets');
- assert.dom(`${PAGE.list.item('app/')} [data-test-path]`).hasText('app/', 'expected list item');
+ assert.dom(PAGE.list.item('app/')).hasText('app/', 'expected list item');
});
test('cancel on new version rolls back model (mm)', async function (assert) {
const backend = this.backend;
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js
index 9ed16d8f00d7..200b76d48092 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-edge-cases-test.js
@@ -299,34 +299,22 @@ module('Acceptance | kv-v2 workflow | edge cases', function (hooks) {
codemirror().getValue(),
`{
\"\": \"\"
-}`,
- 'JSON editor displays correct empty object'
+}`
);
codemirror().setValue('{ "foo3": { "name": "bar3" } }');
await click(FORM.saveBtn);
// Details view
await click(PAGE.secretTab('Secret'));
- assert.dom(FORM.toggleJson).isNotDisabled('JSON toggle is not disabled');
- assert.dom(FORM.toggleJson).isChecked("JSON toggle is checked 'on'");
-
- assert
- .dom(GENERAL.codeBlock('secret-data'))
- .hasText('Version data { "foo3": { "name": "bar3" } }', 'Values are displayed in the details view');
+ assert.dom(FORM.toggleJson).isNotDisabled();
+ assert.dom(FORM.toggleJson).isChecked();
+ assert.false(codemirror().getValue().includes('*'), 'Values are not obscured on details view');
// New version view
await click(PAGE.detail.createNewVersion);
assert.dom(FORM.toggleJson).isNotDisabled();
assert.dom(FORM.toggleJson).isChecked();
- assert.deepEqual(
- codemirror().getValue(),
- `{
- "foo3": {
- "name": "bar3"
- }
-}`,
- 'Values are displayed in the new version view'
- );
+ assert.false(codemirror().getValue().includes('*'), 'Values are not obscured on edit view');
});
test('on enter the JSON editor cursor goes to the next line', async function (assert) {
@@ -371,16 +359,12 @@ module('Acceptance | kv-v2 workflow | edge cases', function (hooks) {
await click(PAGE.secretTab('Secret'));
await click(PAGE.detail.versionDropdown);
await click(`${PAGE.detail.version(1)} a`);
- assert
- .dom(GENERAL.codeBlock('secret-data'))
- .hasText(`Version data ${expectedDataV1}`, 'Version one data is displayed');
+ assert.strictEqual(codemirror().getValue(), expectedDataV1, 'Version one data is displayed');
// Navigate back the second version and make sure the secret data is correct
await click(PAGE.detail.versionDropdown);
await click(`${PAGE.detail.version(2)} a`);
- assert
- .dom(GENERAL.codeBlock('secret-data'))
- .hasText(`Version data ${expectedDataV2}`, 'Version two data is displayed');
+ assert.strictEqual(codemirror().getValue(), expectedDataV2, 'Version two data is displayed');
});
test('does not register as advanced when value includes {', async function (assert) {
diff --git a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js
index e1ac5dc932a7..25520e2f7380 100644
--- a/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/kv-v2-workflow-navigation-test.js
@@ -141,16 +141,14 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
assert
.dom(PAGE.breadcrumbCurrentAtIdx(2))
.hasText('per%centfu ll', 'the current breadcrumb is value of the secret path');
-
await click(PAGE.breadcrumbAtIdx(1));
- assert
- .dom(`${PAGE.list.item(pathWithSpace)} [data-test-path]`)
- .hasText(pathWithSpace, 'the list item is shown correctly');
+
+ assert.dom(`${PAGE.list.item(pathWithSpace)}`).hasText(pathWithSpace, 'the list item is shown correctly');
await typeIn(PAGE.list.filter, 'per%');
await click('[data-test-kv-list-filter-submit]');
assert
- .dom(`${PAGE.list.item(pathWithSpace)} [data-test-path]`)
+ .dom(`${PAGE.list.item(pathWithSpace)}`)
.hasText(pathWithSpace, 'the list item is shown correctly after filtering');
await click(PAGE.list.item(pathWithSpace));
@@ -181,14 +179,12 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
.hasText('centfu ll', 'the current breadcrumb is value centfu ll');
await click(PAGE.breadcrumbAtIdx(1));
- assert
- .dom(`${PAGE.list.item('per%/')} [data-test-path]`)
- .hasText('per%/', 'the directory item is shown correctly');
+ assert.dom(`${PAGE.list.item('per%/')}`).hasText('per%/', 'the directory item is shown correctly');
await typeIn(PAGE.list.filter, 'per%/');
await click('[data-test-kv-list-filter-submit]');
assert
- .dom(`${PAGE.list.item('centfu ll')} [data-test-path]`)
+ .dom(`${PAGE.list.item('centfu ll')}`)
.hasText('centfu ll', 'the list item is shown correctly after filtering');
await click(PAGE.list.item('centfu ll'));
@@ -224,7 +220,7 @@ module('Acceptance | kv-v2 workflow | navigation', function (hooks) {
await click(PAGE.breadcrumbAtIdx(2));
assert
- .dom(`${PAGE.list.item('foo%2fbar/')} [data-test-path]`)
+ .dom(`${PAGE.list.item('foo%2fbar/')}`)
.hasText('foo%2fbar/', 'the directory item is shown correctly');
await click(PAGE.list.item('foo%2fbar/'));
diff --git a/ui/tests/acceptance/secrets/backend/kv/secret-test.js b/ui/tests/acceptance/secrets/backend/kv/secret-test.js
index b182a2ddd146..03034cf6b6d7 100644
--- a/ui/tests/acceptance/secrets/backend/kv/secret-test.js
+++ b/ui/tests/acceptance/secrets/backend/kv/secret-test.js
@@ -19,9 +19,7 @@ import { writeSecret, writeVersionedSecret } from 'vault/tests/helpers/kv/kv-run
import { runCmd } from 'vault/tests/helpers/commands';
import { PAGE } from 'vault/tests/helpers/kv/kv-selectors';
import codemirror from 'vault/tests/helpers/codemirror';
-import { MOUNT_BACKEND_FORM } from 'vault/tests/helpers/components/mount-backend-form-selectors';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { SECRET_ENGINE_SELECTORS as SS } from 'vault/tests/helpers/secret-engine/secret-engine-selectors';
const deleteEngine = async function (enginePath, assert) {
await logout.visit();
@@ -50,14 +48,15 @@ module('Acceptance | secrets/secret/create, read, delete', function (hooks) {
const enginePath = `kv-secret-${this.uid}`;
const maxVersion = '101';
await mountSecrets.visit();
- await click(MOUNT_BACKEND_FORM.mountType('kv'));
- await fillIn(GENERAL.inputByAttr('path'), enginePath);
+ await click('[data-test-mount-type="kv"]');
+
+ await fillIn('[data-test-input="path"]', enginePath);
await fillIn('[data-test-input="maxVersions"]', maxVersion);
await click('[data-test-input="casRequired"]');
await click('[data-test-toggle-label="Automate secret deletion"]');
await fillIn('[data-test-select="ttl-unit"]', 's');
await fillIn('[data-test-ttl-value="Automate secret deletion"]', '1');
- await click(GENERAL.saveButton);
+ await click('[data-test-mount-submit="true"]');
await click(PAGE.secretTab('Configuration'));
@@ -138,11 +137,8 @@ module('Acceptance | secrets/secret/create, read, delete', function (hooks) {
this.backend = `kv-v1-${this.uid}`;
// mount version 1 engine
await mountSecrets.visit();
- await click(MOUNT_BACKEND_FORM.mountType('kv'));
- await fillIn(GENERAL.inputByAttr('path'), this.backend);
- await click(GENERAL.toggleGroup('Method Options'));
- await mountSecrets.version(1);
- await click(GENERAL.saveButton);
+ await mountSecrets.selectType('kv');
+ await mountSecrets.path(this.backend).toggleOptions().version(1).submit();
});
hooks.afterEach(async function () {
await runCmd([`delete sys/mounts/${this.backend}`]);
@@ -184,8 +180,9 @@ module('Acceptance | secrets/secret/create, read, delete', function (hooks) {
assert.dom('[data-test-secret-link]').exists({ count: 2 });
// delete the items
- await click(SS.secretLinkMenu('1/2/3/4'));
- await click(SS.secretLinkMenuDelete('1/2/3/4'));
+ await listPage.secrets.objectAt(0).menuToggle();
+ await settled();
+ await listPage.delete();
await listPage.confirmDelete();
await settled();
assert.strictEqual(currentRouteName(), 'vault.cluster.secrets.backend.list');
@@ -222,7 +219,7 @@ module('Acceptance | secrets/secret/create, read, delete', function (hooks) {
'(',
')',
'"',
- // "'",
+ //"'",
'!',
'#',
'$',
@@ -246,8 +243,8 @@ module('Acceptance | secrets/secret/create, read, delete', function (hooks) {
await runCmd([...commands, 'refresh']);
for (const path of paths) {
await listPage.visit({ backend, id: path });
- assert.dom(SS.secretLinkATag()).hasText('2', `${path}: secret is displayed properly`);
- await click(SS.secretLink());
+ assert.ok(listPage.secrets.filterBy('text', '2')[0], `${path}: secret is displayed properly`);
+ await listPage.secrets.filterBy('text', '2')[0].click();
assert.strictEqual(
currentRouteName(),
'vault.cluster.secrets.backend.show',
@@ -304,8 +301,8 @@ module('Acceptance | secrets/secret/create, read, delete', function (hooks) {
await listPage.create();
await editPage.createSecret(`${path}/2`, 'foo', 'bar');
await listPage.visit({ backend, id: path });
- assert.dom(SS.secretLinkATag()).hasText('2', `${path}: secret is displayed properly`);
- await click(SS.secretLink());
+ assert.ok(listPage.secrets.filterBy('text', '2')[0], `${path}: secret is displayed properly`);
+ await listPage.secrets.filterBy('text', '2')[0].click();
assert.strictEqual(
currentRouteName(),
'vault.cluster.secrets.backend.show',
diff --git a/ui/tests/acceptance/secrets/backend/ldap/libraries-test.js b/ui/tests/acceptance/secrets/backend/ldap/libraries-test.js
index 4e9cc5a57059..dd3d0e0725a6 100644
--- a/ui/tests/acceptance/secrets/backend/ldap/libraries-test.js
+++ b/ui/tests/acceptance/secrets/backend/ldap/libraries-test.js
@@ -6,13 +6,11 @@
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { setupMirage } from 'ember-cli-mirage/test-support';
-import { v4 as uuidv4 } from 'uuid';
import ldapMirageScenario from 'vault/mirage/scenarios/ldap';
import ldapHandlers from 'vault/mirage/handlers/ldap';
import authPage from 'vault/tests/pages/auth';
import { click } from '@ember/test-helpers';
import { isURL, visitURL } from 'vault/tests/helpers/ldap/ldap-helpers';
-import { deleteEngineCmd, mountEngineCmd, runCmd } from 'vault/tests/helpers/commands';
module('Acceptance | ldap | libraries', function (hooks) {
setupApplicationTest(hooks);
@@ -21,41 +19,21 @@ module('Acceptance | ldap | libraries', function (hooks) {
hooks.beforeEach(async function () {
ldapHandlers(this.server);
ldapMirageScenario(this.server);
- this.backend = `ldap-test-${uuidv4()}`;
await authPage.login();
- // mount & configure
- await runCmd([
- mountEngineCmd('ldap', this.backend),
- `write ${this.backend}/config binddn=foo bindpass=bar url=http://localhost:8208`,
- ]);
- return visitURL('libraries', this.backend);
- });
-
- hooks.afterEach(async function () {
- await runCmd(deleteEngineCmd(this.backend));
- });
-
- test('it should show libraries on overview page', async function (assert) {
- await visitURL('overview', this.backend);
- assert.dom('[data-test-libraries-count]').hasText('1');
+ return visitURL('libraries');
});
test('it should transition to create library route on toolbar link click', async function (assert) {
await click('[data-test-toolbar-action="library"]');
- assert.true(
- isURL('libraries/create', this.backend),
- 'Transitions to library create route on toolbar link click'
- );
+ assert.true(isURL('libraries/create'), 'Transitions to library create route on toolbar link click');
});
test('it should transition to library details route on list item click', async function (assert) {
await click('[data-test-list-item-link] a');
assert.true(
- isURL('libraries/test-library/details/accounts', this.backend),
+ isURL('libraries/test-library/details/accounts'),
'Transitions to library details accounts route on list item click'
);
- assert.dom('[data-test-account-name]').exists({ count: 2 }, 'lists the accounts');
- assert.dom('[data-test-checked-out-account]').exists({ count: 1 }, 'lists the checked out accounts');
});
test('it should transition to routes from list item action menu', async function (assert) {
@@ -66,7 +44,7 @@ module('Acceptance | ldap | libraries', function (hooks) {
await click(`[data-test-${action}]`);
const uri = action === 'details' ? 'details/accounts' : action;
assert.true(
- isURL(`libraries/test-library/${uri}`, this.backend),
+ isURL(`libraries/test-library/${uri}`),
`Transitions to ${action} route on list item action menu click`
);
await click('[data-test-breadcrumb="libraries"] a');
@@ -77,13 +55,13 @@ module('Acceptance | ldap | libraries', function (hooks) {
await click('[data-test-list-item-link] a');
await click('[data-test-tab="config"]');
assert.true(
- isURL('libraries/test-library/details/configuration', this.backend),
+ isURL('libraries/test-library/details/configuration'),
'Transitions to configuration route on tab click'
);
await click('[data-test-tab="accounts"]');
assert.true(
- isURL('libraries/test-library/details/accounts', this.backend),
+ isURL('libraries/test-library/details/accounts'),
'Transitions to accounts route on tab click'
);
});
@@ -91,9 +69,6 @@ module('Acceptance | ldap | libraries', function (hooks) {
test('it should transition to routes from library details toolbar links', async function (assert) {
await click('[data-test-list-item-link] a');
await click('[data-test-edit]');
- assert.true(
- isURL('libraries/test-library/edit', this.backend),
- 'Transitions to credentials route from toolbar link'
- );
+ assert.true(isURL('libraries/test-library/edit'), 'Transitions to credentials route from toolbar link');
});
});
diff --git a/ui/tests/acceptance/secrets/backend/ldap/overview-test.js b/ui/tests/acceptance/secrets/backend/ldap/overview-test.js
index c2210d57b8dc..8fe90cccf2e1 100644
--- a/ui/tests/acceptance/secrets/backend/ldap/overview-test.js
+++ b/ui/tests/acceptance/secrets/backend/ldap/overview-test.js
@@ -6,15 +6,12 @@
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { setupMirage } from 'ember-cli-mirage/test-support';
-import { v4 as uuidv4 } from 'uuid';
import ldapMirageScenario from 'vault/mirage/scenarios/ldap';
import ldapHandlers from 'vault/mirage/handlers/ldap';
import authPage from 'vault/tests/pages/auth';
-import { click, visit } from '@ember/test-helpers';
+import { click, fillIn, visit } from '@ember/test-helpers';
import { selectChoose } from 'ember-power-select/test-support';
import { isURL, visitURL } from 'vault/tests/helpers/ldap/ldap-helpers';
-import { deleteEngineCmd, mountEngineCmd, runCmd } from 'vault/tests/helpers/commands';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
module('Acceptance | ldap | overview', function (hooks) {
setupApplicationTest(hooks);
@@ -22,99 +19,77 @@ module('Acceptance | ldap | overview', function (hooks) {
hooks.beforeEach(async function () {
ldapHandlers(this.server);
- this.backend = `ldap-test-${uuidv4()}`;
- this.mountAndConfig = (backend) => {
- return runCmd([
- mountEngineCmd('ldap', backend),
- `write ${backend}/config binddn=foo bindpass=bar url=http://localhost:8208`,
- ]);
- };
return authPage.login();
});
test('it should transition to ldap overview on mount success', async function (assert) {
- const backend = 'ldap-test-mount';
await visit('/vault/secrets');
await click('[data-test-enable-engine]');
- await mountBackend('ldap', backend);
- assert.true(isURL('overview', backend), 'Transitions to ldap overview route on mount success');
- assert.dom('[data-test-header-title]').hasText(backend);
- // cleanup mounted engine
- await visit('/vault/secrets');
- await runCmd(deleteEngineCmd(backend));
+ await click('[data-test-mount-type="ldap"]');
+ await fillIn('[data-test-input="path"]', 'ldap-test');
+ await click('[data-test-mount-submit]');
+ assert.true(isURL('overview'), 'Transitions to ldap overview route on mount success');
});
test('it should transition to routes on tab link click', async function (assert) {
assert.expect(4);
- await this.mountAndConfig(this.backend);
- await visitURL('overview', this.backend);
+ await visitURL('overview');
for (const tab of ['roles', 'libraries', 'config', 'overview']) {
await click(`[data-test-tab="${tab}"]`);
const route = tab === 'config' ? 'configuration' : tab;
- assert.true(isURL(route, this.backend), `Transitions to ${route} route on tab link click`);
+ assert.true(isURL(route), `Transitions to ${route} route on tab link click`);
}
});
test('it should transition to configuration route when engine is not configured', async function (assert) {
- await runCmd(mountEngineCmd('ldap', this.backend));
- await visitURL('overview', this.backend);
+ await visitURL('overview');
await click('[data-test-config-cta] a');
- assert.true(isURL('configure', this.backend), 'Transitions to configure route on cta link click');
+ assert.true(isURL('configure'), 'Transitions to configure route on cta link click');
- await click(`[data-test-breadcrumb="${this.backend}"] a`);
+ await click('[data-test-breadcrumb="ldap-test"] a');
await click('[data-test-toolbar-action="config"]');
- assert.true(isURL('configure', this.backend), 'Transitions to configure route on toolbar link click');
+ assert.true(isURL('configure'), 'Transitions to configure route on toolbar link click');
});
// including a test for the configuration route here since it is the only one needed
test('it should transition to configuration edit on toolbar link click', async function (assert) {
ldapMirageScenario(this.server);
- await this.mountAndConfig(this.backend);
- await visitURL('overview', this.backend);
+ await visitURL('overview');
await click('[data-test-tab="config"]');
await click('[data-test-toolbar-config-action]');
- assert.true(isURL('configure', this.backend), 'Transitions to configure route on toolbar link click');
+ assert.true(isURL('configure'), 'Transitions to configure route on toolbar link click');
});
test('it should transition to create role route on card action link click', async function (assert) {
ldapMirageScenario(this.server);
- await this.mountAndConfig(this.backend);
- await visitURL('overview', this.backend);
+ await visitURL('overview');
await click('[data-test-overview-card="Roles"] a');
- assert.true(
- isURL('roles/create', this.backend),
- 'Transitions to role create route on card action link click'
- );
+ assert.true(isURL('roles/create'), 'Transitions to role create route on card action link click');
});
test('it should transition to create library route on card action link click', async function (assert) {
ldapMirageScenario(this.server);
- await this.mountAndConfig(this.backend);
- await visitURL('overview', this.backend);
+ await visitURL('overview');
await click('[data-test-overview-card="Libraries"] a');
- assert.true(
- isURL('libraries/create', this.backend),
- 'Transitions to library create route on card action link click'
- );
+ assert.true(isURL('libraries/create'), 'Transitions to library create route on card action link click');
});
test('it should transition to role credentials route on generate credentials action', async function (assert) {
ldapMirageScenario(this.server);
- await this.mountAndConfig(this.backend);
- await visitURL('overview', this.backend);
+ await visitURL('overview');
await selectChoose('.search-select', 'static-role');
await click('[data-test-generate-credential-button]');
assert.true(
- isURL('roles/static/static-role/credentials', this.backend),
+ isURL('roles/static/static-role/credentials'),
'Transitions to role credentials route on generate credentials action'
);
- await click(`[data-test-breadcrumb="${this.backend}"] a`);
+ await click('[data-test-breadcrumb="ldap-test"] a');
await selectChoose('.search-select', 'dynamic-role');
await click('[data-test-generate-credential-button]');
assert.true(
- isURL('roles/dynamic/dynamic-role/credentials', this.backend),
+ isURL('roles/dynamic/dynamic-role/credentials'),
'Transitions to role credentials route on generate credentials action'
);
});
diff --git a/ui/tests/acceptance/secrets/backend/ldap/roles-test.js b/ui/tests/acceptance/secrets/backend/ldap/roles-test.js
index 4aa6d07e79d1..bcdf358ce095 100644
--- a/ui/tests/acceptance/secrets/backend/ldap/roles-test.js
+++ b/ui/tests/acceptance/secrets/backend/ldap/roles-test.js
@@ -6,14 +6,12 @@
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { setupMirage } from 'ember-cli-mirage/test-support';
-import { v4 as uuidv4 } from 'uuid';
import ldapMirageScenario from 'vault/mirage/scenarios/ldap';
import ldapHandlers from 'vault/mirage/handlers/ldap';
import authPage from 'vault/tests/pages/auth';
import { click, fillIn, waitFor } from '@ember/test-helpers';
import { assertURL, isURL, visitURL } from 'vault/tests/helpers/ldap/ldap-helpers';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { deleteEngineCmd, mountEngineCmd, runCmd } from 'vault/tests/helpers/commands';
import { LDAP_SELECTORS } from 'vault/tests/helpers/ldap/ldap-selectors';
module('Acceptance | ldap | roles', function (hooks) {
@@ -23,26 +21,14 @@ module('Acceptance | ldap | roles', function (hooks) {
hooks.beforeEach(async function () {
ldapHandlers(this.server);
ldapMirageScenario(this.server);
- this.backend = `ldap-test-${uuidv4()}`;
+ this.backend = 'ldap-test';
await authPage.login();
- // mount & configure
- await runCmd([
- mountEngineCmd('ldap', this.backend),
- `write ${this.backend}/config binddn=foo bindpass=bar url=http://localhost:8208`,
- ]);
- return visitURL('roles', this.backend);
- });
-
- hooks.afterEach(async function () {
- await runCmd(deleteEngineCmd(this.backend));
+ return visitURL('roles');
});
test('it should transition to create role route on toolbar link click', async function (assert) {
await click('[data-test-toolbar-action="role"]');
- assert.true(
- isURL('roles/create', this.backend),
- 'Transitions to role create route on toolbar link click'
- );
+ assert.true(isURL('roles/create'), 'Transitions to role create route on toolbar link click');
});
test('it should transition to role details route on list item click', async function (assert) {
@@ -77,7 +63,7 @@ module('Acceptance | ldap | roles', function (hooks) {
await click(LDAP_SELECTORS.action(action));
const uri = action === 'get-creds' ? 'credentials' : action;
assert.true(
- isURL(`roles/dynamic/dynamic-role/${uri}`, this.backend),
+ isURL(`roles/dynamic/dynamic-role/${uri}`),
`Transitions to ${uri} route on list item action menu click`
);
await click(GENERAL.breadcrumbLink('Roles'));
@@ -88,16 +74,13 @@ module('Acceptance | ldap | roles', function (hooks) {
await click(LDAP_SELECTORS.roleItem('dynamic', 'dynamic-role'));
await click('[data-test-get-credentials]');
assert.true(
- isURL('roles/dynamic/dynamic-role/credentials', this.backend),
+ isURL('roles/dynamic/dynamic-role/credentials'),
'Transitions to credentials route from toolbar link'
);
await click('[data-test-breadcrumb="dynamic-role"] a');
await click('[data-test-edit]');
- assert.true(
- isURL('roles/dynamic/dynamic-role/edit', this.backend),
- 'Transitions to edit route from toolbar link'
- );
+ assert.true(isURL('roles/dynamic/dynamic-role/edit'), 'Transitions to edit route from toolbar link');
});
test('it should clear roles page filter value on route exit', async function (assert) {
@@ -105,7 +88,6 @@ module('Acceptance | ldap | roles', function (hooks) {
assert
.dom('[data-test-filter-input]')
.hasValue('foo', 'Roles page filter value set after model refresh and rerender');
- await waitFor(GENERAL.emptyStateTitle);
await click('[data-test-tab="libraries"]');
await click('[data-test-tab="roles"]');
assert.dom('[data-test-filter-input]').hasNoValue('Roles page filter value cleared on route exit');
diff --git a/ui/tests/acceptance/secrets/backend/ssh/configuration-test.js b/ui/tests/acceptance/secrets/backend/ssh/configuration-test.js
index 0ad832839ec3..27d3c31ffca8 100644
--- a/ui/tests/acceptance/secrets/backend/ssh/configuration-test.js
+++ b/ui/tests/acceptance/secrets/backend/ssh/configuration-test.js
@@ -3,7 +3,7 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { click, currentURL, visit, waitFor } from '@ember/test-helpers';
+import { click, fillIn, currentURL, visit, waitFor } from '@ember/test-helpers';
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { v4 as uuidv4 } from 'uuid';
@@ -14,7 +14,6 @@ import { setupMirage } from 'ember-cli-mirage/test-support';
import { runCmd } from 'vault/tests/helpers/commands';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
import { SECRET_ENGINE_SELECTORS as SES } from 'vault/tests/helpers/secret-engine/secret-engine-selectors';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
import { configUrl } from 'vault/tests/helpers/secret-engine/secret-engine-helpers';
import { overrideResponse } from 'vault/tests/helpers/stubs';
@@ -32,7 +31,9 @@ module('Acceptance | ssh | configuration', function (hooks) {
const sshPath = `ssh-${this.uid}`;
// in this test go through the full mount process. Bypass this step in later tests.
await visit('/vault/settings/mount-secret-backend');
- await mountBackend('ssh', sshPath);
+ await click(SES.mountType('ssh'));
+ await fillIn(GENERAL.inputByAttr('path'), sshPath);
+ await click(SES.mountSubmit);
await click(SES.configTab);
assert.dom(GENERAL.emptyStateTitle).hasText('SSH not configured');
assert.dom(GENERAL.emptyStateActions).hasText('Configure SSH');
diff --git a/ui/tests/acceptance/secrets/backend/ssh/roles-test.js b/ui/tests/acceptance/secrets/backend/ssh/roles-test.js
index d684a1a2a027..7b493640792e 100644
--- a/ui/tests/acceptance/secrets/backend/ssh/roles-test.js
+++ b/ui/tests/acceptance/secrets/backend/ssh/roles-test.js
@@ -106,7 +106,6 @@ module('Acceptance | ssh | roles', function (hooks) {
},
},
];
-
test('it creates roles, generates keys and deletes roles', async function (assert) {
assert.expect(28);
const sshPath = `ssh-${this.uid}`;
@@ -151,7 +150,8 @@ module('Acceptance | ssh | roles', function (hooks) {
await settled(); // eslint-disable-line
role.assertAfterGenerate(assert, sshPath);
- await click(GENERAL.backButton);
+ // click the "Back" button
+ await click(SES.backButton);
assert.dom('[data-test-secret-generate-form]').exists(`${role.type}: back takes you back to the form`);
await click(GENERAL.cancelButton);
@@ -175,22 +175,13 @@ module('Acceptance | ssh | roles', function (hooks) {
await runCmd(`delete sys/mounts/${sshPath}`);
});
module('Acceptance | ssh | otp role', function () {
- const createOTPRole = async (name) => {
- await fillIn(GENERAL.inputByAttr('name'), name);
- await fillIn(GENERAL.inputByAttr('keyType'), name);
- await click(GENERAL.toggleGroup('Options'));
- await fillIn(GENERAL.inputByAttr('keyType'), 'otp');
- await fillIn(GENERAL.inputByAttr('defaultUser'), 'admin');
- await fillIn(GENERAL.inputByAttr('cidrList'), '0.0.0.0/0');
- await click(SES.ssh.createRole);
- };
test('it deletes a role from list view', async function (assert) {
assert.expect(2);
const path = `ssh-${this.uid}`;
await enablePage.enable('ssh', path);
await settled();
await editPage.visitRoot({ backend: path });
- await createOTPRole('role');
+ await editPage.createOTPRole('role');
await settled();
await showPage.visit({ backend: path, id: 'role' });
await settled();
@@ -212,7 +203,7 @@ module('Acceptance | ssh | roles', function (hooks) {
await enablePage.enable('ssh', path);
await settled();
await editPage.visitRoot({ backend: path });
- await createOTPRole('role');
+ await editPage.createOTPRole('role');
await settled();
assert.strictEqual(
currentRouteName(),
@@ -231,11 +222,11 @@ module('Acceptance | ssh | roles', function (hooks) {
'navs to the credentials page'
);
- await fillIn(GENERAL.inputByAttr('username'), 'admin');
- await fillIn(GENERAL.inputByAttr('ip'), '192.168.1.1');
- await click(GENERAL.saveButton);
+ await generatePage.generateOTP();
+ await settled();
assert.ok(generatePage.warningIsPresent, 'shows warning');
- await click(GENERAL.backButton);
+ await generatePage.back();
+ await settled();
assert.ok(generatePage.userIsPresent, 'clears generate, shows user input');
assert.ok(generatePage.ipIsPresent, 'clears generate, shows ip input');
// cleanup
diff --git a/ui/tests/acceptance/settings-test.js b/ui/tests/acceptance/settings-test.js
index 811bfd3bf0d1..a2c005a4af83 100644
--- a/ui/tests/acceptance/settings-test.js
+++ b/ui/tests/acceptance/settings-test.js
@@ -3,7 +3,7 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { currentURL, visit, click, fillIn } from '@ember/test-helpers';
+import { currentURL, find, visit, settled, click } from '@ember/test-helpers';
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { v4 as uuidv4 } from 'uuid';
@@ -13,7 +13,6 @@ import mountSecrets from 'vault/tests/pages/settings/mount-secret-backend';
import authPage from 'vault/tests/pages/auth';
import { deleteEngineCmd, mountEngineCmd, runCmd } from 'vault/tests/helpers/commands';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { MOUNT_BACKEND_FORM } from 'vault/tests/helpers/components/mount-backend-form-selectors';
const { searchSelect } = GENERAL;
@@ -32,24 +31,22 @@ module('Acceptance | secret engine mount settings', function (hooks) {
// mount unsupported backend
await visit('/vault/settings/mount-secret-backend');
- assert.strictEqual(
- currentURL(),
- '/vault/settings/mount-secret-backend',
- 'navigates to the mount secret backend page'
- );
- await click(MOUNT_BACKEND_FORM.mountType(type));
- await fillIn(GENERAL.inputByAttr('path'), path);
- await click(GENERAL.toggleGroup('Method Options'));
- await mountSecrets.enableDefaultTtl().defaultTTLUnit('s').defaultTTLVal(100);
- await click(GENERAL.saveButton);
-
- assert
- .dom(`${GENERAL.flashMessage}.is-success`)
- .includesText(
- `Success Successfully mounted the ${type} secrets engine at ${path}`,
- 'flash message is shown after mounting'
- );
+ assert.strictEqual(currentURL(), '/vault/settings/mount-secret-backend');
+ await mountSecrets.selectType(type);
+ await mountSecrets
+ .path(path)
+ .toggleOptions()
+ .enableDefaultTtl()
+ .defaultTTLUnit('s')
+ .defaultTTLVal(100)
+ .submit();
+ await settled();
+ assert.ok(
+ find('[data-test-flash-message]').textContent.trim(),
+ `Successfully mounted '${type}' at '${path}'!`
+ );
+ await settled();
assert.strictEqual(currentURL(), `/vault/secrets`, 'redirects to secrets page');
// cleanup
await runCmd(deleteEngineCmd(path));
diff --git a/ui/tests/acceptance/settings/auth/enable-test.js b/ui/tests/acceptance/settings/auth/enable-test.js
index ab483f4d2f44..3f553c2cedbc 100644
--- a/ui/tests/acceptance/settings/auth/enable-test.js
+++ b/ui/tests/acceptance/settings/auth/enable-test.js
@@ -3,14 +3,15 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { click, currentRouteName, visit } from '@ember/test-helpers';
+import { click, currentRouteName, settled } from '@ember/test-helpers';
import { module, test } from 'qunit';
import { setupApplicationTest } from 'ember-qunit';
import { v4 as uuidv4 } from 'uuid';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
-import { login } from 'vault/tests/helpers/auth/auth-helpers';
-import { deleteAuthCmd, runCmd } from 'vault/tests/helpers/commands';
+
import { GENERAL } from 'vault/tests/helpers/general-selectors';
+import page from 'vault/tests/pages/settings/auth/enable';
+import listPage from 'vault/tests/pages/access/methods';
+import { login } from 'vault/tests/helpers/auth/auth-helpers';
module('Acceptance | settings/auth/enable', function (hooks) {
setupApplicationTest(hooks);
@@ -24,30 +25,30 @@ module('Acceptance | settings/auth/enable', function (hooks) {
// always force the new mount to the top of the list
const path = `aaa-approle-${this.uid}`;
const type = 'approle';
- await visit('/vault/settings/auth/enable');
+ await page.visit();
assert.strictEqual(currentRouteName(), 'vault.cluster.settings.auth.enable');
- await mountBackend(type, path);
- assert
- .dom(GENERAL.latestFlashContent)
- .hasText(`Successfully mounted the ${type} auth method at ${path}.`);
+ await page.enable(type, path);
+ await settled();
+ assert.strictEqual(
+ page.flash.latestMessage,
+ `Successfully mounted the ${type} auth method at ${path}.`,
+ 'success flash shows'
+ );
assert.strictEqual(
currentRouteName(),
'vault.cluster.settings.auth.configure.section',
'redirects to the auth config page'
);
- await visit('/vault/access/');
- assert.dom(`[data-test-auth-backend-link=${path}]`).exists('mount is present in the list');
-
- // cleanup
- await runCmd(deleteAuthCmd(path));
+ await listPage.visit();
+ assert.ok(listPage.findLinkById(path), 'mount is present in the list');
});
test('it renders default config details', async function (assert) {
const path = `approle-config-${this.uid}`;
const type = 'approle';
- await visit('/vault/settings/auth/enable');
- await mountBackend(type, path);
+ await page.visit();
+ await page.enable(type, path);
// the config details is updated to query mount details from sys/internal/ui/mounts
// but we still want these forms to continue using sys/auth which returns 0 for default ttl values
// check tune form (right after enabling)
@@ -63,8 +64,5 @@ module('Acceptance | settings/auth/enable', function (hooks) {
await click('[data-test-configure-link]');
assert.dom(GENERAL.toggleInput('Default Lease TTL')).isNotChecked('default lease ttl is still unset');
assert.dom(GENERAL.toggleInput('Max Lease TTL')).isNotChecked('max lease ttl is still unset');
-
- // cleanup
- await runCmd(deleteAuthCmd(path));
});
});
diff --git a/ui/tests/acceptance/settings/mount-secret-backend-test.js b/ui/tests/acceptance/settings/mount-secret-backend-test.js
index 8894067f1667..ee1e6e8d91d3 100644
--- a/ui/tests/acceptance/settings/mount-secret-backend-test.js
+++ b/ui/tests/acceptance/settings/mount-secret-backend-test.js
@@ -30,10 +30,9 @@ import { mountableEngines } from 'vault/helpers/mountable-secret-engines'; // al
import { supportedSecretBackends } from 'vault/helpers/supported-secret-backends';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
import { SECRET_ENGINE_SELECTORS as SES } from 'vault/tests/helpers/secret-engine/secret-engine-selectors';
-import { MOUNT_BACKEND_FORM } from 'vault/tests/helpers/components/mount-backend-form-selectors';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
import { SELECTORS as OIDC } from 'vault/tests/helpers/oidc-config';
import { adminOidcCreateRead, adminOidcCreate } from 'vault/tests/helpers/secret-engine/policy-generator';
+import { WIF_ENGINES } from 'vault/helpers/mountable-secret-engines';
const consoleComponent = create(consoleClass);
@@ -60,17 +59,17 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
await page.visit();
assert.strictEqual(currentRouteName(), 'vault.cluster.settings.mount-secret-backend');
- await click(MOUNT_BACKEND_FORM.mountType('kv'));
- await fillIn(GENERAL.inputByAttr('path'), path);
- await click(GENERAL.toggleGroup('Method Options'));
+ await page.selectType('kv');
await page
+ .path(path)
+ .toggleOptions()
.enableDefaultTtl()
.defaultTTLUnit('h')
.defaultTTLVal(defaultTTLHours)
.enableMaxTtl()
.maxTTLUnit('h')
- .maxTTLVal(maxTTLHours);
- await click(GENERAL.saveButton);
+ .maxTTLVal(maxTTLHours)
+ .submit();
await configPage.visit({ backend: path });
assert.strictEqual(configPage.defaultTTL, `${this.calcDays(defaultTTLHours)}`, 'shows the proper TTL');
assert.strictEqual(configPage.maxTTL, `${this.calcDays(maxTTLHours)}`, 'shows the proper max TTL');
@@ -83,16 +82,16 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
await page.visit();
- assert.strictEqual(
- currentRouteName(),
- 'vault.cluster.settings.mount-secret-backend',
- 'navigates to mount page'
- );
- await click(MOUNT_BACKEND_FORM.mountType('kv'));
- await fillIn(GENERAL.inputByAttr('path'), path);
- await click(GENERAL.toggleGroup('Method Options'));
- await page.enableDefaultTtl().enableMaxTtl().maxTTLUnit('h').maxTTLVal(maxTTLHours);
- await click(GENERAL.saveButton);
+ assert.strictEqual(currentRouteName(), 'vault.cluster.settings.mount-secret-backend');
+ await page.selectType('kv');
+ await page
+ .path(path)
+ .toggleOptions()
+ .enableDefaultTtl()
+ .enableMaxTtl()
+ .maxTTLUnit('h')
+ .maxTTLVal(maxTTLHours)
+ .submit();
await configPage.visit({ backend: path });
assert.strictEqual(configPage.defaultTTL, '1 month 1 day', 'shows system default TTL');
assert.strictEqual(configPage.maxTTL, `${this.calcDays(maxTTLHours)}`, 'shows the proper max TTL');
@@ -101,7 +100,7 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
test('it sets the max ttl after pki chosen, resets after', async function (assert) {
await page.visit();
assert.strictEqual(currentRouteName(), 'vault.cluster.settings.mount-secret-backend');
- await click(MOUNT_BACKEND_FORM.mountType('pki'));
+ await page.selectType('pki');
assert.dom('[data-test-input="maxLeaseTtl"]').exists();
assert
.dom('[data-test-input="maxLeaseTtl"] [data-test-ttl-toggle]')
@@ -110,8 +109,8 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
assert.dom('[data-test-input="maxLeaseTtl"] [data-test-select="ttl-unit"]').hasValue('d');
// Go back and choose a different type
- await click(GENERAL.backButton);
- await click(MOUNT_BACKEND_FORM.mountType('database'));
+ await page.back();
+ await page.selectType('database');
assert.dom('[data-test-input="maxLeaseTtl"]').exists('3650');
assert
.dom('[data-test-input="maxLeaseTtl"] [data-test-ttl-toggle]')
@@ -132,12 +131,13 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
await page.visit();
assert.strictEqual(currentRouteName(), 'vault.cluster.settings.mount-secret-backend');
- await mountBackend('kv', path);
+ await page.selectType('kv');
+ await page.path(path).submit();
await page.secretList();
await settled();
await page.enableEngine();
- await mountBackend('kv', path);
-
+ await page.selectType('kv');
+ await page.path(path).submit();
assert.dom('[data-test-message-error-description]').containsText(`path is already in use at ${path}`);
assert.strictEqual(currentRouteName(), 'vault.cluster.settings.mount-secret-backend');
@@ -185,11 +185,9 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
await authPage.login(userToken);
// create the engine
await mountSecrets.visit();
- await click(MOUNT_BACKEND_FORM.mountType('kv'));
- await fillIn(GENERAL.inputByAttr('path'), enginePath);
- await mountSecrets.setMaxVersion(101);
- await click(GENERAL.saveButton);
-
+ await mountSecrets.selectType('kv');
+ await mountSecrets.path(enginePath).setMaxVersion(101).submit();
+ await settled();
assert
.dom('[data-test-flash-message]')
.containsText(
@@ -215,8 +213,8 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
`delete sys/mounts/${engine.type}`,
]);
await mountSecrets.visit();
- await mountBackend(engine.type, engine.type);
-
+ await mountSecrets.selectType(engine.type);
+ await mountSecrets.path(engine.type).submit();
assert.strictEqual(
currentRouteName(),
`vault.cluster.secrets.backend.${engine.engineRoute}`,
@@ -242,13 +240,12 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
`delete sys/mounts/${engine.type}`,
]);
await mountSecrets.visit();
- await click(MOUNT_BACKEND_FORM.mountType(engine.type));
- await fillIn(GENERAL.inputByAttr('path'), engine.type);
+ await mountSecrets.selectType(engine.type);
+ await mountSecrets.path(engine.type);
if (engine.type === 'kv') {
- await click(GENERAL.toggleGroup('Method Options'));
- await mountSecrets.version(1);
+ await mountSecrets.toggleOptions().version(1);
}
- await click(GENERAL.saveButton);
+ await mountSecrets.submit();
assert.strictEqual(
currentRouteName(),
@@ -272,7 +269,8 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
`delete sys/mounts/${engine.type}`,
]);
await mountSecrets.visit();
- await mountBackend(engine.type, engine.type);
+ await mountSecrets.selectType(engine.type);
+ await mountSecrets.path(engine.type).submit();
assert.strictEqual(
currentRouteName(),
@@ -290,7 +288,9 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
`delete sys/mounts/${v2}`,
]);
await mountSecrets.visit();
- await mountBackend('kv', v2);
+ await mountSecrets.selectType('kv');
+ await mountSecrets.path(v2).submit();
+
assert.strictEqual(currentURL(), `/vault/secrets/${v2}/kv/list`, `${v2} navigates to list url`);
assert.strictEqual(
currentRouteName(),
@@ -304,11 +304,8 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
`delete sys/mounts/${v1}`,
]);
await mountSecrets.visit();
- await click(MOUNT_BACKEND_FORM.mountType('kv'));
- await fillIn(GENERAL.inputByAttr('path'), v1);
- await click(GENERAL.toggleGroup('Method Options'));
- await mountSecrets.version(1);
- await click(GENERAL.saveButton);
+ await mountSecrets.selectType('kv');
+ await mountSecrets.path(v1).toggleOptions().version(1).submit();
assert.strictEqual(currentURL(), `/vault/secrets/${v1}/list`, `${v1} navigates to list url`);
assert.strictEqual(
@@ -323,20 +320,24 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
// create an oidc/key
await runCmd(`write identity/oidc/key/some-key allowed_client_ids="*"`);
- await page.visit();
- await click(MOUNT_BACKEND_FORM.mountType('aws')); // only testing aws of the WIF engines as the functionality for all others WIF engines in this form are the same
- await click(GENERAL.toggleGroup('Method Options'));
- assert.dom('[data-test-search-select-with-modal]').exists('Search select with modal component renders');
- await clickTrigger('#key');
- const dropdownOptions = findAll('[data-option-index]').map((o) => o.innerText);
- assert.ok(dropdownOptions.includes('some-key'), 'search select options show some-key');
- await click(GENERAL.searchSelect.option(GENERAL.searchSelect.optionIndex('some-key')));
- assert
- .dom(GENERAL.searchSelect.selectedOption())
- .hasText('some-key', 'some-key was selected and displays in the search select');
- await click(GENERAL.backButton);
- // Choose a non-wif engine
- await click(MOUNT_BACKEND_FORM.mountType('ssh'));
+ for (const engine of WIF_ENGINES) {
+ await page.visit();
+ await page.selectType(engine);
+ await click(GENERAL.toggleGroup('Method Options'));
+ assert
+ .dom('[data-test-search-select-with-modal]')
+ .exists('Search select with modal component renders');
+ await clickTrigger('#key');
+ const dropdownOptions = findAll('[data-option-index]').map((o) => o.innerText);
+ assert.ok(dropdownOptions.includes('some-key'), 'search select options show some-key');
+ await click(GENERAL.searchSelect.option(GENERAL.searchSelect.optionIndex('some-key')));
+ assert
+ .dom(GENERAL.searchSelect.selectedOption())
+ .hasText('some-key', 'some-key was selected and displays in the search select');
+ }
+ // Go back and choose a non-wif engine type
+ await page.back();
+ await page.selectType('ssh');
assert
.dom('[data-test-search-select-with-modal]')
.doesNotExist('for type ssh, the modal field does not render.');
@@ -345,84 +346,75 @@ module('Acceptance | settings/mount-secret-backend', function (hooks) {
});
test('it allows a user with permissions to oidc/key to create an identity_token_key', async function (assert) {
- logout.visit();
- const engine = 'aws'; // only testing aws of the WIF engines as the functionality for all others WIF engines in this form are the same
- await authPage.login();
- const path = `secrets-adminPolicy-${engine}`;
- const newKey = `key-${engine}-${uuidv4()}`;
- const secrets_admin_policy = adminOidcCreateRead(path);
- const secretsAdminToken = await runCmd(
- tokenWithPolicyCmd(`secrets-admin-${path}`, secrets_admin_policy)
- );
-
- await logout.visit();
- await authPage.login(secretsAdminToken);
- await visit('/vault/settings/mount-secret-backend');
- await click(MOUNT_BACKEND_FORM.mountType(engine));
- await fillIn(GENERAL.inputByAttr('path'), path);
- await click(GENERAL.toggleGroup('Method Options'));
- await clickTrigger('#key');
- // create new key
- await fillIn(GENERAL.searchSelect.searchInput, newKey);
- await click(GENERAL.searchSelect.options);
- assert.dom('#search-select-modal').exists(`modal with form opens for engine ${engine}`);
- assert
- .dom('[data-test-modal-title]')
- .hasText('Create new key', `Create key modal renders for engine: ${engine}`);
-
- await click(OIDC.keySaveButton);
- assert.dom('#search-select-modal').doesNotExist(`modal disappears onSave for engine ${engine}`);
- assert.dom(GENERAL.searchSelect.selectedOption()).hasText(newKey, `${newKey} is now selected`);
-
- await click(GENERAL.saveButton);
- await visit(`/vault/secrets/${path}/configuration`);
- await click(SES.configurationToggle);
- assert
- .dom(GENERAL.infoRowValue('Identity Token Key'))
- .hasText(newKey, `shows identity token key on configuration page for engine: ${engine}`);
-
- // cleanup
- await runCmd(`delete sys/mounts/${path}`);
- await runCmd(`delete identity/oidc/key/some-key`);
- await runCmd(`delete identity/oidc/key/${newKey}`);
- await logout.visit();
+ for (const engine of WIF_ENGINES) {
+ const path = `secrets-adminPolicy-${engine}`;
+ const newKey = `key-${uuidv4()}`;
+ const secrets_admin_policy = adminOidcCreateRead(path);
+ const secretsAdminToken = await runCmd(
+ tokenWithPolicyCmd(`secrets-admin-${path}`, secrets_admin_policy)
+ );
+
+ await logout.visit();
+ await authPage.login(secretsAdminToken);
+ await page.visit();
+ await page.selectType(engine);
+ await page.path(path);
+ await click(GENERAL.toggleGroup('Method Options'));
+ await clickTrigger('#key');
+ // create new key
+ await fillIn(GENERAL.searchSelect.searchInput, newKey);
+ await click(GENERAL.searchSelect.options);
+ assert.dom('#search-select-modal').exists('modal with form opens');
+ assert.dom('[data-test-modal-title]').hasText('Create new key', 'Create key modal renders');
+
+ await click(OIDC.keySaveButton);
+ assert.dom('#search-select-modal').doesNotExist('modal disappears onSave');
+ assert.dom(GENERAL.searchSelect.selectedOption()).hasText(newKey, `${newKey} is now selected`);
+
+ await page.submit();
+ await visit(`/vault/secrets/${path}/configuration`);
+ await click(SES.configurationToggle);
+ assert
+ .dom(GENERAL.infoRowValue('Identity Token Key'))
+ .hasText(newKey, 'shows identity token key on configuration page');
+ // cleanup
+ await runCmd(`delete sys/mounts/${path}`);
+ await runCmd(`delete identity/oidc/key/some-key`);
+ await runCmd(`delete identity/oidc/key/${newKey}`);
+ }
});
test('it allows user with NO access to oidc/key to manually input an identity_token_key', async function (assert) {
- await logout.visit();
- const engine = 'aws'; // only testing aws of the WIF engines as the functionality for all others WIF engines in this form are the same
- await authPage.login();
- const path = `secrets-noOidcAdmin-${engine}`;
- const secretsNoOidcAdminPolicy = adminOidcCreate(path);
- const secretsNoOidcAdminToken = await runCmd(
- tokenWithPolicyCmd(`secrets-noOidcAdmin-${path}`, secretsNoOidcAdminPolicy)
- );
- // create an oidc/key that they can then use even if they can't read it.
- await runCmd(`write identity/oidc/key/general-key allowed_client_ids="*"`);
-
- await logout.visit();
- await authPage.login(secretsNoOidcAdminToken);
- await page.visit();
- await click(MOUNT_BACKEND_FORM.mountType(engine));
- await fillIn(GENERAL.inputByAttr('path'), path);
- await click(GENERAL.toggleGroup('Method Options'));
- // type-in fallback component to create new key
- await typeIn(GENERAL.inputSearch('key'), 'general-key');
- await click(GENERAL.saveButton);
- assert
- .dom(GENERAL.latestFlashContent)
- .hasText(`Successfully mounted the ${engine} secrets engine at ${path}.`);
-
- await visit(`/vault/secrets/${path}/configuration`);
-
- await click(SES.configurationToggle);
- assert
- .dom(GENERAL.infoRowValue('Identity Token Key'))
- .hasText('general-key', `shows identity token key on configuration page for engine: ${engine}`);
-
- // cleanup
- await runCmd(`delete sys/mounts/${path}`);
- await logout.visit();
+ for (const engine of WIF_ENGINES) {
+ const path = `secrets-noOidcAdmin-${engine}`;
+ const secretsNoOidcAdminPolicy = adminOidcCreate(path);
+ const secretsNoOidcAdminToken = await runCmd(
+ tokenWithPolicyCmd(`secrets-noOidcAdmin-${path}`, secretsNoOidcAdminPolicy)
+ );
+ // create an oidc/key that they can then use even if they can't read it.
+ await runCmd(`write identity/oidc/key/general-key allowed_client_ids="*"`);
+
+ await logout.visit();
+ await authPage.login(secretsNoOidcAdminToken);
+ await page.visit();
+ await page.selectType(engine);
+ await page.path(path);
+ await click(GENERAL.toggleGroup('Method Options'));
+ // type-in fallback component to create new key
+ await typeIn(GENERAL.inputSearch('key'), 'general-key');
+ await page.submit();
+ assert
+ .dom(GENERAL.latestFlashContent)
+      .hasText(`Successfully mounted the ${engine} secrets engine at ${path}.`);
+
+ await visit(`/vault/secrets/${path}/configuration`);
+ await click(SES.configurationToggle);
+ assert
+ .dom(GENERAL.infoRowValue('Identity Token Key'))
+ .hasText('general-key', 'shows identity token key on configuration page');
+ // cleanup
+ await runCmd(`delete sys/mounts/${path}`);
+ }
});
});
});
diff --git a/ui/tests/helpers/auth/auth-form-selectors.ts b/ui/tests/helpers/auth/auth-form-selectors.ts
index b5a3730ea922..eb8c77ce9dd1 100644
--- a/ui/tests/helpers/auth/auth-form-selectors.ts
+++ b/ui/tests/helpers/auth/auth-form-selectors.ts
@@ -12,5 +12,4 @@ export const AUTH_FORM = {
input: (item: string) => `[data-test-${item}]`, // i.e. jwt, role, token, password or username
mountPathInput: '[data-test-auth-form-mount-path]',
moreOptions: '[data-test-auth-form-options-toggle]',
- namespaceInput: '[data-test-auth-form-ns-input]',
};
diff --git a/ui/tests/helpers/auth/auth-helpers.ts b/ui/tests/helpers/auth/auth-helpers.ts
index 544bdcb2a063..65632c066861 100644
--- a/ui/tests/helpers/auth/auth-helpers.ts
+++ b/ui/tests/helpers/auth/auth-helpers.ts
@@ -14,16 +14,7 @@ export const login = async (token = rootToken) => {
await logout();
await visit('/vault/auth?with=token');
await fillIn(AUTH_FORM.input('token'), token);
- return click(AUTH_FORM.login);
-};
-
-export const loginNs = async (ns: string, token = rootToken) => {
- // make sure we're always logged out and logged back in
- await logout();
- await visit('/vault/auth?with=token');
- await fillIn(AUTH_FORM.namespaceInput, ns);
- await fillIn(AUTH_FORM.input('token'), token);
- return click(AUTH_FORM.login);
+ return await click(AUTH_FORM.login);
};
export const logout = async () => {
diff --git a/ui/tests/helpers/components/mount-backend-form-helpers.js b/ui/tests/helpers/components/mount-backend-form-helpers.js
deleted file mode 100644
index 8928009a4c72..000000000000
--- a/ui/tests/helpers/components/mount-backend-form-helpers.js
+++ /dev/null
@@ -1,19 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { fillIn, click } from '@ember/test-helpers';
-import { MOUNT_BACKEND_FORM } from 'vault/tests/helpers/components/mount-backend-form-selectors';
-import { GENERAL } from 'vault/tests/helpers/general-selectors';
-
-export const mountBackend = async (type, path) => {
- await click(MOUNT_BACKEND_FORM.mountType(type));
- if (path) {
- await fillIn(GENERAL.inputByAttr('path'), path);
- await click(GENERAL.saveButton);
- } else {
- // save with default path
- await click(GENERAL.saveButton);
- }
-};
diff --git a/ui/tests/helpers/components/mount-backend-form-selectors.ts b/ui/tests/helpers/components/mount-backend-form-selectors.ts
deleted file mode 100644
index 2000201d4447..000000000000
--- a/ui/tests/helpers/components/mount-backend-form-selectors.ts
+++ /dev/null
@@ -1,10 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-/** Ideally we wouldn't have one selector for one file.
- However, given the coupled nature of mounting both secret engines and auth methods in one form, and the organization of our helpers, I've opted to keep this as is. This selector spans multiple test, is component scoped and it's used by both secret engines and auth methods. */
-export const MOUNT_BACKEND_FORM = {
- mountType: (name: string) => `[data-test-mount-type="${name}"]`,
-};
diff --git a/ui/tests/helpers/general-selectors.ts b/ui/tests/helpers/general-selectors.ts
index c9159350d3d4..deff4ef53fc3 100644
--- a/ui/tests/helpers/general-selectors.ts
+++ b/ui/tests/helpers/general-selectors.ts
@@ -94,8 +94,6 @@ export const GENERAL = {
navLink: (label: string) => `[data-test-sidebar-nav-link="${label}"]`,
cancelButton: '[data-test-cancel]',
saveButton: '[data-test-save]',
- backButton: '[data-test-back-button]',
- codeBlock: (label: string) => `[data-test-code-block="${label}"]`,
codemirror: `[data-test-component="code-mirror-modifier"]`,
codemirrorTextarea: `[data-test-component="code-mirror-modifier"] textarea`,
};
diff --git a/ui/tests/helpers/kv/kv-selectors.js b/ui/tests/helpers/kv/kv-selectors.js
index 67b766fdf9f9..3c0b4af42c46 100644
--- a/ui/tests/helpers/kv/kv-selectors.js
+++ b/ui/tests/helpers/kv/kv-selectors.js
@@ -90,8 +90,8 @@ export const PAGE = {
},
paths: {
copyButton: (label) => `${PAGE.infoRowValue(label)} button`,
- codeSnippet: (section) => `[data-test-code-block="${section}"] code`,
- snippetCopy: (section) => `[data-test-code-block="${section}"] button`,
+ codeSnippet: (section) => `[data-test-commands="${section}"] code`,
+ snippetCopy: (section) => `[data-test-commands="${section}"] button`,
},
};
diff --git a/ui/tests/helpers/secret-engine/policy-generator.ts b/ui/tests/helpers/secret-engine/policy-generator.ts
index d8a08e55d387..5f758bbb8875 100644
--- a/ui/tests/helpers/secret-engine/policy-generator.ts
+++ b/ui/tests/helpers/secret-engine/policy-generator.ts
@@ -3,7 +3,7 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-// This policy can mount a secret engine
+// This policy can mount a secret engine
// and list and create oidc keys, relevant for setting identity_key_token for WIF
export const adminOidcCreateRead = (mountPath: string) => {
return `
diff --git a/ui/tests/helpers/secret-engine/secret-engine-helpers.js b/ui/tests/helpers/secret-engine/secret-engine-helpers.js
index e970bd0afa23..2a9c88601c4d 100644
--- a/ui/tests/helpers/secret-engine/secret-engine-helpers.js
+++ b/ui/tests/helpers/secret-engine/secret-engine-helpers.js
@@ -206,49 +206,3 @@ export const fillInAwsConfig = async (situation = 'withAccess') => {
await fillIn(GENERAL.ttl.input('Identity token TTL'), '7200');
}
};
-
-// Example usage
-// createLongJson (2, 3) will create a json object with 2 original keys, each with 3 nested keys
-// {
-// "key-0": {
-// "nested-key-0": {
-// "nested-key-1": {
-// "nested-key-2": "nested-value"
-// }
-// }
-// },
-// "key-1": {
-// "nested-key-0": {
-// "nested-key-1": {
-// "nested-key-2": "nested-value"
-// }
-// }
-// }
-// }
-
-export function createLongJson(lines = 10, nestLevel = 3) {
- const keys = Array.from({ length: nestLevel }, (_, i) => `nested-key-${i}`);
- const jsonObject = {};
-
- for (let i = 0; i < lines; i++) {
- nestLevel > 0
- ? (jsonObject[`key-${i}`] = createNestedObject({}, keys, 'nested-value'))
- : (jsonObject[`key-${i}`] = 'non-nested-value');
- }
- return jsonObject;
-}
-
-function createNestedObject(obj = {}, keys, value) {
- let current = obj;
-
- for (let i = 0; i < keys.length - 1; i++) {
- const key = keys[i];
- if (!current[key]) {
- current[key] = {};
- }
- current = current[key];
- }
-
- current[keys[keys.length - 1]] = value;
- return obj;
-}
diff --git a/ui/tests/helpers/secret-engine/secret-engine-selectors.ts b/ui/tests/helpers/secret-engine/secret-engine-selectors.ts
index 4349d5ca9e63..eca2e967e400 100644
--- a/ui/tests/helpers/secret-engine/secret-engine-selectors.ts
+++ b/ui/tests/helpers/secret-engine/secret-engine-selectors.ts
@@ -4,6 +4,7 @@
*/
export const SECRET_ENGINE_SELECTORS = {
+ backButton: '[data-test-back-button]',
configTab: '[data-test-configuration-tab]',
configure: '[data-test-secret-backend-configure]',
configureTitle: (type: string) => `[data-test-backend-configure-title="${type}"]`,
@@ -14,13 +15,10 @@ export const SECRET_ENGINE_SELECTORS = {
title: '[data-test-backend-error-title]',
},
generateLink: '[data-test-backend-credentials]',
+ mountType: (name: string) => `[data-test-mount-type="${name}"]`,
+ mountSubmit: '[data-test-mount-submit]',
secretHeader: '[data-test-secret-header]',
secretLink: (name: string) => (name ? `[data-test-secret-link="${name}"]` : '[data-test-secret-link]'),
- secretLinkMenu: (name: string) => `[data-test-secret-link="${name}"] [data-test-popup-menu-trigger]`,
- secretLinkMenuDelete: (name: string) =>
- `[data-test-secret-link="${name}"] [data-test-confirm-action-trigger]`,
- secretLinkATag: (name: string) =>
- name ? `[data-test-secret-item-link="${name}"]` : '[data-test-secret-item-link]',
viewBackend: '[data-test-backend-view-link]',
warning: '[data-test-warning]',
aws: {
diff --git a/ui/tests/integration/components/auth/page-test.js b/ui/tests/integration/components/auth/page-test.js
index d6d88e8a5a10..45628cd3915a 100644
--- a/ui/tests/integration/components/auth/page-test.js
+++ b/ui/tests/integration/components/auth/page-test.js
@@ -198,7 +198,7 @@ module('Integration | Component | auth | page ', function (hooks) {
'To finish signing in, you will need to complete an additional MFA step. Please wait... Back to login',
'renders okta number challenge on submit'
);
- await click(GENERAL.backButton);
+ await click('[data-test-back-button]');
assert.dom(AUTH_FORM.form).exists('renders auth form on return to login');
assert.dom(GENERAL.selectByAttr('auth-method')).hasValue('okta', 'preserves method type on back');
});
diff --git a/ui/tests/integration/components/json-editor-test.js b/ui/tests/integration/components/json-editor-test.js
index 56f4a674f8d9..100b7d8ef9b8 100644
--- a/ui/tests/integration/components/json-editor-test.js
+++ b/ui/tests/integration/components/json-editor-test.js
@@ -11,7 +11,6 @@ import hbs from 'htmlbars-inline-precompile';
import jsonEditor from '../../pages/components/json-editor';
import sinon from 'sinon';
import { setRunOptions } from 'ember-a11y-testing/test-support';
-import { createLongJson } from 'vault/tests/helpers/secret-engine/secret-engine-helpers';
const component = create(jsonEditor);
@@ -30,7 +29,6 @@ module('Integration | Component | json-editor', function (hooks) {
this.set('onFocusOut', sinon.spy());
this.set('json_blob', JSON_BLOB);
this.set('bad_json_blob', BAD_JSON_BLOB);
- this.set('long_json', JSON.stringify(createLongJson(), null, `\t`));
this.set('hashi-read-only-theme', 'hashi-read-only auto-height');
setRunOptions({
rules: {
@@ -38,8 +36,6 @@ module('Integration | Component | json-editor', function (hooks) {
label: { enabled: false },
// TODO: investigate and fix Codemirror styling
'color-contrast': { enabled: false },
- // failing on .CodeMirror-scroll
- 'scrollable-region-focusable': { enabled: false },
},
});
});
@@ -133,31 +129,4 @@ module('Integration | Component | json-editor', function (hooks) {
'even after hitting enter the value is still set correctly'
);
});
-
- test('no viewportMargin renders only default 10 lines of data on the DOM', async function (assert) {
- await render(hbs`
-
- `);
- assert
- .dom('.CodeMirror-code')
- .doesNotIncludeText('key-9', 'Without viewportMargin, user cannot search for key-9');
- });
-
- test('when viewportMargin is set user is able to search a long secret', async function (assert) {
- await render(hbs`
-
- `);
- assert
- .dom('.CodeMirror-code')
- .containsText('key-9', 'With viewportMargin set, user can search for key-9');
- });
});
diff --git a/ui/tests/integration/components/kv/kv-data-fields-test.js b/ui/tests/integration/components/kv/kv-data-fields-test.js
index ac3d5ed8b606..e9e18d99f368 100644
--- a/ui/tests/integration/components/kv/kv-data-fields-test.js
+++ b/ui/tests/integration/components/kv/kv-data-fields-test.js
@@ -11,9 +11,6 @@ import { hbs } from 'ember-cli-htmlbars';
import { fillIn, render, click } from '@ember/test-helpers';
import codemirror from 'vault/tests/helpers/codemirror';
import { PAGE, FORM } from 'vault/tests/helpers/kv/kv-selectors';
-import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { setRunOptions } from 'ember-a11y-testing/test-support';
-import { createLongJson } from 'vault/tests/helpers/secret-engine/secret-engine-helpers';
module('Integration | Component | kv-v2 | KvDataFields', function (hooks) {
setupRenderingTest(hooks);
@@ -25,12 +22,6 @@ module('Integration | Component | kv-v2 | KvDataFields', function (hooks) {
this.backend = 'my-kv-engine';
this.path = 'my-secret';
this.secret = this.store.createRecord('kv/data', { backend: this.backend });
- setRunOptions({
- rules: {
- // failing on .CodeMirror-scroll
- 'scrollable-region-focusable': { enabled: false },
- },
- });
});
test('it updates the secret model', async function (assert) {
@@ -97,7 +88,7 @@ module('Integration | Component | kv-v2 | KvDataFields', function (hooks) {
assert.dom(PAGE.infoRowValue('foo')).hasText('bar', 'secret value shows after toggle');
});
- test('it shows hds codeblock when viewing secret details of complex secret', async function (assert) {
+ test('it shows readonly json editor when viewing secret details of complex secret', async function (assert) {
this.secret.secretData = {
foo: {
bar: 'baz',
@@ -109,24 +100,7 @@ module('Integration | Component | kv-v2 | KvDataFields', function (hooks) {
owner: this.engine,
});
assert.dom(PAGE.infoRowValue('foo')).doesNotExist('does not render rows of secret data');
- assert.dom(GENERAL.codeBlock('secret-data')).exists('hds codeBlock exists');
- assert
- .dom(GENERAL.codeBlock('secret-data'))
- .hasText(`Version data { "foo": { "bar": "baz" } } `, 'Json data is displayed');
- });
-
- test('it defaults to a viewportMargin 10 when there is no secret data', async function (assert) {
- await render(hbs``, { owner: this.engine });
- assert.strictEqual(codemirror().options.viewportMargin, 10, 'viewportMargin defaults to 10');
- });
-
- test('it calculates viewportMargin based on secret size', async function (assert) {
- this.secret.secretData = createLongJson(100);
- await render(hbs``, { owner: this.engine });
- assert.strictEqual(
- codemirror().options.viewportMargin,
- 100,
- 'viewportMargin is set to 100 matching the height of the json'
- );
+ assert.dom('[data-test-component="code-mirror-modifier"]').hasClass('readonly-codemirror');
+ assert.dom('[data-test-component="code-mirror-modifier"]').includesText(`{ "foo": { "bar": "baz" }}`);
});
});
diff --git a/ui/tests/integration/components/kv/kv-patch/editor/form-test.js b/ui/tests/integration/components/kv/kv-patch/editor/form-test.js
index 004e8a29dad7..4096feb945be 100644
--- a/ui/tests/integration/components/kv/kv-patch/editor/form-test.js
+++ b/ui/tests/integration/components/kv/kv-patch/editor/form-test.js
@@ -11,7 +11,7 @@ import { hbs } from 'ember-cli-htmlbars';
import sinon from 'sinon';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
import { FORM } from 'vault/tests/helpers/kv/kv-selectors';
-import { NON_STRING_WARNING, WHITESPACE_WARNING } from 'vault/utils/model-helpers/validators';
+import { NON_STRING_WARNING, WHITESPACE_WARNING } from 'vault/utils/validators';
module('Integration | Component | kv | kv-patch/editor/form', function (hooks) {
setupRenderingTest(hooks);
@@ -96,14 +96,14 @@ module('Integration | Component | kv | kv-patch/editor/form', function (hooks) {
await this.renderComponent();
assert.dom(GENERAL.toggleInput('Reveal subkeys')).isNotChecked('toggle is initially unchecked');
- assert.dom(GENERAL.codeBlock('subkeys')).doesNotExist();
+ assert.dom('[data-test-subkeys]').doesNotExist();
await click(GENERAL.toggleInput('Reveal subkeys'));
assert.dom(GENERAL.toggleInput('Reveal subkeys')).isChecked();
- assert.dom(GENERAL.codeBlock('subkeys')).hasText(JSON.stringify(this.subkeys, null, 2));
+ assert.dom('[data-test-subkeys]').hasText(JSON.stringify(this.subkeys, null, 2));
await click(GENERAL.toggleInput('Reveal subkeys'));
assert.dom(GENERAL.toggleInput('Reveal subkeys')).isNotChecked();
- assert.dom(GENERAL.codeBlock('subkeys')).doesNotExist('unchecking re-hides subkeys');
+ assert.dom('[data-test-subkeys]').doesNotExist('unchecking re-hides subkeys');
});
test('it enables and disables inputs', async function (assert) {
diff --git a/ui/tests/integration/components/kv/kv-patch/json-form-test.js b/ui/tests/integration/components/kv/kv-patch/json-form-test.js
index 37703e2bd985..166ea8d2fa7a 100644
--- a/ui/tests/integration/components/kv/kv-patch/json-form-test.js
+++ b/ui/tests/integration/components/kv/kv-patch/json-form-test.js
@@ -59,14 +59,14 @@ module('Integration | Component | kv | kv-patch/editor/json-form', function (hoo
await this.renderComponent();
assert.dom(GENERAL.toggleInput('Reveal subkeys')).isNotChecked('toggle is initially unchecked');
- assert.dom(GENERAL.codeBlock('subkeys')).doesNotExist();
+ assert.dom('[data-test-subkeys]').doesNotExist();
await click(GENERAL.toggleInput('Reveal subkeys'));
assert.dom(GENERAL.toggleInput('Reveal subkeys')).isChecked();
- assert.dom(GENERAL.codeBlock('subkeys')).hasText(JSON.stringify(this.subkeys, null, 2));
+ assert.dom('[data-test-subkeys]').hasText(JSON.stringify(this.subkeys, null, 2));
await click(GENERAL.toggleInput('Reveal subkeys'));
assert.dom(GENERAL.toggleInput('Reveal subkeys')).isNotChecked();
- assert.dom(GENERAL.codeBlock('subkeys')).doesNotExist('unchecking re-hides subkeys');
+ assert.dom('[data-test-subkeys]').doesNotExist('unchecking re-hides subkeys');
});
test('it renders linting errors', async function (assert) {
diff --git a/ui/tests/integration/components/kv/page/kv-page-list-test.js b/ui/tests/integration/components/kv/page/kv-page-list-test.js
index 0eddc52e352e..e4828d58e374 100644
--- a/ui/tests/integration/components/kv/page/kv-page-list-test.js
+++ b/ui/tests/integration/components/kv/page/kv-page-list-test.js
@@ -70,7 +70,6 @@ module('Integration | Component | kv | Page::List', function (hooks) {
@failedDirectoryQuery={{this.failedDirectoryQuery}}
@breadcrumbs={{this.breadcrumbs}}
@meta={{this.model.meta}}
- @currentRouteParams={{array this.backend}}
/>`,
{
owner: this.engine,
diff --git a/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js b/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js
index 298d1fd330a3..430ff3765f08 100644
--- a/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js
+++ b/ui/tests/integration/components/kv/page/kv-page-secret-details-test.js
@@ -7,14 +7,13 @@ import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
import { setupEngine } from 'ember-engines/test-support';
import { setupMirage } from 'ember-cli-mirage/test-support';
-import { click, render } from '@ember/test-helpers';
+import { click, find, render } from '@ember/test-helpers';
import { hbs } from 'ember-cli-htmlbars';
import { kvDataPath } from 'vault/utils/kv-path';
-import { FORM, PAGE } from 'vault/tests/helpers/kv/kv-selectors';
+import { FORM, PAGE, parseJsonEditor } from 'vault/tests/helpers/kv/kv-selectors';
import { syncStatusResponse } from 'vault/mirage/handlers/sync';
import { encodePath } from 'vault/utils/path-encoding-helpers';
import { baseSetup } from 'vault/tests/helpers/kv/kv-run-commands';
-import { GENERAL } from 'vault/tests/helpers/general-selectors';
module('Integration | Component | kv-v2 | Page::Secret::Details', function (hooks) {
setupRenderingTest(hooks);
@@ -127,24 +126,19 @@ module('Integration | Component | kv-v2 | Page::Secret::Details', function (hook
await click(FORM.toggleMasked);
assert.dom(PAGE.infoRowValue('foo')).hasText('bar', 'renders secret value');
await click(FORM.toggleJson);
- assert.dom(GENERAL.codeBlock('secret-data')).hasText(
- `Version data {
- "foo": "bar"
-}`,
- 'json editor renders secret data'
- );
+ assert.propEqual(parseJsonEditor(find), this.secretData, 'json editor renders secret data');
assert
.dom(PAGE.detail.versionTimestamp)
.includesText(`Version ${this.version} created`, 'renders version and time created');
});
- test('it renders hds codeblock view when secret is complex', async function (assert) {
+ test('it renders json view when secret is complex', async function (assert) {
assert.expect(4);
await this.renderComponent(this.modelComplex);
assert.dom(PAGE.infoRowValue('foo')).doesNotExist('does not render rows of secret data');
assert.dom(FORM.toggleJson).isChecked();
assert.dom(FORM.toggleJson).isNotDisabled();
- assert.dom(GENERAL.codeBlock('secret-data')).exists('hds codeBlock exists');
+ assert.dom('[data-test-component="code-mirror-modifier"]').exists('shows json editor');
});
test('it renders deleted empty state', async function (assert) {
diff --git a/ui/tests/integration/components/ldap/page/library/details/accounts-test.js b/ui/tests/integration/components/ldap/page/library/details/accounts-test.js
index c7f7aa6ebbeb..94b0048cc512 100644
--- a/ui/tests/integration/components/ldap/page/library/details/accounts-test.js
+++ b/ui/tests/integration/components/ldap/page/library/details/accounts-test.js
@@ -11,7 +11,6 @@ import { render, click, fillIn } from '@ember/test-helpers';
import hbs from 'htmlbars-inline-precompile';
import { allowAllCapabilitiesStub } from 'vault/tests/helpers/stubs';
import sinon from 'sinon';
-import { GENERAL } from 'vault/tests/helpers/general-selectors';
module('Integration | Component | ldap | Page::Library::Details::Accounts', function (hooks) {
setupRenderingTest(hooks);
@@ -77,10 +76,7 @@ module('Integration | Component | ldap | Page::Library::Details::Accounts', func
assert.dom('[data-test-checked-out-card]').exists('Accounts checked out card renders');
assert
- .dom(`${GENERAL.codeBlock('accounts')} code`)
- .hasText(
- 'vault lease renew ldap-test/library/test-library/check-out/:lease_id',
- 'Renew cli command renders with backend path'
- );
+ .dom('[data-test-accounts-code-block] code')
+ .hasText('vault lease renew ad/library/test-library/check-out/:lease_id', 'Renew cli command renders');
});
});
diff --git a/ui/tests/integration/components/ldap/page/roles-test.js b/ui/tests/integration/components/ldap/page/roles-test.js
index b51c16fccc61..d29a7e7f416d 100644
--- a/ui/tests/integration/components/ldap/page/roles-test.js
+++ b/ui/tests/integration/components/ldap/page/roles-test.js
@@ -113,9 +113,7 @@ module('Integration | Component | ldap | Page::Roles', function (hooks) {
assert.dom('[data-test-delete]').hasText('Delete', 'Details link renders in menu');
await click('[data-test-popup-menu-trigger]:last-of-type');
- assert
- .dom('[data-test-popup-menu-trigger]:last-of-type [data-test-rotate-creds]')
- .doesNotExist('Rotate credentials link is hidden for dynamic type');
+ assert.dom('[data-test-rotate-creds]').doesNotExist('Rotate credentials link is hidden for dynamic type');
});
test('it should filter roles', async function (assert) {
diff --git a/ui/tests/integration/components/mount-backend-form-test.js b/ui/tests/integration/components/mount-backend-form-test.js
index 5579d36ea599..2f4dff2df3b1 100644
--- a/ui/tests/integration/components/mount-backend-form-test.js
+++ b/ui/tests/integration/components/mount-backend-form-test.js
@@ -6,17 +6,19 @@
import { later, _cancelTimers as cancelTimers } from '@ember/runloop';
import { module, test } from 'qunit';
import { setupRenderingTest } from 'ember-qunit';
-import { render, settled, click, typeIn, fillIn } from '@ember/test-helpers';
+import { render, settled, click, typeIn } from '@ember/test-helpers';
import { setupMirage } from 'ember-cli-mirage/test-support';
import { allowAllCapabilitiesStub, noopStub } from 'vault/tests/helpers/stubs';
import { GENERAL } from 'vault/tests/helpers/general-selectors';
-import { MOUNT_BACKEND_FORM } from 'vault/tests/helpers/components/mount-backend-form-selectors';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
-import { methods } from 'vault/helpers/mountable-auth-methods';
-import { mountableEngines, WIF_ENGINES } from 'vault/helpers/mountable-secret-engines';
import hbs from 'htmlbars-inline-precompile';
+
+import { create } from 'ember-cli-page-object';
+import mountBackendForm from '../../pages/components/mount-backend-form';
+
import sinon from 'sinon';
+const component = create(mountBackendForm);
+
module('Integration | Component | mount backend form', function (hooks) {
setupRenderingTest(hooks);
setupMirage(hooks);
@@ -43,56 +45,51 @@ module('Integration | Component | mount backend form', function (hooks) {
});
test('it renders default state', async function (assert) {
- assert.expect(15);
await render(
hbs``
);
- assert
- .dom(GENERAL.title)
- .hasText('Enable an Authentication Method', 'renders auth header in default state');
-
- for (const method of methods()) {
- assert
- .dom(MOUNT_BACKEND_FORM.mountType(method.type))
- .hasText(method.displayName, `renders type:${method.displayName} picker`);
- }
+ assert.strictEqual(
+ component.header,
+ 'Enable an Authentication Method',
+ 'renders auth header in default state'
+ );
+ assert.ok(component.types.length > 0, 'renders type picker');
});
test('it changes path when type is changed', async function (assert) {
await render(
hbs``
);
-
- await click(MOUNT_BACKEND_FORM.mountType('aws'));
- assert.dom(GENERAL.inputByAttr('path')).hasValue('aws', 'sets the value of the type');
- await click(GENERAL.backButton);
- await click(MOUNT_BACKEND_FORM.mountType('approle'));
- assert.dom(GENERAL.inputByAttr('path')).hasValue('approle', 'updates the value of the type');
+ await component.selectType('aws');
+ assert.strictEqual(component.pathValue, 'aws', 'sets the value of the type');
+ await component.back();
+ await component.selectType('approle');
+ assert.strictEqual(component.pathValue, 'approle', 'updates the value of the type');
});
test('it keeps path value if the user has changed it', async function (assert) {
await render(
hbs``
);
- await click(MOUNT_BACKEND_FORM.mountType('approle'));
+ await component.selectType('approle');
assert.strictEqual(this.model.type, 'approle', 'Updates type on model');
- assert.dom(GENERAL.inputByAttr('path')).hasValue('approle', 'defaults to approle (first in the list)');
- await fillIn(GENERAL.inputByAttr('path'), 'newpath');
+ assert.strictEqual(component.pathValue, 'approle', 'defaults to approle (first in the list)');
+ await component.path('newpath');
assert.strictEqual(this.model.path, 'newpath', 'Updates path on model');
- await click(GENERAL.backButton);
+ await component.back();
assert.strictEqual(this.model.type, '', 'Clears type on back');
assert.strictEqual(this.model.path, 'newpath', 'Path is still newPath');
- await click(MOUNT_BACKEND_FORM.mountType('aws'));
+ await component.selectType('aws');
assert.strictEqual(this.model.type, 'aws', 'Updates type on model');
- assert.dom(GENERAL.inputByAttr('path')).hasValue('newpath', 'keeps custom path value');
+ assert.strictEqual(component.pathValue, 'newpath', 'keeps custom path value');
});
test('it does not show a selected token type when first mounting an auth method', async function (assert) {
await render(
hbs``
);
- await click(MOUNT_BACKEND_FORM.mountType('github'));
- await click(GENERAL.toggleGroup('Method Options'));
+ await component.selectType('github');
+ await component.toggleOptions();
assert
.dom('[data-test-input="config.tokenType"]')
.hasValue('', 'token type does not have a default value.');
@@ -116,7 +113,7 @@ module('Integration | Component | mount backend form', function (hooks) {
await render(
hbs``
);
- await mountBackend('approle', 'foo');
+ await component.mount('approle', 'foo');
later(() => cancelTimers(), 50);
await settled();
@@ -134,45 +131,40 @@ module('Integration | Component | mount backend form', function (hooks) {
this.model.set('config', this.store.createRecord('mount-config'));
});
- test('it renders secret engine specific headers', async function (assert) {
- assert.expect(17);
+ test('it renders secret specific headers', async function (assert) {
await render(
hbs``
);
- assert.dom(GENERAL.title).hasText('Enable a Secrets Engine', 'renders secrets header');
- for (const method of mountableEngines()) {
- assert
- .dom(MOUNT_BACKEND_FORM.mountType(method.type))
- .hasText(method.displayName, `renders type:${method.displayName} picker`);
- }
+ assert.strictEqual(component.header, 'Enable a Secrets Engine', 'renders secrets header');
+ assert.ok(component.types.length > 0, 'renders type picker');
});
test('it changes path when type is changed', async function (assert) {
await render(
hbs``
);
- await click(MOUNT_BACKEND_FORM.mountType('azure'));
- assert.dom(GENERAL.inputByAttr('path')).hasValue('azure', 'sets the value of the type');
- await click(GENERAL.backButton);
- await click(MOUNT_BACKEND_FORM.mountType('nomad'));
- assert.dom(GENERAL.inputByAttr('path')).hasValue('nomad', 'updates the value of the type');
+ await component.selectType('azure');
+ assert.strictEqual(component.pathValue, 'azure', 'sets the value of the type');
+ await component.back();
+ await component.selectType('nomad');
+ assert.strictEqual(component.pathValue, 'nomad', 'updates the value of the type');
});
test('it keeps path value if the user has changed it', async function (assert) {
await render(
hbs``
);
- await click(MOUNT_BACKEND_FORM.mountType('kv'));
+ await component.selectType('kv');
assert.strictEqual(this.model.type, 'kv', 'Updates type on model');
- assert.dom(GENERAL.inputByAttr('path')).hasValue('kv', 'path matches mount type');
- await fillIn(GENERAL.inputByAttr('path'), 'newpath');
+ assert.strictEqual(component.pathValue, 'kv', 'path matches mount type');
+ await component.path('newpath');
assert.strictEqual(this.model.path, 'newpath', 'Updates path on model');
- await click(GENERAL.backButton);
+ await component.back();
assert.strictEqual(this.model.type, '', 'Clears type on back');
assert.strictEqual(this.model.path, 'newpath', 'path is still newpath');
- await click(MOUNT_BACKEND_FORM.mountType('ssh'));
+ await component.selectType('ssh');
assert.strictEqual(this.model.type, 'ssh', 'Updates type on model');
- assert.dom(GENERAL.inputByAttr('path')).hasValue('newpath', 'path stays the same');
+ assert.strictEqual(component.pathValue, 'newpath', 'path stays the same');
});
test('it calls mount success', async function (assert) {
@@ -189,7 +181,7 @@ module('Integration | Component | mount backend form', function (hooks) {
hbs``
);
- await mountBackend('ssh', 'foo');
+ await component.mount('ssh', 'foo');
later(() => cancelTimers(), 50);
await settled();
@@ -201,50 +193,43 @@ module('Integration | Component | mount backend form', function (hooks) {
});
module('WIF secret engines', function () {
- test('it shows identityTokenKey when type is a WIF engine and hides when its not', async function (assert) {
+ test('it shows identityTokenKey when type is aws and hides when its not', async function (assert) {
await render(
hbs``
);
- for (const engine of WIF_ENGINES) {
- await click(MOUNT_BACKEND_FORM.mountType(engine));
- await click(GENERAL.toggleGroup('Method Options'));
- assert
- .dom(GENERAL.fieldByAttr('identityTokenKey'))
- .exists(`Identity token key field shows when type=${this.model.type}`);
- await click(GENERAL.backButton);
- }
- for (const engine of mountableEngines().filter((e) => !WIF_ENGINES.includes(e.type))) {
- // check non-wif engine
- await click(MOUNT_BACKEND_FORM.mountType(engine.type));
- await click(GENERAL.toggleGroup('Method Options'));
- assert
- .dom(GENERAL.fieldByAttr('identityTokenKey'))
- .doesNotExist(`Identity token key field hidden when type=${this.model.type}`);
- await click(GENERAL.backButton);
- }
+ await component.selectType('ldap');
+
+ await click(GENERAL.toggleGroup('Method Options'));
+ assert
+ .dom(GENERAL.fieldByAttr('identityTokenKey'))
+ .doesNotExist(`Identity token key field hidden when type=${this.model.type}`);
+
+ await component.back();
+ await component.selectType('aws');
+ await click(GENERAL.toggleGroup('Method Options'));
+ assert
+ .dom(GENERAL.fieldByAttr('identityTokenKey'))
+ .exists(`Identity token key field shows when type=${this.model.type}`);
});
test('it updates identityTokeKey if user has changed it', async function (assert) {
await render(
hbs``
);
+ await component.selectType('aws');
assert.strictEqual(
this.model.config.identityTokenKey,
undefined,
- `On init identityTokenKey is not set on the model`
+ 'On init identityTokenKey is not set on the model'
+ );
+
+ await click(GENERAL.toggleGroup('Method Options'));
+ await typeIn(GENERAL.inputSearch('key'), 'default');
+ assert.strictEqual(
+ this.model.config.identityTokenKey,
+ 'default',
+ 'updates model with default identityTokenKey'
);
- for (const engine of WIF_ENGINES) {
- await click(MOUNT_BACKEND_FORM.mountType(engine));
- await click(GENERAL.toggleGroup('Method Options'));
- await typeIn(GENERAL.inputSearch('key'), `${engine}+specialKey`); // set to something else
-
- assert.strictEqual(
- this.model.config.identityTokenKey,
- `${engine}+specialKey`,
- `updates ${engine} model with custom identityTokenKey`
- );
- await click(GENERAL.backButton);
- }
});
});
});
diff --git a/ui/tests/integration/components/mount-backend/type-form-test.js b/ui/tests/integration/components/mount-backend/type-form-test.js
index e66c6304a2a9..2c988fb1f192 100644
--- a/ui/tests/integration/components/mount-backend/type-form-test.js
+++ b/ui/tests/integration/components/mount-backend/type-form-test.js
@@ -11,7 +11,6 @@ import sinon from 'sinon';
import { allEngines, mountableEngines } from 'vault/helpers/mountable-secret-engines';
import { allMethods, methods } from 'vault/helpers/mountable-auth-methods';
import { setRunOptions } from 'ember-a11y-testing/test-support';
-import { MOUNT_BACKEND_FORM } from 'vault/tests/helpers/components/mount-backend-form-selectors';
const secretTypes = mountableEngines().map((engine) => engine.type);
const allSecretTypes = allEngines().map((engine) => engine.type);
@@ -26,28 +25,26 @@ module('Integration | Component | mount-backend/type-form', function (hooks) {
});
test('it calls secrets setMountType when type is selected', async function (assert) {
- assert.expect(secretTypes.length + 1, 'renders all mountable engines plus calls a spy');
const spy = sinon.spy();
this.set('setType', spy);
await render(hbs``);
- for (const type of secretTypes) {
- assert.dom(MOUNT_BACKEND_FORM.mountType(type)).exists(`Renders ${type} mountable secret engine`);
- }
- await click(MOUNT_BACKEND_FORM.mountType('ssh'));
+ assert
+ .dom('[data-test-mount-type]')
+ .exists({ count: secretTypes.length }, 'Renders all mountable engines');
+ await click(`[data-test-mount-type="ssh"]`);
assert.ok(spy.calledOnceWith('ssh'));
});
test('it calls auth setMountType when type is selected', async function (assert) {
- assert.expect(authTypes.length + 1, 'renders all mountable auth methods plus calls a spy');
const spy = sinon.spy();
this.set('setType', spy);
await render(hbs``);
- for (const type of authTypes) {
- assert.dom(MOUNT_BACKEND_FORM.mountType(type)).exists(`Renders ${type} mountable auth engine`);
- }
- await click(MOUNT_BACKEND_FORM.mountType('okta'));
+ assert
+ .dom('[data-test-mount-type]')
+ .exists({ count: authTypes.length }, 'Renders all mountable auth methods');
+ await click(`[data-test-mount-type="okta"]`);
assert.ok(spy.calledOnceWith('okta'));
});
@@ -58,7 +55,6 @@ module('Integration | Component | mount-backend/type-form', function (hooks) {
});
test('it renders correct items for enterprise secrets', async function (assert) {
- assert.expect(allSecretTypes.length, 'renders all enterprise secret engines');
setRunOptions({
rules: {
// TODO: Fix disabled enterprise options with enterprise badge
@@ -66,17 +62,14 @@ module('Integration | Component | mount-backend/type-form', function (hooks) {
},
});
await render(hbs``);
- for (const type of allSecretTypes) {
- assert.dom(MOUNT_BACKEND_FORM.mountType(type)).exists(`Renders ${type} secret engine`);
- }
+ assert
+ .dom('[data-test-mount-type]')
+ .exists({ count: allSecretTypes.length }, 'Renders all secret engines');
});
test('it renders correct items for enterprise auth methods', async function (assert) {
- assert.expect(allAuthTypes.length, 'renders all enterprise auth engines');
await render(hbs``);
- for (const type of allAuthTypes) {
- assert.dom(MOUNT_BACKEND_FORM.mountType(type)).exists(`Renders ${type} auth engine`);
- }
+ assert.dom('[data-test-mount-type]').exists({ count: allAuthTypes.length }, 'Renders all auth methods');
});
});
});
diff --git a/ui/tests/integration/components/okta-number-challenge-test.js b/ui/tests/integration/components/okta-number-challenge-test.js
index a117a9dd13c5..2c57009258e1 100644
--- a/ui/tests/integration/components/okta-number-challenge-test.js
+++ b/ui/tests/integration/components/okta-number-challenge-test.js
@@ -8,7 +8,6 @@ import { setupRenderingTest } from 'ember-qunit';
import { render, click } from '@ember/test-helpers';
import { hbs } from 'ember-cli-htmlbars';
import sinon from 'sinon';
-import { GENERAL } from 'vault/tests/helpers/general-selectors';
module('Integration | Component | auth | okta-number-challenge', function (hooks) {
setupRenderingTest(hooks);
@@ -72,7 +71,7 @@ module('Integration | Component | auth | okta-number-challenge', function (hooks
'Correct description renders'
);
assert.dom('[data-test-message-error]').hasText(`Error ${this.hasError}`);
- await click(GENERAL.backButton);
+ await click('[data-test-back-button]');
assert.true(this.onCancel.calledOnce, 'onCancel is called');
});
});
diff --git a/ui/tests/integration/components/page/mode-index-test.js b/ui/tests/integration/components/page/mode-index-test.js
index 9af014881efb..506fc725cd12 100644
--- a/ui/tests/integration/components/page/mode-index-test.js
+++ b/ui/tests/integration/components/page/mode-index-test.js
@@ -24,7 +24,7 @@ module('Integration | Component | replication page/mode-index', function (hooks)
hooks.beforeEach(function () {
this.store = this.owner.lookup('service:store');
this.onEnable = () => {};
- this.clusterModel = { replicationAttrs: {} };
+ this.clusterModel = {};
this.replicationMode = '';
this.replicationDisabled = true;
diff --git a/ui/tests/integration/components/replication-overview-mode-test.js b/ui/tests/integration/components/replication-overview-mode-test.js
deleted file mode 100644
index dc8729dc6dd6..000000000000
--- a/ui/tests/integration/components/replication-overview-mode-test.js
+++ /dev/null
@@ -1,124 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { module, test } from 'qunit';
-import { setupRenderingTest } from 'vault/tests/helpers';
-import { render, settled } from '@ember/test-helpers';
-import { hbs } from 'ember-cli-htmlbars';
-import { setupEngine } from 'ember-engines/test-support';
-
-const OVERVIEW_MODE = {
- title: '[data-test-overview-mode-title]',
- body: '[data-test-overview-mode-body]',
- detailsLink: '[data-test-replication-details-link]',
-};
-module('Integration | Component | replication-overview-mode', function (hooks) {
- setupRenderingTest(hooks);
- setupEngine(hooks, 'replication');
-
- hooks.beforeEach(function () {
- this.versionService = this.owner.lookup('service:version');
- this.versionService.features = [];
- this.mode = 'dr';
- this.clusterName = 'foobar';
- this.modeDetails = { mode: 'disabled' };
-
- this.renderComponent = async () => {
- return render(
- hbs`
- `,
- { owner: this.engine }
- );
- };
- });
-
- test('without features', async function (assert) {
- await this.renderComponent();
- assert.dom(OVERVIEW_MODE.title).hasText('Disaster Recovery (DR)');
- assert
- .dom(OVERVIEW_MODE.body)
- .includesText('Disaster Recovery is a feature of Vault Enterprise Premium. Upgrade');
- assert.dom(OVERVIEW_MODE.detailsLink).doesNotExist('does not show link to replication (dr)');
-
- this.set('mode', 'performance');
- await settled();
- assert.dom(OVERVIEW_MODE.title).hasText('Performance');
- assert
- .dom(OVERVIEW_MODE.body)
- .includesText('Performance Replication is a feature of Vault Enterprise Premium. Upgrade');
- assert.dom(OVERVIEW_MODE.detailsLink).doesNotExist('does not show link to replication (perf)');
- });
-
- module('with features', function (hooks) {
- hooks.beforeEach(function () {
- this.versionService.features = ['DR Replication', 'Performance Replication'];
- });
-
- test('it renders when replication disabled', async function (assert) {
- await this.renderComponent();
- assert.dom(OVERVIEW_MODE.title).hasText('Disaster Recovery (DR)');
- assert
- .dom(OVERVIEW_MODE.body)
- .hasText(
- 'Disaster Recovery Replication is designed to protect against catastrophic failure of entire clusters. Secondaries do not forward service requests until they are elected and become a new primary.'
- );
- assert.dom(OVERVIEW_MODE.detailsLink).hasText('Enable');
-
- this.set('mode', 'performance');
- await settled();
- assert.dom(OVERVIEW_MODE.title).hasText('Performance');
- assert
- .dom(OVERVIEW_MODE.body)
- .hasText(
- 'Performance Replication scales workloads horizontally across clusters to make requests faster. Local secondaries handle read requests but forward writes to the primary to be handled.'
- );
- assert.dom(OVERVIEW_MODE.detailsLink).hasText('Enable');
- });
-
- test('it renders when replication enabled', async function (assert) {
- this.mode = 'performance';
- this.modeDetails = {
- replicationEnabled: true,
- mode: 'primary',
- modeForUrl: 'primary',
- clusterIdDisplay: 'foobar12',
- };
- await this.renderComponent();
- assert.dom(OVERVIEW_MODE.title).hasText('Performance');
- assert
- .dom(OVERVIEW_MODE.body)
- .includesText('ENABLED Primary foobar12', 'renders mode type and cluster ID if passed');
- assert.dom(OVERVIEW_MODE.detailsLink).hasText('Details');
-
- this.set('modeDetails', {
- replicationEnabled: true,
- mode: 'secondary',
- modeForUrl: 'secondary',
- clusterIdDisplay: 'foobar12',
- secondaryId: 'some-secondary',
- });
- await settled();
- assert.dom(OVERVIEW_MODE.title).hasText('Performance');
- assert.dom(OVERVIEW_MODE.body).includesText('ENABLED Secondary some-secondary foobar12');
- assert.dom(OVERVIEW_MODE.detailsLink).hasText('Details');
- });
-
- test('it renders when replication bootstrapping', async function (assert) {
- this.modeDetails = {
- replicationEnabled: true,
- mode: 'bootstrapping',
- modeForUrl: 'bootstrapping',
- };
- await this.renderComponent();
- assert.dom(OVERVIEW_MODE.title).hasText('Disaster Recovery (DR)');
- assert.dom(OVERVIEW_MODE.body).includesText('ENABLED Bootstrapping');
- assert.dom(OVERVIEW_MODE.detailsLink).hasText('Details');
- });
- });
-});
diff --git a/ui/tests/integration/components/sync/secrets/destination-header-test.js b/ui/tests/integration/components/sync/secrets/destination-header-test.js
index 07e7ac0b1532..11c3c11ba787 100644
--- a/ui/tests/integration/components/sync/secrets/destination-header-test.js
+++ b/ui/tests/integration/components/sync/secrets/destination-header-test.js
@@ -47,7 +47,7 @@ module('Integration | Component | sync | Secrets::DestinationHeader', function (
assert.expect(3);
const transitionStub = sinon.stub(this.owner.lookup('service:router'), 'transitionTo');
- const clearDatasetStub = sinon.stub(this.owner.lookup('service:pagination'), 'clearDataset');
+ const clearDatasetStub = sinon.stub(this.store, 'clearDataset');
this.server.delete('/sys/sync/destinations/aws-sm/us-west-1', () => {
assert.ok(true, 'Request made to delete destination');
diff --git a/ui/tests/integration/components/sync/secrets/page/destinations-test.js b/ui/tests/integration/components/sync/secrets/page/destinations-test.js
index e070c2f1f53f..57daee409f2d 100644
--- a/ui/tests/integration/components/sync/secrets/page/destinations-test.js
+++ b/ui/tests/integration/components/sync/secrets/page/destinations-test.js
@@ -56,7 +56,7 @@ module('Integration | Component | sync | Page::Destinations', function (hooks) {
};
this.transitionStub = sinon.stub(this.owner.lookup('service:router'), 'transitionTo');
- this.clearDatasetStub = sinon.stub(this.owner.lookup('service:pagination'), 'clearDataset');
+ this.clearDatasetStub = sinon.stub(store, 'clearDataset');
});
test('it should render header and tabs', async function (assert) {
diff --git a/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js b/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js
index 6a62bc9737a3..3f4a32e5e47d 100644
--- a/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js
+++ b/ui/tests/integration/components/sync/secrets/page/destinations/create-and-edit-test.js
@@ -24,7 +24,7 @@ module('Integration | Component | sync | Secrets::Page::Destinations::CreateAndE
hooks.beforeEach(function () {
this.store = this.owner.lookup('service:store');
this.transitionStub = sinon.stub(this.owner.lookup('service:router'), 'transitionTo');
- this.clearDatasetStub = sinon.stub(this.owner.lookup('service:pagination'), 'clearDataset');
+ this.clearDatasetStub = sinon.stub(this.store, 'clearDataset');
this.renderFormComponent = () => {
return render(hbs` `, {
diff --git a/ui/tests/integration/components/sync/secrets/page/destinations/destination/sync-test.js b/ui/tests/integration/components/sync/secrets/page/destinations/destination/sync-test.js
index 88bab1e4bbb8..fdb4127f4403 100644
--- a/ui/tests/integration/components/sync/secrets/page/destinations/destination/sync-test.js
+++ b/ui/tests/integration/components/sync/secrets/page/destinations/destination/sync-test.js
@@ -140,7 +140,7 @@ module('Integration | Component | sync | Secrets::Page::Destinations::Destinatio
});
test('it should clear sync associations from store in willDestroy hook', async function (assert) {
- const clearDatasetStub = sinon.stub(this.owner.lookup('service:pagination'), 'clearDataset');
+ const clearDatasetStub = sinon.stub(this.store, 'clearDataset');
this.renderComponent = true;
await render(
diff --git a/ui/tests/integration/components/transit-edit-test.js b/ui/tests/integration/components/transit-edit-test.js
index 0d0751a9ef58..264ab160038d 100644
--- a/ui/tests/integration/components/transit-edit-test.js
+++ b/ui/tests/integration/components/transit-edit-test.js
@@ -7,8 +7,6 @@ import { module, test } from 'qunit';
import { setupRenderingTest } from 'vault/tests/helpers';
import { click, fillIn, render } from '@ember/test-helpers';
import { hbs } from 'ember-cli-htmlbars';
-import { setupMirage } from 'ember-cli-mirage/test-support';
-import { capabilitiesStub } from 'vault/tests/helpers/stubs';
const SELECTORS = {
createForm: '[data-test-transit-create-form]',
@@ -18,13 +16,9 @@ const SELECTORS = {
};
module('Integration | Component | transit-edit', function (hooks) {
setupRenderingTest(hooks);
- setupMirage(hooks);
hooks.beforeEach(function () {
this.store = this.owner.lookup('service:store');
- this.server.post('/sys/capabilities-self', () =>
- capabilitiesStub('transit-backend/keys/some-key', ['sudo'])
- );
this.model = this.store.createRecord('transit-key', { backend: 'transit-backend', id: 'some-key' });
this.backendCrumb = {
label: 'transit',
diff --git a/ui/tests/integration/components/transit-key-actions-test.js b/ui/tests/integration/components/transit-key-actions-test.js
index bd1d2c3b3d3d..f6208965c52a 100644
--- a/ui/tests/integration/components/transit-key-actions-test.js
+++ b/ui/tests/integration/components/transit-key-actions-test.js
@@ -95,50 +95,6 @@ module('Integration | Component | transit key actions', function (hooks) {
.exists({ count: 1 }, 'renders signature_algorithm field on verify with rsa key');
});
- test('it renders: padding_scheme field for rsa key types', async function (assert) {
- const supportedActions = ['datakey', 'decrypt', 'encrypt'];
- const supportedKeyTypes = ['rsa-2048', 'rsa-3072', 'rsa-4096'];
-
- for (const key of supportedKeyTypes) {
- this.set('key', {
- type: key,
- backend: 'transit',
- supportedActions,
- });
- for (const action of this.key.supportedActions) {
- this.selectedAction = action;
- await render(hbs`
- `);
- assert
- .dom('[data-test-padding-scheme]')
- .hasValue(
- 'oaep',
- `key type: ${key} renders padding_scheme field with default value for action: ${action}`
- );
- }
- }
- });
- test('it renders: decrypt_padding_scheme and encrypt_padding_scheme fields for rsa key types', async function (assert) {
- this.selectedAction = 'rewrap';
- const supportedKeyTypes = ['rsa-2048', 'rsa-3072', 'rsa-4096'];
- const SELECTOR = (type) => `[data-test-padding-scheme="${type}"]`;
- for (const key of supportedKeyTypes) {
- this.set('key', {
- type: key,
- backend: 'transit',
- supportedActions: [this.selectedAction],
- });
- await render(hbs`
- `);
- assert
- .dom(SELECTOR('encrypt'))
- .hasValue('oaep', `key type: ${key} renders ${SELECTOR('encrypt')} field with default value`);
- assert
- .dom(SELECTOR('decrypt'))
- .hasValue('oaep', `key type: ${key} renders ${SELECTOR('decrypt')} field with default value`);
- }
- });
-
async function doEncrypt(assert, actions = [], keyattrs = {}) {
const keyDefaults = { backend: 'transit', id: 'akey', supportedActions: ['encrypt'].concat(actions) };
diff --git a/ui/tests/integration/helpers/transition-to-test.js b/ui/tests/integration/helpers/transition-to-test.js
deleted file mode 100644
index 32b7e35ac65a..000000000000
--- a/ui/tests/integration/helpers/transition-to-test.js
+++ /dev/null
@@ -1,77 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { module, test } from 'qunit';
-import { setupRenderingTest } from 'vault/tests/helpers';
-import { setupEngine } from 'ember-engines/test-support';
-import { click, render } from '@ember/test-helpers';
-import { hbs } from 'ember-cli-htmlbars';
-import Sinon from 'sinon';
-
-module('Integration | Helper | transition-to', function (hooks) {
- setupRenderingTest(hooks);
- // using 'kv' here for testing, but this could be any Ember engine in the app
- // sets this.engine, which we use to set context for the component testing service:app-router
- setupEngine(hooks, 'kv');
-
- hooks.beforeEach(function () {
- this.router = this.owner.lookup('service:router');
- this.router.reopen({
- transitionTo: Sinon.stub(),
- transitionToExternal: Sinon.stub(),
- });
- });
-
- test('it does not call transition on render', async function (assert) {
- await render(hbs``);
-
- assert.true(this.router.transitionTo.notCalled, 'transitionTo not called on render');
- assert.true(this.router.transitionToExternal.notCalled, 'transitionToExternal not called on render');
- });
-
- test('it calls transitionTo correctly', async function (assert) {
- await render(
- hbs``
- );
- await click('[data-test-btn]');
-
- assert.true(this.router.transitionTo.calledOnce, 'transitionTo called once on click');
- assert.deepEqual(
- this.router.transitionTo.args[0],
- ['vault.cluster', 'foobar', 'baz'],
- 'transitionTo called with positional params'
- );
- assert.true(this.router.transitionToExternal.notCalled, 'transitionToExternal not called');
- });
-
- test('it calls transitionToExternal correctly', async function (assert) {
- await render(
- hbs``
- );
- await click('[data-test-btn]');
-
- assert.true(this.router.transitionToExternal.calledOnce, 'transitionToExternal called');
- assert.deepEqual(
- this.router.transitionToExternal.args[0],
- ['vault.cluster', 'foobar', 'baz'],
- 'transitionToExternal called with positional params'
- );
- assert.true(this.router.transitionTo.notCalled, 'transitionTo not called');
- });
-
- // This test is confusing (and admittedly not ideal) because stubbing routers gets strange,
- // but if you go into the TransitionTo class and console.log owner.lookup('service:router') in get router()
- // you'll see the getter returns 'service:app-router' (because of the context setup)
- // so although we're asserting this.router, the TransitionTo helper is using "service:app-router" under the hood.
- // This test passing, indirectly means the helper works as expected. Failures might be something like "global failure: TypeError: this.router is undefined"
- test('it uses service:app-router when base router undefined', async function (assert) {
- await render(
- hbs``,
- { owner: this.engine }
- );
- await click('[data-test-btn]');
- assert.true(this.router.transitionToExternal.calledOnce, 'transitionToExternal called');
- });
-});
diff --git a/ui/tests/pages/access/methods.js b/ui/tests/pages/access/methods.js
new file mode 100644
index 000000000000..67c4e03d7e50
--- /dev/null
+++ b/ui/tests/pages/access/methods.js
@@ -0,0 +1,24 @@
+/**
+ * Copyright (c) HashiCorp, Inc.
+ * SPDX-License-Identifier: BUSL-1.1
+ */
+
+import { create, attribute, visitable, collection, hasClass, text } from 'ember-cli-page-object';
+
+export default create({
+ visit: visitable('/vault/access/'),
+ methodsLink: {
+ isActive: hasClass('active'),
+ text: text(),
+ scope: '[data-test-sidebar-nav-link="Authentication Methods"]',
+ },
+
+ backendLinks: collection('[data-test-auth-backend-link]', {
+ path: text('[data-test-path]'),
+ id: attribute('data-test-id', '[data-test-path]'),
+ }),
+
+ findLinkById(id) {
+ return this.backendLinks.filterBy('id', id)[0];
+ },
+});
diff --git a/ui/tests/pages/components/mount-backend-form.js b/ui/tests/pages/components/mount-backend-form.js
new file mode 100644
index 000000000000..543a4b611be9
--- /dev/null
+++ b/ui/tests/pages/components/mount-backend-form.js
@@ -0,0 +1,33 @@
+/**
+ * Copyright (c) HashiCorp, Inc.
+ * SPDX-License-Identifier: BUSL-1.1
+ */
+
+import { clickable, collection, fillable, text, value, attribute } from 'ember-cli-page-object';
+import fields from './form-field';
+
+export default {
+ ...fields,
+ header: text('[data-test-mount-form-header]'),
+ submit: clickable('[data-test-mount-submit]'),
+ back: clickable('[data-test-mount-back]'),
+ path: fillable('[data-test-input="path"]'),
+ toggleOptions: clickable('[data-test-toggle-group="Method Options"]'),
+ pathValue: value('[data-test-input="path"]'),
+ types: collection('[data-test-mount-type]', {
+ select: clickable(),
+ id: attribute('id'),
+ }),
+ type: fillable('[name="mount-type"]'),
+ async selectType(type) {
+ return this.types.filterBy('id', type)[0].select();
+ },
+ async mount(type, path) {
+ await this.selectType(type);
+ if (path) {
+ await this.path(path).submit();
+ } else {
+ await this.submit();
+ }
+ },
+};
diff --git a/ui/tests/pages/secrets/backend/kmip/roles.js b/ui/tests/pages/secrets/backend/kmip/roles.js
index bc06acb4a584..aafaeba988a0 100644
--- a/ui/tests/pages/secrets/backend/kmip/roles.js
+++ b/ui/tests/pages/secrets/backend/kmip/roles.js
@@ -11,7 +11,7 @@ export default create({
visit: visitable('/vault/secrets/:backend/kmip/scopes/:scope/roles'),
visitDetail: visitable('/vault/secrets/:backend/kmip/scopes/:scope/roles/:role'),
create: clickable('[data-test-role-create]'),
- roleName: fillable('[data-test-input="role"]'),
+ roleName: fillable('[data-test-input="name"]'),
submit: clickable('[data-test-edit-form-submit]'),
detailEditLink: clickable('[data-test-kmip-link-edit-role]'),
cancelLink: clickable('[data-test-edit-form-cancel]'),
diff --git a/ui/tests/pages/secrets/backend/ssh/edit-role.js b/ui/tests/pages/secrets/backend/ssh/edit-role.js
index a5a58f205b77..3c7df3755569 100644
--- a/ui/tests/pages/secrets/backend/ssh/edit-role.js
+++ b/ui/tests/pages/secrets/backend/ssh/edit-role.js
@@ -10,6 +10,15 @@ export default create({
...Base,
visitEdit: visitable('/vault/secrets/:backend/edit/:id'),
visitEditRoot: visitable('/vault/secrets/:backend/edit'),
+ keyType: fillable('[data-test-input="keyType"]'),
+ defaultUser: fillable('[data-test-input="defaultUser"]'),
+ toggleMore: clickable('[data-test-toggle-group="Options"]'),
name: fillable('[data-test-input="name"]'),
+ CIDR: fillable('[data-test-input="cidrList"]'),
save: clickable('[data-test-role-ssh-create]'),
+
+ async createOTPRole(name) {
+ await this.name(name);
+ await this.toggleMore().keyType('otp').defaultUser('admin').CIDR('0.0.0.0/0').save();
+ },
});
diff --git a/ui/tests/pages/secrets/backend/ssh/generate-otp.js b/ui/tests/pages/secrets/backend/ssh/generate-otp.js
index f49afde06fb7..2e67e466e286 100644
--- a/ui/tests/pages/secrets/backend/ssh/generate-otp.js
+++ b/ui/tests/pages/secrets/backend/ssh/generate-otp.js
@@ -4,7 +4,7 @@
*/
import { Base } from '../credentials';
-import { value, create, fillable, isPresent } from 'ember-cli-page-object';
+import { clickable, value, create, fillable, isPresent } from 'ember-cli-page-object';
export default create({
...Base,
@@ -14,5 +14,9 @@ export default create({
ip: fillable('[data-test-input="ip"]'),
warningIsPresent: isPresent('[data-test-warning]'),
commonNameValue: value('[data-test-input="commonName"]'),
- generateOTP: async function () {},
+ submit: clickable('[data-test-save]'),
+ back: clickable('[data-test-back-button]'),
+ generateOTP: async function () {
+ await this.user('admin').ip('192.168.1.1').submit();
+ },
});
diff --git a/ui/tests/pages/settings/auth/enable.js b/ui/tests/pages/settings/auth/enable.js
index 13c309c1377f..34893885a31e 100644
--- a/ui/tests/pages/settings/auth/enable.js
+++ b/ui/tests/pages/settings/auth/enable.js
@@ -4,14 +4,15 @@
*/
import { create, visitable } from 'ember-cli-page-object';
+import backendForm from '../../components/mount-backend-form';
import flashMessages from '../../components/flash-message';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
export default create({
visit: visitable('/vault/settings/auth/enable'),
+ ...backendForm,
flash: flashMessages,
enable: async function (type, path) {
await this.visit();
- await mountBackend(type, path);
+ await this.mount(type, path);
},
});
diff --git a/ui/tests/pages/settings/mount-secret-backend.js b/ui/tests/pages/settings/mount-secret-backend.js
index ef7da0be9e77..6a4e58661caf 100644
--- a/ui/tests/pages/settings/mount-secret-backend.js
+++ b/ui/tests/pages/settings/mount-secret-backend.js
@@ -5,10 +5,11 @@
import { create, visitable, fillable, clickable } from 'ember-cli-page-object';
import { settled } from '@ember/test-helpers';
-import { mountBackend } from 'vault/tests/helpers/components/mount-backend-form-helpers';
+import mountForm from 'vault/tests/pages/components/mount-backend-form';
export default create({
visit: visitable('/vault/settings/mount-secret-backend'),
+ ...mountForm,
version: fillable('[data-test-input="version"]'),
setMaxVersion: fillable('[data-test-input="maxVersions"]'),
enableMaxTtl: clickable('[data-test-toggle-input="Max Lease TTL"]'),
@@ -22,7 +23,7 @@ export default create({
enable: async function (type, path) {
await this.visit();
await settled();
- await mountBackend(type, path);
+ await this.mount(type, path);
await settled();
},
});
diff --git a/ui/tests/unit/adapters/auth-method-test.js b/ui/tests/unit/adapters/auth-method-test.js
index c73f6cf0806f..52100261140c 100644
--- a/ui/tests/unit/adapters/auth-method-test.js
+++ b/ui/tests/unit/adapters/auth-method-test.js
@@ -54,7 +54,7 @@ module('Unit | Adapter | auth method', function (hooks) {
await this.store.findAll('auth-method', { adapterOptions: { unauthenticated: true } });
});
- test('query makes request to correct endpoint ', async function (assert) {
+ test('findAll makes request to correct endpoint when useMountsEndpoint is true', async function (assert) {
assert.expect(1);
this.server.get('sys/internal/ui/mounts', () => {
@@ -62,6 +62,6 @@ module('Unit | Adapter | auth method', function (hooks) {
return this.mockResponse;
});
- await this.store.query('auth-method', {});
+ await this.store.findAll('auth-method', { adapterOptions: { useMountsEndpoint: true } });
});
});
diff --git a/ui/tests/unit/adapters/kmip/role-test.js b/ui/tests/unit/adapters/kmip/role-test.js
index 36dcf9a883de..a9d20d900459 100644
--- a/ui/tests/unit/adapters/kmip/role-test.js
+++ b/ui/tests/unit/adapters/kmip/role-test.js
@@ -9,8 +9,6 @@ import { setupTest } from 'ember-qunit';
module('Unit | Adapter | kmip/role', function (hooks) {
setupTest(hooks);
- // these are only some of the actual editable fields
- const editableFields = ['tlsTtl', 'operationAll', 'operationNone', 'operationGet', 'operationCreate'];
const serializeTests = [
[
'operation_all is the only operation item present after serialization',
@@ -19,7 +17,7 @@ module('Unit | Adapter | kmip/role', function (hooks) {
return { operation_all: true, operation_get: true, operation_create: true, tls_ttl: '10s' };
},
record: {
- editableFields,
+ nonOperationFields: ['tlsTtl'],
},
},
{
@@ -34,7 +32,7 @@ module('Unit | Adapter | kmip/role', function (hooks) {
return { operation_all: true, operation_get: true, operation_create: true };
},
record: {
- editableFields,
+ nonOperationFields: ['tlsTtl'],
},
},
{
@@ -48,7 +46,7 @@ module('Unit | Adapter | kmip/role', function (hooks) {
return { operation_none: true, operation_get: true, operation_add_attribute: true, tls_ttl: '10s' };
},
record: {
- editableFields,
+ nonOperationFields: ['tlsTtl'],
},
},
{
@@ -69,7 +67,7 @@ module('Unit | Adapter | kmip/role', function (hooks) {
};
},
record: {
- editableFields,
+ nonOperationFields: ['tlsTtl'],
},
},
{
diff --git a/ui/tests/unit/adapters/kv/metadata-test.js b/ui/tests/unit/adapters/kv/metadata-test.js
index 5145c0a4987b..96db4fcdb18f 100644
--- a/ui/tests/unit/adapters/kv/metadata-test.js
+++ b/ui/tests/unit/adapters/kv/metadata-test.js
@@ -182,7 +182,7 @@ module('Unit | Adapter | kv/metadata', function (hooks) {
let record = await this.store.peekRecord('kv/metadata', data.id);
await record.destroyRecord();
- assert.true(record.isDeleted, 'record is deleted');
+ assert.true(record.isDestroyed, 'record is destroyed');
record = await this.store.peekRecord('kv/metadata', this.id);
assert.strictEqual(record, null, 'record is no longer in store');
});
diff --git a/ui/tests/unit/adapters/sync/associations-test.js b/ui/tests/unit/adapters/sync/associations-test.js
index ea7f385a4966..61f4bd47c57a 100644
--- a/ui/tests/unit/adapters/sync/associations-test.js
+++ b/ui/tests/unit/adapters/sync/associations-test.js
@@ -15,7 +15,6 @@ module('Unit | Adapter | sync | association', function (hooks) {
hooks.beforeEach(function () {
this.store = this.owner.lookup('service:store');
- this.pagination = this.owner.lookup('service:pagination');
this.params = [
{ type: 'aws-sm', name: 'us-west-1' },
@@ -51,7 +50,7 @@ module('Unit | Adapter | sync | association', function (hooks) {
return associationsResponse(schema, req);
});
- await this.pagination.lazyPaginatedQuery('sync/association', {
+ await this.store.lazyPaginatedQuery('sync/association', {
responsePath: 'data.keys',
page: 1,
destinationType: 'aws-sm',
diff --git a/ui/tests/unit/decorators/model-expanded-attributes-test.js b/ui/tests/unit/decorators/model-expanded-attributes-test.js
index 9e3abe771be2..26c077fe9544 100644
--- a/ui/tests/unit/decorators/model-expanded-attributes-test.js
+++ b/ui/tests/unit/decorators/model-expanded-attributes-test.js
@@ -4,16 +4,58 @@
*/
import { module, test } from 'qunit';
-import { setupApplicationTest } from 'ember-qunit';
+import { setupTest } from 'ember-qunit';
import sinon from 'sinon';
+import Model, { attr } from '@ember-data/model';
import { withExpandedAttributes } from 'vault/decorators/model-expanded-attributes';
+// create class using decorator
+const createClass = () => {
+ @withExpandedAttributes()
+ class Foo extends Model {
+ @attr('string', {
+ label: 'Foo',
+ subText: 'A form field',
+ })
+ foo;
+ @attr('boolean', {
+ label: 'Bar',
+ subText: 'Maybe a checkbox',
+ })
+ bar;
+ @attr('number', {
+ label: 'Baz',
+ subText: 'A number field',
+ })
+ baz;
+
+ get fieldGroups() {
+ return [{ default: ['baz'] }, { 'Other options': ['foo', 'bar'] }];
+ }
+ }
+ return new Foo();
+};
+
module('Unit | Decorators | model-expanded-attributes', function (hooks) {
- setupApplicationTest(hooks);
+ setupTest(hooks);
hooks.beforeEach(function () {
- this.store = this.owner.lookup('service:store');
this.spy = sinon.spy(console, 'error');
+ this.fooField = {
+ name: 'foo',
+ options: { label: 'Foo', subText: 'A form field' },
+ type: 'string',
+ };
+ this.barField = {
+ name: 'bar',
+ options: { label: 'Bar', subText: 'Maybe a checkbox' },
+ type: 'boolean',
+ };
+ this.bazField = {
+ name: 'baz',
+ options: { label: 'Baz', subText: 'A number field' },
+ type: 'number',
+ };
});
hooks.afterEach(function () {
this.spy.restore();
@@ -29,70 +71,28 @@ module('Unit | Decorators | model-expanded-attributes', function (hooks) {
test('it adds allByKey value to model', function (assert) {
assert.expect(1);
- const model = this.store.modelFor('namespace');
+ const model = createClass();
assert.deepEqual(
- model.prototype.allByKey,
- {
- path: {
- name: 'path',
- options: {},
- type: 'string',
- },
- },
+ { foo: this.fooField, bar: this.barField, baz: this.bazField },
+ model.allByKey,
'allByKey set on Model class'
);
});
test('_expandGroups helper works correctly', function (assert) {
- const model = this.store.modelFor('aws-credential');
- const result = model.prototype._expandGroups([
- { default: ['roleArn'] },
- { 'Other options': ['ttl', 'leaseId'] },
- ]);
+ const model = createClass();
+ const result = model._expandGroups(model.fieldGroups);
assert.deepEqual(result, [
- {
- default: [
- {
- name: 'roleArn',
- options: {
- helpText:
- 'The ARN of the role to assume if credential_type on the Vault role is assumed_role. Optional if the role has a single role ARN; required otherwise.',
- label: 'Role ARN',
- },
- type: 'string',
- },
- ],
- },
- {
- 'Other options': [
- {
- name: 'ttl',
- options: {
- defaultValue: '3600s',
- editType: 'ttl',
- helpText:
- 'Specifies the TTL for the use of the STS token. Valid only when credential_type is assumed_role, federation_token, or session_token.',
- label: 'TTL',
- setDefault: true,
- ttlOffValue: '',
- },
- type: undefined,
- },
- {
- name: 'leaseId',
- options: {},
- type: 'string',
- },
- ],
- },
+ { default: [this.bazField] },
+ { 'Other options': [this.fooField, this.barField] },
]);
});
test('_expandGroups throws assertion when incorrect inputs', function (assert) {
assert.expect(1);
- const model = this.store.modelFor('aws-credential');
+ const model = createClass();
try {
- model.prototype._expandGroups({ foo: ['bar'] });
+ model._expandGroups({ foo: ['bar'] });
} catch (e) {
assert.strictEqual(e.message, '_expandGroups expects an array of objects');
}
diff --git a/ui/tests/unit/decorators/model-form-fields-test.js b/ui/tests/unit/decorators/model-form-fields-test.js
index 89080bbc3483..330736432568 100644
--- a/ui/tests/unit/decorators/model-form-fields-test.js
+++ b/ui/tests/unit/decorators/model-form-fields-test.js
@@ -4,16 +4,54 @@
*/
import { module, test } from 'qunit';
-import { setupApplicationTest } from 'ember-qunit';
+import { setupTest } from 'ember-qunit';
import { withFormFields } from 'vault/decorators/model-form-fields';
import sinon from 'sinon';
+import Model, { attr } from '@ember-data/model';
+
+// create class using decorator
+const createClass = (propertyNames, groups) => {
+ @withFormFields(propertyNames, groups)
+ class Foo extends Model {
+ @attr('string', {
+ label: 'Foo',
+ subText: 'A form field',
+ })
+ foo;
+ @attr('boolean', {
+ label: 'Bar',
+ subText: 'Maybe a checkbox',
+ })
+ bar;
+ @attr('number', {
+ label: 'Baz',
+ subText: 'A number field',
+ })
+ baz;
+ }
+ return new Foo();
+};
module('Unit | Decorators | ModelFormFields', function (hooks) {
- setupApplicationTest(hooks);
+ setupTest(hooks);
hooks.beforeEach(function () {
this.spy = sinon.spy(console, 'error');
- this.store = this.owner.lookup('service:store');
+ this.fooField = {
+ name: 'foo',
+ options: { label: 'Foo', subText: 'A form field' },
+ type: 'string',
+ };
+ this.barField = {
+ name: 'bar',
+ options: { label: 'Bar', subText: 'Maybe a checkbox' },
+ type: 'boolean',
+ };
+ this.bazField = {
+ name: 'baz',
+ options: { label: 'Baz', subText: 'A number field' },
+ type: 'number',
+ };
});
hooks.afterEach(function () {
this.spy.restore();
@@ -29,101 +67,23 @@ module('Unit | Decorators | ModelFormFields', function (hooks) {
test('it return allFields when arguments not provided', function (assert) {
assert.expect(1);
- // test by instantiating a record that uses this decorator
- const record = this.store.createRecord('kv/data');
+ const model = createClass();
assert.deepEqual(
- record.allFields,
- [
- {
- name: 'backend',
- options: {},
- type: 'string',
- },
- {
- name: 'path',
- options: {
- label: 'Path for this secret',
- subText: 'Names with forward slashes define hierarchical path structures.',
- },
- type: 'string',
- },
- {
- name: 'secretData',
- options: {},
- type: 'object',
- },
- {
- name: 'createdTime',
- options: {},
- type: 'string',
- },
- {
- name: 'customMetadata',
- options: {},
- type: 'object',
- },
- {
- name: 'deletionTime',
- options: {},
- type: 'string',
- },
- {
- name: 'destroyed',
- options: {},
- type: 'boolean',
- },
- {
- name: 'version',
- options: {},
- type: 'number',
- },
- {
- name: 'failReadErrorCode',
- options: {},
- type: 'number',
- },
- {
- name: 'casVersion',
- options: {},
- type: 'number',
- },
- ],
+ [this.fooField, this.barField, this.bazField],
+ model.allFields,
'allFields set on Model class'
);
});
test('it should set formFields prop on Model class', function (assert) {
- // this model uses withFormFields
- const record = this.store.createRecord('clients/config');
- assert.deepEqual(
- record.formFields,
- [
- {
- name: 'enabled',
- options: {},
- type: 'string',
- },
- {
- name: 'retentionMonths',
- options: {
- label: 'Retention period',
- subText: 'The number of months of activity logs to maintain for client tracking.',
- },
- type: 'number',
- },
- ],
- 'formFields set on Model class'
- );
+ const model = createClass(['foo']);
+ assert.deepEqual([this.fooField], model.formFields, 'formFields set on Model class');
});
test('it should set formFieldGroups on Model class', function (assert) {
- // this model uses withFormFields with groups
- const record = this.store.createRecord('ldap/config');
- const groups = record.formFieldGroups.map((group) => Object.keys(group)[0]);
- assert.deepEqual(
- groups,
- ['default', 'TLS options', 'More options'],
- 'formFieldGroups set on Model class with correct group labels'
- );
+ const groups = [{ default: ['foo'] }, { subgroup: ['bar'] }];
+ const model = createClass(null, groups);
+ const fieldGroups = [{ default: [this.fooField] }, { subgroup: [this.barField] }];
+ assert.deepEqual(fieldGroups, model.formFieldGroups, 'formFieldGroups set on Model class');
});
});
diff --git a/ui/tests/unit/decorators/model-validations-test.js b/ui/tests/unit/decorators/model-validations-test.js
index 6148cb3d9a44..94ca73b1bda0 100644
--- a/ui/tests/unit/decorators/model-validations-test.js
+++ b/ui/tests/unit/decorators/model-validations-test.js
@@ -6,7 +6,7 @@
import { module, test } from 'qunit';
import { setupTest } from 'ember-qunit';
import { withModelValidations } from 'vault/decorators/model-validations';
-import validators from 'vault/utils/model-helpers/validators';
+import validators from 'vault/utils/validators';
import sinon from 'sinon';
import Model from '@ember-data/model';
diff --git a/ui/tests/unit/models/generated-item-test.js b/ui/tests/unit/models/generated-item-test.js
deleted file mode 100644
index 1c97de05cfaf..000000000000
--- a/ui/tests/unit/models/generated-item-test.js
+++ /dev/null
@@ -1,18 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { module, test } from 'qunit';
-
-import { setupTest } from 'vault/tests/helpers';
-
-module('Unit | Model | generated item', function (hooks) {
- setupTest(hooks);
-
- test('it exists', function (assert) {
- const store = this.owner.lookup('service:store');
- const model = store.createRecord('generated-item', {});
- assert.ok(model, 'generated-item model exists');
- });
-});
diff --git a/ui/tests/unit/services/pagination-test.js b/ui/tests/unit/services/pagination-test.js
deleted file mode 100644
index 9763c8d3a04e..000000000000
--- a/ui/tests/unit/services/pagination-test.js
+++ /dev/null
@@ -1,288 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { module, test } from 'qunit';
-import { setupTest } from 'ember-qunit';
-import { keyForCache } from 'vault/services/pagination';
-import { dasherize } from '@ember/string';
-import clamp from 'vault/utils/clamp';
-import config from 'vault/config/environment';
-import Sinon from 'sinon';
-
-const { DEFAULT_PAGE_SIZE } = config.APP;
-
-module('Unit | Service | pagination', function (hooks) {
- setupTest(hooks);
-
- hooks.beforeEach(function () {
- this.pagination = this.owner.lookup('service:pagination');
- this.store = this.owner.lookup('service:store');
- });
-
- test('pagination.setLazyCacheForModel', function (assert) {
- const modelName = 'someModel';
- const key = {
- id: '',
- backend: 'database',
- responsePath: 'data.keys',
- page: 1,
- pageFilter: null,
- size: 15,
- };
- const value = {
- response: {
- request_id: '1eb6473c-8df0-924e-1c8d-e016a6420aee',
- lease_id: '',
- renewable: false,
- lease_duration: 0,
- data: {
- keys: null,
- },
- wrap_info: null,
- warnings: null,
- auth: null,
- mount_type: 'database',
- backend: 'database',
- },
- dataset: ['connection', 'connection2'],
- };
- this.pagination.setLazyCacheForModel(modelName, key, value);
- const cacheEntry = this.pagination.lazyCaches.get(dasherize(modelName));
- const actual = Object.fromEntries(cacheEntry); // convert from Map to Object for assertion
- const expected = { '{"backend":"database","id":""}': value };
- assert.propEqual(actual, expected, 'model name is dasherized and can be retrieved from lazyCache');
- });
-
- test('keyForCache', function (assert) {
- const query = { id: 1 };
- const queryWithSize = { id: 1, size: 1 };
- assert.deepEqual(keyForCache(query), JSON.stringify(query), 'generated the correct cache key');
- assert.deepEqual(keyForCache(queryWithSize), JSON.stringify(query), 'excludes size from query cache');
- });
-
- test('clamp', function (assert) {
- assert.strictEqual(clamp('foo', 0, 100), 0, 'returns the min if passed a non-number');
- assert.strictEqual(clamp(0, 1, 100), 1, 'returns the min when passed number is less than the min');
- assert.strictEqual(clamp(200, 1, 100), 100, 'returns the max passed number is greater than the max');
- assert.strictEqual(clamp(50, 1, 100), 50, 'returns the passed number when it is in range');
- });
-
- test('pagination.storeDataset', function (assert) {
- const arr = ['one', 'two'];
- const query = { id: 1 };
- this.pagination.storeDataset('data', query, {}, arr);
-
- assert.deepEqual(
- this.pagination.getDataset('data', query).dataset,
- arr,
- 'it stores the array as .dataset'
- );
- assert.deepEqual(
- this.pagination.getDataset('data', query).response,
- {},
- 'it stores the response as .response'
- );
- assert.ok(this.pagination.get('lazyCaches').has('data'), 'it stores model map');
- assert.ok(
- this.pagination.get('lazyCaches').get('data').has(keyForCache(query)),
- 'it stores data on the model map'
- );
- });
-
- test('pagination.clearDataset with a prefix', function (assert) {
- const arr = ['one', 'two'];
- const arr2 = ['one', 'two', 'three', 'four'];
- this.pagination.storeDataset('data', { id: 1 }, {}, arr);
- this.pagination.storeDataset('transit-key', { id: 2 }, {}, arr2);
- assert.strictEqual(this.pagination.get('lazyCaches').size, 2, 'it stores both keys');
-
- this.pagination.clearDataset('transit-key');
- assert.strictEqual(this.pagination.get('lazyCaches').size, 1, 'deletes one key');
- assert.notOk(this.pagination.get('lazyCaches').has('transit-key'), 'cache is no longer stored');
- });
-
- test('pagination.clearDataset with no args clears entire cache', function (assert) {
- const arr = ['one', 'two'];
- const arr2 = ['one', 'two', 'three', 'four'];
- this.pagination.storeDataset('data', { id: 1 }, {}, arr);
- this.pagination.storeDataset('transit-key', { id: 2 }, {}, arr2);
- assert.strictEqual(this.pagination.get('lazyCaches').size, 2, 'it stores both keys');
-
- this.pagination.clearDataset();
- assert.strictEqual(this.pagination.get('lazyCaches').size, 0, 'deletes all of the keys');
- assert.notOk(this.pagination.get('lazyCaches').has('transit-key'), 'first cache key is no longer stored');
- assert.notOk(this.pagination.get('lazyCaches').has('data'), 'second cache key is no longer stored');
- });
-
- test('pagination.getDataset', function (assert) {
- const arr = ['one', 'two'];
- this.pagination.storeDataset('data', { id: 1 }, {}, arr);
-
- assert.deepEqual(this.pagination.getDataset('data', { id: 1 }), { response: {}, dataset: arr });
- });
-
- test('pagination.constructResponse', function (assert) {
- const arr = ['one', 'two', 'three', 'fifteen', 'twelve'];
- this.pagination.storeDataset('data', { id: 1 }, {}, arr);
-
- assert.deepEqual(
- this.pagination.constructResponse('data', {
- id: 1,
- pageFilter: 't',
- page: 1,
- size: 3,
- responsePath: 'data',
- }),
- {
- data: ['two', 'three', 'fifteen'],
- meta: {
- currentPage: 1,
- lastPage: 2,
- nextPage: 2,
- prevPage: 1,
- total: 5,
- filteredTotal: 4,
- pageSize: 3,
- },
- },
- 'it returns filtered results'
- );
- });
-
- test('pagination.fetchPage', async function (assert) {
- const keys = ['zero', 'one', 'two', 'three', 'four', 'five', 'six'];
- const data = {
- data: {
- keys,
- },
- };
- const pageSize = 2;
- const query = {
- size: pageSize,
- page: 1,
- responsePath: 'data.keys',
- };
- this.pagination.storeDataset('transit-key', query, data, keys);
-
- let result;
- result = await this.pagination.fetchPage('transit-key', query);
- assert.strictEqual(result.get('length'), pageSize, 'returns the correct number of items');
- assert.deepEqual(
- result.map((r) => r.id),
- keys.slice(0, pageSize),
- 'returns the first page of items'
- );
- assert.deepEqual(
- result.get('meta'),
- {
- nextPage: 2,
- prevPage: 1,
- currentPage: 1,
- lastPage: 4,
- total: 7,
- filteredTotal: 7,
- pageSize: 2,
- },
- 'returns correct meta values'
- );
-
- result = await this.pagination.fetchPage('transit-key', {
- size: pageSize,
- page: 3,
- responsePath: 'data.keys',
- });
- const pageThreeEnd = 3 * pageSize;
- const pageThreeStart = pageThreeEnd - pageSize;
- assert.deepEqual(
- result.map((r) => r.id),
- keys.slice(pageThreeStart, pageThreeEnd),
- 'returns the third page of items'
- );
-
- result = await this.pagination.fetchPage('transit-key', {
- size: pageSize,
- page: 99,
- responsePath: 'data.keys',
- });
-
- assert.deepEqual(
- result.map((r) => r.id),
- keys.slice(keys.length - 1),
- 'returns the last page when the page value is beyond the of bounds'
- );
-
- result = await this.pagination.fetchPage('transit-key', {
- size: pageSize,
- page: 0,
- responsePath: 'data.keys',
- });
- assert.deepEqual(
- result.map((r) => r.id),
- keys.slice(0, pageSize),
- 'returns the first page when page value is under the bounds'
- );
- });
-
- test('pagination.lazyPaginatedQuery', async function (assert) {
- const response = {
- data: ['foo'],
- };
- let queryArgs;
- const adapterForStub = () => {
- return {
- query(store, modelName, query) {
- queryArgs = query;
- return Promise.resolve(response);
- },
- };
- };
- Sinon.stub(this.store, 'adapterFor').callsFake(adapterForStub);
- // stub fetchPage because we test it separately
- Sinon.stub(this.pagination, 'fetchPage').callsFake(() => {});
- const query = { page: 1, size: 1, responsePath: 'data' };
-
- await this.pagination.lazyPaginatedQuery('transit-key', query);
- assert.deepEqual(
- this.pagination.getDataset('transit-key', query),
- { response: { data: null }, dataset: ['foo'] },
- 'stores returned dataset'
- );
-
- await this.pagination.lazyPaginatedQuery('secret', { page: 1, responsePath: 'data' });
- assert.strictEqual(queryArgs.size, DEFAULT_PAGE_SIZE, 'calls query with DEFAULT_PAGE_SIZE');
-
- assert.throws(
- () => {
- this.pagination.lazyPaginatedQuery('transit-key', {});
- },
- /responsePath is required/,
- 'requires responsePath'
- );
- assert.throws(
- () => {
- this.pagination.lazyPaginatedQuery('transit-key', { responsePath: 'foo' });
- },
- /page is required/,
- 'requires page'
- );
- });
-
- test('pagination.filterData', async function (assert) {
- const dataset = [
- { id: 'foo', name: 'Foo', type: 'test' },
- { id: 'bar', name: 'Bar', type: 'test' },
- { id: 'bar-2', name: 'Bar', type: null },
- ];
-
- const defaultFiltering = this.pagination.filterData('foo', dataset);
- assert.deepEqual(defaultFiltering, [{ id: 'foo', name: 'Foo', type: 'test' }]);
-
- const filter = (data) => {
- return data.filter((d) => d.name === 'Bar' && d.type === 'test');
- };
- const customFiltering = this.pagination.filterData(filter, dataset);
- assert.deepEqual(customFiltering, [{ id: 'bar', name: 'Bar', type: 'test' }]);
- });
-});
diff --git a/ui/tests/unit/services/store-test.js b/ui/tests/unit/services/store-test.js
new file mode 100644
index 000000000000..9510fac73489
--- /dev/null
+++ b/ui/tests/unit/services/store-test.js
@@ -0,0 +1,257 @@
+/**
+ * Copyright (c) HashiCorp, Inc.
+ * SPDX-License-Identifier: BUSL-1.1
+ */
+
+import { resolve } from 'rsvp';
+import { run } from '@ember/runloop';
+import { module, test } from 'qunit';
+import { setupTest } from 'ember-qunit';
+import { normalizeModelName, keyForCache } from 'vault/services/store';
+import clamp from 'vault/utils/clamp';
+import config from 'vault/config/environment';
+
+const { DEFAULT_PAGE_SIZE } = config.APP;
+
+module('Unit | Service | store', function (hooks) {
+ setupTest(hooks);
+
+ hooks.beforeEach(function () {
+ this.store = this.owner.lookup('service:store');
+ });
+
+ test('normalizeModelName', function (assert) {
+ assert.strictEqual(normalizeModelName('oneThing'), 'one-thing', 'dasherizes modelName');
+ });
+
+ test('keyForCache', function (assert) {
+ const query = { id: 1 };
+ const queryWithSize = { id: 1, size: 1 };
+ assert.deepEqual(keyForCache(query), JSON.stringify(query), 'generated the correct cache key');
+ assert.deepEqual(keyForCache(queryWithSize), JSON.stringify(query), 'excludes size from query cache');
+ });
+
+ test('clamp', function (assert) {
+ assert.strictEqual(clamp('foo', 0, 100), 0, 'returns the min if passed a non-number');
+ assert.strictEqual(clamp(0, 1, 100), 1, 'returns the min when passed number is less than the min');
+ assert.strictEqual(clamp(200, 1, 100), 100, 'returns the max passed number is greater than the max');
+ assert.strictEqual(clamp(50, 1, 100), 50, 'returns the passed number when it is in range');
+ });
+
+ test('store.storeDataset', function (assert) {
+ const arr = ['one', 'two'];
+ const query = { id: 1 };
+ this.store.storeDataset('data', query, {}, arr);
+
+ assert.deepEqual(this.store.getDataset('data', query).dataset, arr, 'it stores the array as .dataset');
+ assert.deepEqual(
+ this.store.getDataset('data', query).response,
+ {},
+ 'it stores the response as .response'
+ );
+ assert.ok(this.store.get('lazyCaches').has('data'), 'it stores model map');
+ assert.ok(
+ this.store.get('lazyCaches').get('data').has(keyForCache(query)),
+ 'it stores data on the model map'
+ );
+ });
+
+ test('store.clearDataset with a prefix', function (assert) {
+ const arr = ['one', 'two'];
+ const arr2 = ['one', 'two', 'three', 'four'];
+ this.store.storeDataset('data', { id: 1 }, {}, arr);
+ this.store.storeDataset('transit-key', { id: 2 }, {}, arr2);
+ assert.strictEqual(this.store.get('lazyCaches').size, 2, 'it stores both keys');
+
+ this.store.clearDataset('transit-key');
+ assert.strictEqual(this.store.get('lazyCaches').size, 1, 'deletes one key');
+ assert.notOk(this.store.get('lazyCaches').has('transit-key'), 'cache is no longer stored');
+ });
+
+ test('store.clearAllDatasets', function (assert) {
+ const arr = ['one', 'two'];
+ const arr2 = ['one', 'two', 'three', 'four'];
+ this.store.storeDataset('data', { id: 1 }, {}, arr);
+ this.store.storeDataset('transit-key', { id: 2 }, {}, arr2);
+ assert.strictEqual(this.store.get('lazyCaches').size, 2, 'it stores both keys');
+
+ this.store.clearAllDatasets();
+ assert.strictEqual(this.store.get('lazyCaches').size, 0, 'deletes all of the keys');
+ assert.notOk(this.store.get('lazyCaches').has('transit-key'), 'first cache key is no longer stored');
+ assert.notOk(this.store.get('lazyCaches').has('data'), 'second cache key is no longer stored');
+ });
+
+ test('store.getDataset', function (assert) {
+ const arr = ['one', 'two'];
+ this.store.storeDataset('data', { id: 1 }, {}, arr);
+
+ assert.deepEqual(this.store.getDataset('data', { id: 1 }), { response: {}, dataset: arr });
+ });
+
+ test('store.constructResponse', function (assert) {
+ const arr = ['one', 'two', 'three', 'fifteen', 'twelve'];
+ this.store.storeDataset('data', { id: 1 }, {}, arr);
+
+ assert.deepEqual(
+ this.store.constructResponse('data', {
+ id: 1,
+ pageFilter: 't',
+ page: 1,
+ size: 3,
+ responsePath: 'data',
+ }),
+ {
+ data: ['two', 'three', 'fifteen'],
+ meta: {
+ currentPage: 1,
+ lastPage: 2,
+ nextPage: 2,
+ prevPage: 1,
+ total: 5,
+ filteredTotal: 4,
+ pageSize: 3,
+ },
+ },
+ 'it returns filtered results'
+ );
+ });
+
+ test('store.fetchPage', async function (assert) {
+ const keys = ['zero', 'one', 'two', 'three', 'four', 'five', 'six'];
+ const data = {
+ data: {
+ keys,
+ },
+ };
+ const pageSize = 2;
+ const query = {
+ size: pageSize,
+ page: 1,
+ responsePath: 'data.keys',
+ };
+ this.store.storeDataset('transit-key', query, data, keys);
+
+ let result;
+ result = await this.store.fetchPage('transit-key', query);
+ assert.strictEqual(result.get('length'), pageSize, 'returns the correct number of items');
+ assert.deepEqual(
+ result.map((r) => r.id),
+ keys.slice(0, pageSize),
+ 'returns the first page of items'
+ );
+ assert.deepEqual(
+ result.get('meta'),
+ {
+ nextPage: 2,
+ prevPage: 1,
+ currentPage: 1,
+ lastPage: 4,
+ total: 7,
+ filteredTotal: 7,
+ pageSize: 2,
+ },
+ 'returns correct meta values'
+ );
+
+ result = await this.store.fetchPage('transit-key', {
+ size: pageSize,
+ page: 3,
+ responsePath: 'data.keys',
+ });
+ const pageThreeEnd = 3 * pageSize;
+ const pageThreeStart = pageThreeEnd - pageSize;
+ assert.deepEqual(
+ result.map((r) => r.id),
+ keys.slice(pageThreeStart, pageThreeEnd),
+ 'returns the third page of items'
+ );
+
+ result = await this.store.fetchPage('transit-key', {
+ size: pageSize,
+ page: 99,
+ responsePath: 'data.keys',
+ });
+
+ assert.deepEqual(
+ result.map((r) => r.id),
+ keys.slice(keys.length - 1),
+ 'returns the last page when the page value is beyond the of bounds'
+ );
+
+ result = await this.store.fetchPage('transit-key', {
+ size: pageSize,
+ page: 0,
+ responsePath: 'data.keys',
+ });
+ assert.deepEqual(
+ result.map((r) => r.id),
+ keys.slice(0, pageSize),
+ 'returns the first page when page value is under the bounds'
+ );
+ });
+
+ test('store.lazyPaginatedQuery', function (assert) {
+ const response = {
+ data: ['foo'],
+ };
+ let queryArgs;
+ const store = this.owner.factoryFor('service:store').create({
+ adapterFor() {
+ return {
+ query(store, modelName, query) {
+ queryArgs = query;
+ return resolve(response);
+ },
+ };
+ },
+ fetchPage() {},
+ });
+
+ const query = { page: 1, size: 1, responsePath: 'data' };
+ run(function () {
+ store.lazyPaginatedQuery('transit-key', query);
+ });
+ assert.deepEqual(
+ store.getDataset('transit-key', query),
+ { response: { data: null }, dataset: ['foo'] },
+ 'stores returned dataset'
+ );
+
+ run(function () {
+ store.lazyPaginatedQuery('secret', { page: 1, responsePath: 'data' });
+ });
+ assert.strictEqual(queryArgs.size, DEFAULT_PAGE_SIZE, 'calls query with DEFAULT_PAGE_SIZE');
+
+ assert.throws(
+ () => {
+ store.lazyPaginatedQuery('transit-key', {});
+ },
+ /responsePath is required/,
+ 'requires responsePath'
+ );
+ assert.throws(
+ () => {
+ store.lazyPaginatedQuery('transit-key', { responsePath: 'foo' });
+ },
+ /page is required/,
+ 'requires page'
+ );
+ });
+
+ test('store.filterData', async function (assert) {
+ const dataset = [
+ { id: 'foo', name: 'Foo', type: 'test' },
+ { id: 'bar', name: 'Bar', type: 'test' },
+ { id: 'bar-2', name: 'Bar', type: null },
+ ];
+
+ const defaultFiltering = this.store.filterData('foo', dataset);
+ assert.deepEqual(defaultFiltering, [{ id: 'foo', name: 'Foo', type: 'test' }]);
+
+ const filter = (data) => {
+ return data.filter((d) => d.name === 'Bar' && d.type === 'test');
+ };
+ const customFiltering = this.store.filterData(filter, dataset);
+ assert.deepEqual(customFiltering, [{ id: 'bar', name: 'Bar', type: 'test' }]);
+ });
+});
diff --git a/ui/tests/unit/utils/kmip-role-fields-test.js b/ui/tests/unit/utils/kmip-role-fields-test.js
deleted file mode 100644
index ff7e34a62d05..000000000000
--- a/ui/tests/unit/utils/kmip-role-fields-test.js
+++ /dev/null
@@ -1,52 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import { module, test } from 'qunit';
-import { setupTest } from 'ember-qunit';
-import {
- nonOperationFields,
- operationFields,
- operationFieldsWithoutSpecial,
-} from 'vault/utils/model-helpers/kmip-role-fields';
-
-module('Unit | Util | kmip role fields', function (hooks) {
- setupTest(hooks);
-
- [
- {
- name: 'when fields is empty',
- fields: [],
- opFields: [],
- nonOpFields: [],
- opWithoutSpecial: [],
- },
- {
- name: 'when no op fields',
- fields: ['foo', 'bar'],
- opFields: [],
- nonOpFields: ['foo', 'bar'],
- opWithoutSpecial: [],
- },
- {
- name: 'when op fields',
- fields: ['foo', 'bar', 'operationFoo', 'operationBar', 'operationAll'],
- opFields: ['operationFoo', 'operationBar', 'operationAll'],
- nonOpFields: ['foo', 'bar'],
- opWithoutSpecial: ['operationFoo', 'operationBar'],
- },
- ].forEach(({ name, fields, opFields, nonOpFields, opWithoutSpecial }) => {
- test(`${name}`, function (assert) {
- const originalFields = JSON.parse(JSON.stringify(fields));
- assert.deepEqual(operationFields(fields), opFields, 'operation fields correct');
- assert.deepEqual(nonOperationFields(fields), nonOpFields, 'non operation fields');
- assert.deepEqual(
- operationFieldsWithoutSpecial(fields),
- opWithoutSpecial,
- 'operation fields without special'
- );
- assert.deepEqual(fields, originalFields, 'does not mutate the original');
- });
- });
-});
diff --git a/ui/tests/unit/utils/openapi-helpers-test.js b/ui/tests/unit/utils/openapi-helpers-test.js
index dd8dbfb648e3..a0f18a37bcdc 100644
--- a/ui/tests/unit/utils/openapi-helpers-test.js
+++ b/ui/tests/unit/utils/openapi-helpers-test.js
@@ -4,20 +4,9 @@
*/
import { module, test } from 'qunit';
-import {
- _getPathParam,
- combineOpenApiAttrs,
- expandOpenApiProps,
- getHelpUrlForModel,
- pathToHelpUrlSegment,
-} from 'vault/utils/openapi-helpers';
-import Model, { attr } from '@ember-data/model';
-import { setupTest } from 'ember-qunit';
-import { camelize } from '@ember/string';
-
-module('Unit | Utility | OpenAPI helper utils', function (hooks) {
- setupTest(hooks);
+import { _getPathParam, getHelpUrlForModel, pathToHelpUrlSegment } from 'vault/utils/openapi-helpers';
+module('Unit | Utility | OpenAPI helper utils', function () {
test(`pathToHelpUrlSegment`, function (assert) {
[
{ path: '/auth/{username}', result: '/auth/example' },
@@ -59,236 +48,4 @@ module('Unit | Utility | OpenAPI helper utils', function (hooks) {
);
});
});
-
- test('combineOpenApiAttrs should combine attributes correctly', async function (assert) {
- class FooModel extends Model {
- @attr('string', {
- label: 'Foo',
- subText: 'A form field',
- })
- foo;
- @attr('boolean', {
- label: 'Bar',
- subText: 'Maybe a checkbox',
- })
- bar;
- @attr('number', {
- label: 'Baz',
- subText: 'A number field',
- })
- baz;
- }
- this.owner.register('model:foo', FooModel);
- const myModel = this.owner.lookup('service:store').modelFor('foo');
- const newProps = {
- foo: {
- editType: 'ttl',
- },
- baz: {
- type: 'number',
- editType: 'slider',
- label: 'Old label',
- },
- foobar: {
- type: 'string',
- label: 'Foo-bar',
- },
- };
- const expected = [
- {
- name: 'foo',
- type: 'string',
- options: {
- label: 'Foo',
- subText: 'A form field',
- editType: 'ttl',
- },
- },
- {
- name: 'bar',
- type: 'boolean',
- options: {
- label: 'Bar',
- subText: 'Maybe a checkbox',
- },
- },
- {
- name: 'baz',
- type: 'number',
- options: {
- label: 'Baz', // uses the value we set on the model
- editType: 'slider',
- subText: 'A number field',
- },
- },
- {
- name: 'foobar',
- type: 'string',
- options: {
- label: 'Foo-bar',
- },
- },
- ];
- const { attrs, newFields } = combineOpenApiAttrs(myModel.attributes, newProps);
- assert.deepEqual(newFields, ['foobar'], 'correct newFields added');
-
- // When combineOpenApiAttrs
- assert.strictEqual(attrs.length, 4, 'correct number of attributes returned');
- expected.forEach((exp) => {
- const name = exp.name;
- const attr = attrs.find((a) => a.name === name);
- assert.deepEqual(attr, exp, `${name} combined properly`);
- });
- });
-
- module('expandopenApiProps', function () {
- const OPENAPI_RESPONSE_PROPS = {
- ttl: {
- type: 'string',
- format: 'seconds',
- description: 'this is a TTL!',
- 'x-vault-displayAttrs': {
- name: 'TTL',
- },
- },
- 'awesome-people': {
- type: 'array',
- items: {
- type: 'string',
- },
- 'x-vault-displayAttrs': {
- value: 'Grace Hopper,Lady Ada',
- },
- },
- 'favorite-ice-cream': {
- type: 'string',
- enum: ['vanilla', 'chocolate', 'strawberry'],
- },
- 'default-value': {
- default: 30,
- 'x-vault-displayAttrs': {
- value: 300,
- },
- type: 'integer',
- },
- default: {
- 'x-vault-displayAttrs': {
- value: 30,
- },
- type: 'integer',
- },
- 'super-secret': {
- type: 'string',
- 'x-vault-displayAttrs': {
- sensitive: true,
- },
- description: 'A really secret thing',
- },
- };
- const EXPANDED_PROPS = {
- ttl: {
- helpText: 'this is a TTL!',
- editType: 'ttl',
- label: 'TTL',
- fieldGroup: 'default',
- },
- awesomePeople: {
- editType: 'stringArray',
- defaultValue: 'Grace Hopper,Lady Ada',
- fieldGroup: 'default',
- },
- favoriteIceCream: {
- editType: 'string',
- type: 'string',
- possibleValues: ['vanilla', 'chocolate', 'strawberry'],
- fieldGroup: 'default',
- },
- defaultValue: {
- editType: 'number',
- type: 'number',
- defaultValue: 300,
- fieldGroup: 'default',
- },
- default: {
- editType: 'number',
- type: 'number',
- defaultValue: 30,
- fieldGroup: 'default',
- },
- superSecret: {
- type: 'string',
- editType: 'string',
- sensitive: true,
- helpText: 'A really secret thing',
- fieldGroup: 'default',
- },
- };
- const OPENAPI_DESCRIPTIONS = {
- token_bound_cidrs: {
- type: 'array',
- description:
- 'Comma separated string or JSON list of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.',
- items: {
- type: 'string',
- },
- 'x-vault-displayAttrs': {
- description:
- 'List of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.',
- name: "Generated Token's Bound CIDRs",
- group: 'Tokens',
- },
- },
- blah_blah: {
- type: 'array',
- description: 'Comma-separated list of policies',
- items: {
- type: 'string',
- },
- 'x-vault-displayAttrs': {
- name: "Generated Token's Policies",
- group: 'Tokens',
- },
- },
- only_display_description: {
- type: 'array',
- items: {
- type: 'string',
- },
- 'x-vault-displayAttrs': {
- description: 'Hello there, you look nice today',
- },
- },
- };
-
- const STRING_ARRAY_DESCRIPTIONS = {
- token_bound_cidrs: {
- helpText:
- 'List of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.',
- },
- blah_blah: {
- helpText: 'Comma-separated list of policies',
- },
- only_display_description: {
- helpText: 'Hello there, you look nice today',
- },
- };
- test('it creates objects from OpenAPI schema props', function (assert) {
- assert.expect(6);
- const generatedProps = expandOpenApiProps(OPENAPI_RESPONSE_PROPS);
- for (const propName in EXPANDED_PROPS) {
- assert.deepEqual(EXPANDED_PROPS[propName], generatedProps[propName], `correctly expands ${propName}`);
- }
- });
- test('it uses the description from the display attrs block if it exists', async function (assert) {
- assert.expect(3);
- const generatedProps = expandOpenApiProps(OPENAPI_DESCRIPTIONS);
- for (const propName in STRING_ARRAY_DESCRIPTIONS) {
- assert.strictEqual(
- generatedProps[camelize(propName)].helpText,
- STRING_ARRAY_DESCRIPTIONS[propName].helpText,
- `correctly updates helpText for ${propName}`
- );
- }
- });
- });
});
diff --git a/ui/tests/unit/utils/openapi-to-attrs-test.js b/ui/tests/unit/utils/openapi-to-attrs-test.js
index 560bff9f48da..3d4cd57081e0 100644
--- a/ui/tests/unit/utils/openapi-to-attrs-test.js
+++ b/ui/tests/unit/utils/openapi-to-attrs-test.js
@@ -3,12 +3,215 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import { combineFieldGroups } from 'vault/utils/openapi-to-attrs';
+import { attr } from '@ember-data/model';
+import { expandOpenApiProps, combineAttributes, combineFieldGroups } from 'vault/utils/openapi-to-attrs';
import { module, test } from 'qunit';
+import { camelize } from '@ember/string';
+
+module('Unit | Util | OpenAPI Data Utilities', function () {
+ const OPENAPI_RESPONSE_PROPS = {
+ ttl: {
+ type: 'string',
+ format: 'seconds',
+ description: 'this is a TTL!',
+ 'x-vault-displayAttrs': {
+ name: 'TTL',
+ },
+ },
+ 'awesome-people': {
+ type: 'array',
+ items: {
+ type: 'string',
+ },
+ 'x-vault-displayAttrs': {
+ value: 'Grace Hopper,Lady Ada',
+ },
+ },
+ 'favorite-ice-cream': {
+ type: 'string',
+ enum: ['vanilla', 'chocolate', 'strawberry'],
+ },
+ 'default-value': {
+ default: 30,
+ 'x-vault-displayAttrs': {
+ value: 300,
+ },
+ type: 'integer',
+ },
+ default: {
+ 'x-vault-displayAttrs': {
+ value: 30,
+ },
+ type: 'integer',
+ },
+ 'super-secret': {
+ type: 'string',
+ 'x-vault-displayAttrs': {
+ sensitive: true,
+ },
+ description: 'A really secret thing',
+ },
+ };
+ const EXPANDED_PROPS = {
+ ttl: {
+ helpText: 'this is a TTL!',
+ editType: 'ttl',
+ label: 'TTL',
+ fieldGroup: 'default',
+ },
+ awesomePeople: {
+ editType: 'stringArray',
+ defaultValue: 'Grace Hopper,Lady Ada',
+ fieldGroup: 'default',
+ },
+ favoriteIceCream: {
+ editType: 'string',
+ type: 'string',
+ possibleValues: ['vanilla', 'chocolate', 'strawberry'],
+ fieldGroup: 'default',
+ },
+ defaultValue: {
+ editType: 'number',
+ type: 'number',
+ defaultValue: 300,
+ fieldGroup: 'default',
+ },
+ default: {
+ editType: 'number',
+ type: 'number',
+ defaultValue: 30,
+ fieldGroup: 'default',
+ },
+ superSecret: {
+ type: 'string',
+ editType: 'string',
+ sensitive: true,
+ helpText: 'A really secret thing',
+ fieldGroup: 'default',
+ },
+ };
+
+ const EXISTING_MODEL_ATTRS = [
+ {
+ key: 'name',
+ value: {
+ isAttribute: true,
+ name: 'name',
+ options: {
+ editType: 'string',
+ label: 'Role name',
+ },
+ },
+ },
+ {
+ key: 'awesomePeople',
+ value: {
+ isAttribute: true,
+ name: 'awesomePeople',
+ options: {
+ label: 'People Who Are Awesome',
+ },
+ },
+ },
+ ];
+
+ const COMBINED_ATTRS = {
+ name: attr('string', {
+ editType: 'string',
+ type: 'string',
+ label: 'Role name',
+ }),
+ ttl: attr('string', {
+ editType: 'ttl',
+ label: 'TTL',
+ helpText: 'this is a TTL!',
+ }),
+ awesomePeople: attr({
+ label: 'People Who Are Awesome',
+ editType: 'stringArray',
+ defaultValue: 'Grace Hopper,Lady Ada',
+ }),
+ favoriteIceCream: attr('string', {
+ type: 'string',
+ editType: 'string',
+ possibleValues: ['vanilla', 'chocolate', 'strawberry'],
+ }),
+ superSecret: attr('string', {
+ type: 'string',
+ editType: 'string',
+ sensitive: true,
+ description: 'A really secret thing',
+ }),
+ };
-module('Unit | Util | combineFieldGroups', function () {
const NEW_FIELDS = ['one', 'two', 'three'];
+ const OPENAPI_DESCRIPTIONS = {
+ token_bound_cidrs: {
+ type: 'array',
+ description:
+ 'Comma separated string or JSON list of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.',
+ items: {
+ type: 'string',
+ },
+ 'x-vault-displayAttrs': {
+ description:
+ 'List of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.',
+ name: "Generated Token's Bound CIDRs",
+ group: 'Tokens',
+ },
+ },
+ blah_blah: {
+ type: 'array',
+ description: 'Comma-separated list of policies',
+ items: {
+ type: 'string',
+ },
+ 'x-vault-displayAttrs': {
+ name: "Generated Token's Policies",
+ group: 'Tokens',
+ },
+ },
+ only_display_description: {
+ type: 'array',
+ items: {
+ type: 'string',
+ },
+ 'x-vault-displayAttrs': {
+ description: 'Hello there, you look nice today',
+ },
+ },
+ };
+
+ const STRING_ARRAY_DESCRIPTIONS = {
+ token_bound_cidrs: {
+ helpText:
+ 'List of CIDR blocks. If set, specifies the blocks of IP addresses which are allowed to use the generated token.',
+ },
+ blah_blah: {
+ helpText: 'Comma-separated list of policies',
+ },
+ only_display_description: {
+ helpText: 'Hello there, you look nice today',
+ },
+ };
+
+ test('it creates objects from OpenAPI schema props', function (assert) {
+ assert.expect(6);
+ const generatedProps = expandOpenApiProps(OPENAPI_RESPONSE_PROPS);
+ for (const propName in EXPANDED_PROPS) {
+ assert.deepEqual(EXPANDED_PROPS[propName], generatedProps[propName], `correctly expands ${propName}`);
+ }
+ });
+
+ test('it combines OpenAPI props with existing model attrs', function (assert) {
+ assert.expect(3);
+ const combined = combineAttributes(EXISTING_MODEL_ATTRS, EXPANDED_PROPS);
+ for (const propName in EXISTING_MODEL_ATTRS) {
+ assert.deepEqual(COMBINED_ATTRS[propName], combined[propName]);
+ }
+ });
+
test('it adds new fields from OpenAPI to fieldGroups except for exclusions', function (assert) {
assert.expect(3);
const modelFieldGroups = [
@@ -77,4 +280,16 @@ module('Unit | Util | combineFieldGroups', function () {
assert.deepEqual(fieldGroups[groupName], expectedGroups[groupName], 'it incorporates all new fields');
}
});
+
+ test('it uses the description from the display attrs block if it exists', async function (assert) {
+ assert.expect(3);
+ const generatedProps = expandOpenApiProps(OPENAPI_DESCRIPTIONS);
+ for (const propName in STRING_ARRAY_DESCRIPTIONS) {
+ assert.strictEqual(
+ generatedProps[camelize(propName)].helpText,
+ STRING_ARRAY_DESCRIPTIONS[propName].helpText,
+ `correctly updates helpText for ${propName}`
+ );
+ }
+ });
});
diff --git a/ui/tests/unit/utils/model-helpers/validators-test.js b/ui/tests/unit/utils/validators-test.js
similarity index 98%
rename from ui/tests/unit/utils/model-helpers/validators-test.js
rename to ui/tests/unit/utils/validators-test.js
index 1642236db301..acd0011bf1b3 100644
--- a/ui/tests/unit/utils/model-helpers/validators-test.js
+++ b/ui/tests/unit/utils/validators-test.js
@@ -5,7 +5,7 @@
import { module, test } from 'qunit';
import { setupTest } from 'ember-qunit';
-import validators from 'vault/utils/model-helpers/validators';
+import validators from 'vault/utils/validators';
module('Unit | Util | validators', function (hooks) {
setupTest(hooks);
diff --git a/ui/types/vault/services/pagination.d.ts b/ui/types/vault/services/pagination.d.ts
deleted file mode 100644
index cc06d35a0ca1..000000000000
--- a/ui/types/vault/services/pagination.d.ts
+++ /dev/null
@@ -1,16 +0,0 @@
-/**
- * Copyright (c) HashiCorp, Inc.
- * SPDX-License-Identifier: BUSL-1.1
- */
-
-import Service from '@ember/service';
-import { RecordArray } from '@ember-data/store';
-
-export default class PaginationService extends Service {
- lazyPaginatedQuery(
- modelName: string,
- query: object,
- options?: { adapterOptions: object }
- ): Promise;
- clearDataset(modelName: string);
-}
diff --git a/ui/types/vault/services/store.d.ts b/ui/types/vault/services/store.d.ts
index 970990b691c5..9abeb08ee75e 100644
--- a/ui/types/vault/services/store.d.ts
+++ b/ui/types/vault/services/store.d.ts
@@ -3,11 +3,16 @@
* SPDX-License-Identifier: BUSL-1.1
*/
-import Store from '@ember-data/store';
+import Store, { RecordArray } from '@ember-data/store';
export default class StoreService extends Store {
- adapterFor(modelName: string);
- createRecord(modelName: string, object);
+ lazyPaginatedQuery(
+ modelName: string,
+ query: object,
+ options?: { adapterOptions: object }
+ ): Promise;
+
+ clearDataset(modelName: string);
findRecord(modelName: string, path: string);
peekRecord(modelName: string, path: string);
query(modelName: string, query: object);
diff --git a/ui/yarn.lock b/ui/yarn.lock
index cc35969b0648..bcf86973735e 100644
--- a/ui/yarn.lock
+++ b/ui/yarn.lock
@@ -16,12 +16,12 @@ __metadata:
linkType: hard
"@babel/cli@npm:^7.24.6":
- version: 7.25.9
- resolution: "@babel/cli@npm:7.25.9"
+ version: 7.24.8
+ resolution: "@babel/cli@npm:7.24.8"
dependencies:
"@jridgewell/trace-mapping": ^0.3.25
"@nicolo-ribaudo/chokidar-2": 2.1.8-no-fsevents.3
- chokidar: ^3.6.0
+ chokidar: ^3.4.0
commander: ^6.2.0
convert-source-map: ^2.0.0
fs-readdir-recursive: ^1.1.0
@@ -38,54 +38,53 @@ __metadata:
bin:
babel: ./bin/babel.js
babel-external-helpers: ./bin/babel-external-helpers.js
- checksum: e52fb39df804cf272785ec138c47c0a3cda6bd7099520123e987683fd72c0b8d3665512b01b01d38d351e3263e17be3fc6e8dcf01e417b8f052370375b6419d4
+ checksum: 8a1fb83d0c2959b6a83cccab55ac1b0ffd408e1959369609071dadb38c1dc99a501d58751b6e4f0c43b751e595e9868856433b01832a19f592f004dd854a8c1f
languageName: node
linkType: hard
-"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.25.9, @babel/code-frame@npm:^7.26.0":
- version: 7.26.2
- resolution: "@babel/code-frame@npm:7.26.2"
+"@babel/code-frame@npm:^7.0.0, @babel/code-frame@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/code-frame@npm:7.24.7"
dependencies:
- "@babel/helper-validator-identifier": ^7.25.9
- js-tokens: ^4.0.0
+ "@babel/highlight": ^7.24.7
picocolors: ^1.0.0
- checksum: db13f5c42d54b76c1480916485e6900748bbcb0014a8aca87f50a091f70ff4e0d0a6db63cade75eb41fcc3d2b6ba0a7f89e343def4f96f00269b41b8ab8dd7b8
+ checksum: 830e62cd38775fdf84d612544251ce773d544a8e63df667728cc9e0126eeef14c6ebda79be0f0bc307e8318316b7f58c27ce86702e0a1f5c321d842eb38ffda4
languageName: node
linkType: hard
-"@babel/compat-data@npm:^7.20.5, @babel/compat-data@npm:^7.22.6, @babel/compat-data@npm:^7.25.9, @babel/compat-data@npm:^7.26.0":
- version: 7.26.2
- resolution: "@babel/compat-data@npm:7.26.2"
- checksum: d52fae9b0dc59b409d6005ae6b172e89329f46d68136130065ebe923a156fc633e0f1c8600b3e319b9e0f99fd948f64991a5419e2e9431d00d9d235d5f7a7618
+"@babel/compat-data@npm:^7.20.5, @babel/compat-data@npm:^7.22.6, @babel/compat-data@npm:^7.24.8":
+ version: 7.24.9
+ resolution: "@babel/compat-data@npm:7.24.9"
+ checksum: 3590be0f7028bca0565a83f66752c0f0283b818e9e1bb7fc12912822768e379a6ff84c59d77dc64ba62c140b8500a3828d95c0ce013cd62d254a179bae38709b
languageName: node
linkType: hard
-"@babel/core@npm:^7.0.0, @babel/core@npm:^7.12.0, @babel/core@npm:^7.13.10, @babel/core@npm:^7.16.10, @babel/core@npm:^7.16.7, @babel/core@npm:^7.22.20, @babel/core@npm:^7.23.2, @babel/core@npm:^7.23.6, @babel/core@npm:^7.25.2, @babel/core@npm:^7.3.4":
- version: 7.26.0
- resolution: "@babel/core@npm:7.26.0"
+"@babel/core@npm:^7.0.0, @babel/core@npm:^7.12.0, @babel/core@npm:^7.13.10, @babel/core@npm:^7.16.10, @babel/core@npm:^7.16.7, @babel/core@npm:^7.21.4, @babel/core@npm:^7.22.20, @babel/core@npm:^7.23.2, @babel/core@npm:^7.23.6, @babel/core@npm:^7.24.5, @babel/core@npm:^7.3.4":
+ version: 7.24.9
+ resolution: "@babel/core@npm:7.24.9"
dependencies:
"@ampproject/remapping": ^2.2.0
- "@babel/code-frame": ^7.26.0
- "@babel/generator": ^7.26.0
- "@babel/helper-compilation-targets": ^7.25.9
- "@babel/helper-module-transforms": ^7.26.0
- "@babel/helpers": ^7.26.0
- "@babel/parser": ^7.26.0
- "@babel/template": ^7.25.9
- "@babel/traverse": ^7.25.9
- "@babel/types": ^7.26.0
+ "@babel/code-frame": ^7.24.7
+ "@babel/generator": ^7.24.9
+ "@babel/helper-compilation-targets": ^7.24.8
+ "@babel/helper-module-transforms": ^7.24.9
+ "@babel/helpers": ^7.24.8
+ "@babel/parser": ^7.24.8
+ "@babel/template": ^7.24.7
+ "@babel/traverse": ^7.24.8
+ "@babel/types": ^7.24.9
convert-source-map: ^2.0.0
debug: ^4.1.0
gensync: ^1.0.0-beta.2
json5: ^2.2.3
semver: ^6.3.1
- checksum: b296084cfd818bed8079526af93b5dfa0ba70282532d2132caf71d4060ab190ba26d3184832a45accd82c3c54016985a4109ab9118674347a7e5e9bc464894e6
+ checksum: eae273bee154d6a059e742a2bb7a58b03438a1f70d7909887a28258b29556dc99bcd5cbd41f13cd4755a20b0baf5e82731acb1d3690e02b7a9300fb6d1950e2c
languageName: node
linkType: hard
"@babel/eslint-parser@npm:^7.22.15":
- version: 7.25.9
- resolution: "@babel/eslint-parser@npm:7.25.9"
+ version: 7.24.8
+ resolution: "@babel/eslint-parser@npm:7.24.8"
dependencies:
"@nicolo-ribaudo/eslint-scope-5-internals": 5.1.1-v1
eslint-visitor-keys: ^2.1.0
@@ -93,86 +92,87 @@ __metadata:
peerDependencies:
"@babel/core": ^7.11.0
eslint: ^7.5.0 || ^8.0.0 || ^9.0.0
- checksum: dd2afa122b62a5b07c1e71d1c23b2cd4d655d96609eb2ba1b1ae3ec6f415f4365b77d6669ff859aa7b75952fb63a1d29c5db6e5811fc4012841491cb2dee36e4
+ checksum: 4ca8845b6b068185af1c5b28217a005f370887cf8489983263bc7aebcc2290774a37ad9b971b78fbc3eca6a3d812306153f892b37525c3fc6be43e79c446d39e
languageName: node
linkType: hard
-"@babel/generator@npm:^7.25.9, @babel/generator@npm:^7.26.0":
- version: 7.26.2
- resolution: "@babel/generator@npm:7.26.2"
+"@babel/generator@npm:^7.24.8, @babel/generator@npm:^7.24.9":
+ version: 7.24.10
+ resolution: "@babel/generator@npm:7.24.10"
dependencies:
- "@babel/parser": ^7.26.2
- "@babel/types": ^7.26.0
+ "@babel/types": ^7.24.9
"@jridgewell/gen-mapping": ^0.3.5
"@jridgewell/trace-mapping": ^0.3.25
- jsesc: ^3.0.2
- checksum: 6ff850b7d6082619f8c2f518d993cf7254cfbaa20b026282cbef5c9b2197686d076a432b18e36c4d1a42721c016df4f77a8f62c67600775d9683621d534b91b4
+ jsesc: ^2.5.1
+ checksum: eb13806e9eb76932ea5205502a85ea650a991c7a6f757fbe859176f6d9b34b3da5a2c1f52a2c24fdbe0045a90438fe6889077e338cdd6c727619dee925af1ba6
languageName: node
linkType: hard
-"@babel/helper-annotate-as-pure@npm:^7.18.6, @babel/helper-annotate-as-pure@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-annotate-as-pure@npm:7.25.9"
+"@babel/helper-annotate-as-pure@npm:^7.18.6, @babel/helper-annotate-as-pure@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-annotate-as-pure@npm:7.24.7"
dependencies:
- "@babel/types": ^7.25.9
- checksum: 41edda10df1ae106a9b4fe617bf7c6df77db992992afd46192534f5cff29f9e49a303231733782dd65c5f9409714a529f215325569f14282046e9d3b7a1ffb6c
+ "@babel/types": ^7.24.7
+ checksum: 6178566099a6a0657db7a7fa601a54fb4731ca0b8614fbdccfd8e523c210c13963649bc8fdfd53ce7dd14d05e3dda2fb22dea5b30113c488b9eb1a906d60212e
languageName: node
linkType: hard
-"@babel/helper-builder-binary-assignment-operator-visitor@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-builder-binary-assignment-operator-visitor@npm:7.25.9"
+"@babel/helper-builder-binary-assignment-operator-visitor@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-builder-binary-assignment-operator-visitor@npm:7.24.7"
dependencies:
- "@babel/traverse": ^7.25.9
- "@babel/types": ^7.25.9
- checksum: e1bb465b3b0155702d82cfef09e3813e87a6d777cdd2c513796861eac14953340491eafea1d4109278bf4ceb48b54074c45758f042c0544d00c498090bee5a6f
+ "@babel/traverse": ^7.24.7
+ "@babel/types": ^7.24.7
+ checksum: 71a6158a9fdebffb82fdc400d5555ba8f2e370cea81a0d578155877bdc4db7d5252b75c43b2fdf3f72b3f68348891f99bd35ae315542daad1b7ace8322b1abcb
languageName: node
linkType: hard
-"@babel/helper-compilation-targets@npm:^7.12.0, @babel/helper-compilation-targets@npm:^7.20.7, @babel/helper-compilation-targets@npm:^7.22.6, @babel/helper-compilation-targets@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-compilation-targets@npm:7.25.9"
+"@babel/helper-compilation-targets@npm:^7.12.0, @babel/helper-compilation-targets@npm:^7.20.7, @babel/helper-compilation-targets@npm:^7.22.6, @babel/helper-compilation-targets@npm:^7.24.7, @babel/helper-compilation-targets@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/helper-compilation-targets@npm:7.24.8"
dependencies:
- "@babel/compat-data": ^7.25.9
- "@babel/helper-validator-option": ^7.25.9
- browserslist: ^4.24.0
+ "@babel/compat-data": ^7.24.8
+ "@babel/helper-validator-option": ^7.24.8
+ browserslist: ^4.23.1
lru-cache: ^5.1.1
semver: ^6.3.1
- checksum: 3af536e2db358b38f968abdf7d512d425d1018fef2f485d6f131a57a7bcaed32c606b4e148bb230e1508fa42b5b2ac281855a68eb78270f54698c48a83201b9b
+ checksum: 40c9e87212fffccca387504b259a629615d7df10fc9080c113da6c51095d3e8b622a1409d9ed09faf2191628449ea28d582179c5148e2e993a3140234076b8da
languageName: node
linkType: hard
-"@babel/helper-create-class-features-plugin@npm:^7.18.6, @babel/helper-create-class-features-plugin@npm:^7.21.0, @babel/helper-create-class-features-plugin@npm:^7.25.9, @babel/helper-create-class-features-plugin@npm:^7.5.5":
- version: 7.25.9
- resolution: "@babel/helper-create-class-features-plugin@npm:7.25.9"
+"@babel/helper-create-class-features-plugin@npm:^7.18.6, @babel/helper-create-class-features-plugin@npm:^7.21.0, @babel/helper-create-class-features-plugin@npm:^7.24.7, @babel/helper-create-class-features-plugin@npm:^7.24.8, @babel/helper-create-class-features-plugin@npm:^7.5.5":
+ version: 7.24.8
+ resolution: "@babel/helper-create-class-features-plugin@npm:7.24.8"
dependencies:
- "@babel/helper-annotate-as-pure": ^7.25.9
- "@babel/helper-member-expression-to-functions": ^7.25.9
- "@babel/helper-optimise-call-expression": ^7.25.9
- "@babel/helper-replace-supers": ^7.25.9
- "@babel/helper-skip-transparent-expression-wrappers": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-annotate-as-pure": ^7.24.7
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-function-name": ^7.24.7
+ "@babel/helper-member-expression-to-functions": ^7.24.8
+ "@babel/helper-optimise-call-expression": ^7.24.7
+ "@babel/helper-replace-supers": ^7.24.7
+ "@babel/helper-skip-transparent-expression-wrappers": ^7.24.7
+ "@babel/helper-split-export-declaration": ^7.24.7
semver: ^6.3.1
peerDependencies:
"@babel/core": ^7.0.0
- checksum: 91dd5f203ed04568c70b052e2f26dfaac7c146447196c00b8ecbb6d3d2f3b517abadb985d3321a19d143adaed6fe17f7f79f8f50e0c20e9d8ad83e1027b42424
+ checksum: b4707e2c4a2cb504d7656168d887bf653db6fbe8ece4502e28e5798f2ec624dc606f2d6bc4820d31b4dc1b80f7d83d98db83516dda321a76c075e5f531abed0b
languageName: node
linkType: hard
-"@babel/helper-create-regexp-features-plugin@npm:^7.18.6, @babel/helper-create-regexp-features-plugin@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-create-regexp-features-plugin@npm:7.25.9"
+"@babel/helper-create-regexp-features-plugin@npm:^7.18.6, @babel/helper-create-regexp-features-plugin@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-create-regexp-features-plugin@npm:7.24.7"
dependencies:
- "@babel/helper-annotate-as-pure": ^7.25.9
- regexpu-core: ^6.1.1
+ "@babel/helper-annotate-as-pure": ^7.24.7
+ regexpu-core: ^5.3.1
semver: ^6.3.1
peerDependencies:
"@babel/core": ^7.0.0
- checksum: 563ed361ceed3d7a9d64dd58616bf6f0befcc23620ab22d31dd6d8b751d3f99d6d210487b1a5a1e209ab4594df67bacfab7445cbfa092bfe2b719cd42ae1ba6f
+ checksum: 17c59fa222af50f643946eca940ce1d474ff2da1f4afed2312687ab9d708ebbb8c9372754ddbdf44b6e21ead88b8fc144644f3a7b63ccb886de002458cef3974
languageName: node
linkType: hard
-"@babel/helper-define-polyfill-provider@npm:^0.6.2":
+"@babel/helper-define-polyfill-provider@npm:^0.6.1, @babel/helper-define-polyfill-provider@npm:^0.6.2":
version: 0.6.2
resolution: "@babel/helper-define-polyfill-provider@npm:0.6.2"
dependencies:
@@ -187,210 +187,249 @@ __metadata:
languageName: node
linkType: hard
-"@babel/helper-member-expression-to-functions@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-member-expression-to-functions@npm:7.25.9"
+"@babel/helper-environment-visitor@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-environment-visitor@npm:7.24.7"
+ dependencies:
+ "@babel/types": ^7.24.7
+ checksum: 079d86e65701b29ebc10baf6ed548d17c19b808a07aa6885cc141b690a78581b180ee92b580d755361dc3b16adf975b2d2058b8ce6c86675fcaf43cf22f2f7c6
+ languageName: node
+ linkType: hard
+
+"@babel/helper-function-name@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-function-name@npm:7.24.7"
+ dependencies:
+ "@babel/template": ^7.24.7
+ "@babel/types": ^7.24.7
+ checksum: 142ee08922074dfdc0ff358e09ef9f07adf3671ab6eef4fca74dcf7a551f1a43717e7efa358c9e28d7eea84c28d7f177b7a58c70452fc312ae3b1893c5dab2a4
+ languageName: node
+ linkType: hard
+
+"@babel/helper-hoist-variables@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-hoist-variables@npm:7.24.7"
+ dependencies:
+ "@babel/types": ^7.24.7
+ checksum: 6cfdcf2289cd12185dcdbdf2435fa8d3447b797ac75851166de9fc8503e2fd0021db6baf8dfbecad3753e582c08e6a3f805c8d00cbed756060a877d705bd8d8d
+ languageName: node
+ linkType: hard
+
+"@babel/helper-member-expression-to-functions@npm:^7.24.7, @babel/helper-member-expression-to-functions@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/helper-member-expression-to-functions@npm:7.24.8"
dependencies:
- "@babel/traverse": ^7.25.9
- "@babel/types": ^7.25.9
- checksum: 8e2f1979b6d596ac2a8cbf17f2cf709180fefc274ac3331408b48203fe19134ed87800774ef18838d0275c3965130bae22980d90caed756b7493631d4b2cf961
+ "@babel/traverse": ^7.24.8
+ "@babel/types": ^7.24.8
+ checksum: bf923d05d81b06857f4ca4fe9c528c9c447a58db5ea39595bb559eae2fce01a8266173db0fd6a2ec129d7bbbb9bb22f4e90008252f7c66b422c76630a878a4bc
languageName: node
linkType: hard
-"@babel/helper-module-imports@npm:^7.16.7, @babel/helper-module-imports@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-module-imports@npm:7.25.9"
+"@babel/helper-module-imports@npm:^7.16.7, @babel/helper-module-imports@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-module-imports@npm:7.24.7"
dependencies:
- "@babel/traverse": ^7.25.9
- "@babel/types": ^7.25.9
- checksum: 1b411ce4ca825422ef7065dffae7d8acef52023e51ad096351e3e2c05837e9bf9fca2af9ca7f28dc26d596a588863d0fedd40711a88e350b736c619a80e704e6
+ "@babel/traverse": ^7.24.7
+ "@babel/types": ^7.24.7
+ checksum: 8ac15d96d262b8940bc469052a048e06430bba1296369be695fabdf6799f201dd0b00151762b56012a218464e706bc033f27c07f6cec20c6f8f5fd6543c67054
languageName: node
linkType: hard
-"@babel/helper-module-transforms@npm:^7.25.9, @babel/helper-module-transforms@npm:^7.26.0":
- version: 7.26.0
- resolution: "@babel/helper-module-transforms@npm:7.26.0"
+"@babel/helper-module-transforms@npm:^7.24.7, @babel/helper-module-transforms@npm:^7.24.8, @babel/helper-module-transforms@npm:^7.24.9":
+ version: 7.24.9
+ resolution: "@babel/helper-module-transforms@npm:7.24.9"
dependencies:
- "@babel/helper-module-imports": ^7.25.9
- "@babel/helper-validator-identifier": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-module-imports": ^7.24.7
+ "@babel/helper-simple-access": ^7.24.7
+ "@babel/helper-split-export-declaration": ^7.24.7
+ "@babel/helper-validator-identifier": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: 942eee3adf2b387443c247a2c190c17c4fd45ba92a23087abab4c804f40541790d51ad5277e4b5b1ed8d5ba5b62de73857446b7742f835c18ebd350384e63917
+ checksum: ffcf11b678a8d3e6a243285cb5262c37f4d47d507653420c1f7f0bd27076e88177f2b7158850d1a470fcfe923426a2e6571c554c455a90c9755ff488ac36ac40
languageName: node
linkType: hard
-"@babel/helper-optimise-call-expression@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-optimise-call-expression@npm:7.25.9"
+"@babel/helper-optimise-call-expression@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-optimise-call-expression@npm:7.24.7"
dependencies:
- "@babel/types": ^7.25.9
- checksum: f09d0ad60c0715b9a60c31841b3246b47d67650c512ce85bbe24a3124f1a4d66377df793af393273bc6e1015b0a9c799626c48e53747581c1582b99167cc65dc
+ "@babel/types": ^7.24.7
+ checksum: 280654eaf90e92bf383d7eed49019573fb35a98c9e992668f701ad099957246721044be2068cf6840cb2299e0ad393705a1981c88c23a1048096a8d59e5f79a3
languageName: node
linkType: hard
-"@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.18.6, @babel/helper-plugin-utils@npm:^7.20.2, @babel/helper-plugin-utils@npm:^7.22.5, @babel/helper-plugin-utils@npm:^7.25.9, @babel/helper-plugin-utils@npm:^7.8.0":
- version: 7.25.9
- resolution: "@babel/helper-plugin-utils@npm:7.25.9"
- checksum: e19ec8acf0b696756e6d84531f532c5fe508dce57aa68c75572a77798bd04587a844a9a6c8ea7d62d673e21fdc174d091c9097fb29aea1c1b49f9c6eaa80f022
+"@babel/helper-plugin-utils@npm:^7.0.0, @babel/helper-plugin-utils@npm:^7.10.4, @babel/helper-plugin-utils@npm:^7.12.13, @babel/helper-plugin-utils@npm:^7.14.5, @babel/helper-plugin-utils@npm:^7.18.6, @babel/helper-plugin-utils@npm:^7.20.2, @babel/helper-plugin-utils@npm:^7.22.5, @babel/helper-plugin-utils@npm:^7.24.7, @babel/helper-plugin-utils@npm:^7.24.8, @babel/helper-plugin-utils@npm:^7.8.0, @babel/helper-plugin-utils@npm:^7.8.3":
+ version: 7.24.8
+ resolution: "@babel/helper-plugin-utils@npm:7.24.8"
+ checksum: 73b1a83ba8bcee21dc94de2eb7323207391715e4369fd55844bb15cf13e3df6f3d13a40786d990e6370bf0f571d94fc31f70dec96c1d1002058258c35ca3767a
languageName: node
linkType: hard
-"@babel/helper-remap-async-to-generator@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-remap-async-to-generator@npm:7.25.9"
+"@babel/helper-remap-async-to-generator@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-remap-async-to-generator@npm:7.24.7"
dependencies:
- "@babel/helper-annotate-as-pure": ^7.25.9
- "@babel/helper-wrap-function": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-annotate-as-pure": ^7.24.7
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-wrap-function": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: ea37ad9f8f7bcc27c109963b8ebb9d22bac7a5db2a51de199cb560e251d5593fe721e46aab2ca7d3e7a24b0aa4aff0eaf9c7307af9c2fd3a1d84268579073052
+ checksum: bab7be178f875350f22a2cb9248f67fe3a8a8128db77a25607096ca7599fd972bc7049fb11ed9e95b45a3f1dd1fac3846a3279f9cbac16f337ecb0e6ca76e1fc
languageName: node
linkType: hard
-"@babel/helper-replace-supers@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-replace-supers@npm:7.25.9"
+"@babel/helper-replace-supers@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-replace-supers@npm:7.24.7"
dependencies:
- "@babel/helper-member-expression-to-functions": ^7.25.9
- "@babel/helper-optimise-call-expression": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-member-expression-to-functions": ^7.24.7
+ "@babel/helper-optimise-call-expression": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: 84f40e12520b7023e52d289bf9d569a06284879fe23bbbacad86bec5d978b2669769f11b073fcfeb1567d8c547168323005fda88607a4681ecaeb4a5cdd48bb9
+ checksum: 2bf0d113355c60d86a04e930812d36f5691f26c82d4ec1739e5ec0a4c982c9113dad3167f7c74f888a96328bd5e696372232406d8200e5979e6e0dc2af5e7c76
languageName: node
linkType: hard
-"@babel/helper-simple-access@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-simple-access@npm:7.25.9"
+"@babel/helper-simple-access@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-simple-access@npm:7.24.7"
dependencies:
- "@babel/traverse": ^7.25.9
- "@babel/types": ^7.25.9
- checksum: 6d96c94b88e8288d15e5352c1221486bd4f62de8c7dc7c7b9f5b107ce2c79f67fec5ed71a0476e146f1fefbbbf1d69abe35dc821d80ce01fc7f472286c342421
+ "@babel/traverse": ^7.24.7
+ "@babel/types": ^7.24.7
+ checksum: ddbf55f9dea1900213f2a1a8500fabfd21c5a20f44dcfa957e4b0d8638c730f88751c77f678644f754f1a1dc73f4eb8b766c300deb45a9daad000e4247957819
languageName: node
linkType: hard
-"@babel/helper-skip-transparent-expression-wrappers@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-skip-transparent-expression-wrappers@npm:7.25.9"
+"@babel/helper-skip-transparent-expression-wrappers@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-skip-transparent-expression-wrappers@npm:7.24.7"
dependencies:
- "@babel/traverse": ^7.25.9
- "@babel/types": ^7.25.9
- checksum: fdbb5248932198bc26daa6abf0d2ac42cab9c2dbb75b7e9f40d425c8f28f09620b886d40e7f9e4e08ffc7aaa2cefe6fc2c44be7c20e81f7526634702fb615bdc
+ "@babel/traverse": ^7.24.7
+ "@babel/types": ^7.24.7
+ checksum: 11b28fe534ce2b1a67c4d8e51a7b5711a2a0a0cae802f74614eee54cca58c744d9a62f6f60103c41759e81c537d270bfd665bf368a6bea214c6052f2094f8407
languageName: node
linkType: hard
-"@babel/helper-string-parser@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-string-parser@npm:7.25.9"
- checksum: 6435ee0849e101681c1849868278b5aee82686ba2c1e27280e5e8aca6233af6810d39f8e4e693d2f2a44a3728a6ccfd66f72d71826a94105b86b731697cdfa99
+"@babel/helper-split-export-declaration@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-split-export-declaration@npm:7.24.7"
+ dependencies:
+ "@babel/types": ^7.24.7
+ checksum: e3ddc91273e5da67c6953f4aa34154d005a00791dc7afa6f41894e768748540f6ebcac5d16e72541aea0c89bee4b89b4da6a3d65972a0ea8bfd2352eda5b7e22
languageName: node
linkType: hard
-"@babel/helper-validator-identifier@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-validator-identifier@npm:7.25.9"
- checksum: 5b85918cb1a92a7f3f508ea02699e8d2422fe17ea8e82acd445006c0ef7520fbf48e3dbcdaf7b0a1d571fc3a2715a29719e5226636cb6042e15fe6ed2a590944
+"@babel/helper-string-parser@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/helper-string-parser@npm:7.24.8"
+ checksum: 39b03c5119216883878655b149148dc4d2e284791e969b19467a9411fccaa33f7a713add98f4db5ed519535f70ad273cdadfd2eb54d47ebbdeac5083351328ce
languageName: node
linkType: hard
-"@babel/helper-validator-option@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-validator-option@npm:7.25.9"
- checksum: 9491b2755948ebbdd68f87da907283698e663b5af2d2b1b02a2765761974b1120d5d8d49e9175b167f16f72748ffceec8c9cf62acfbee73f4904507b246e2b3d
+"@babel/helper-validator-identifier@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-validator-identifier@npm:7.24.7"
+ checksum: 6799ab117cefc0ecd35cd0b40ead320c621a298ecac88686a14cffceaac89d80cdb3c178f969861bf5fa5e4f766648f9161ea0752ecfe080d8e89e3147270257
languageName: node
linkType: hard
-"@babel/helper-wrap-function@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/helper-wrap-function@npm:7.25.9"
- dependencies:
- "@babel/template": ^7.25.9
- "@babel/traverse": ^7.25.9
- "@babel/types": ^7.25.9
- checksum: 8ec1701e60ae004415800c4a7a188f5564c73b4e4f3fdf58dd3f34a3feaa9753173f39bbd6d02e7ecc974f48155efc7940e62584435b3092c07728ee46a604ea
+"@babel/helper-validator-option@npm:^7.24.7, @babel/helper-validator-option@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/helper-validator-option@npm:7.24.8"
+ checksum: a52442dfa74be6719c0608fee3225bd0493c4057459f3014681ea1a4643cd38b68ff477fe867c4b356da7330d085f247f0724d300582fa4ab9a02efaf34d107c
languageName: node
linkType: hard
-"@babel/helpers@npm:^7.26.0":
- version: 7.26.0
- resolution: "@babel/helpers@npm:7.26.0"
+"@babel/helper-wrap-function@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/helper-wrap-function@npm:7.24.7"
dependencies:
- "@babel/template": ^7.25.9
- "@babel/types": ^7.26.0
- checksum: d77fe8d45033d6007eadfa440355c1355eed57902d5a302f450827ad3d530343430a21210584d32eef2f216ae463d4591184c6fc60cf205bbf3a884561469200
+ "@babel/helper-function-name": ^7.24.7
+ "@babel/template": ^7.24.7
+ "@babel/traverse": ^7.24.7
+ "@babel/types": ^7.24.7
+ checksum: 085bf130ed08670336e3976f5841ae44e3e10001131632e22ef234659341978d2fd37e65785f59b6cb1745481347fc3bce84b33a685cacb0a297afbe1d2b03af
languageName: node
linkType: hard
-"@babel/parser@npm:^7.20.15, @babel/parser@npm:^7.25.9, @babel/parser@npm:^7.26.0, @babel/parser@npm:^7.26.2, @babel/parser@npm:^7.4.5":
- version: 7.26.2
- resolution: "@babel/parser@npm:7.26.2"
+"@babel/helpers@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/helpers@npm:7.24.8"
dependencies:
- "@babel/types": ^7.26.0
- bin:
- parser: ./bin/babel-parser.js
- checksum: c88b5ea0adf357ef909cdc2c31e284a154943edc59f63f6e8a4c20bf773a1b2f3d8c2205e59c09ca7cdad91e7466300114548876529277a80651b6436a48d5d9
+ "@babel/template": ^7.24.7
+ "@babel/types": ^7.24.8
+ checksum: 2d7301b1b9c91e518c4766bae171230e243d98461c15eabbd44f8f9c83c297fad5c4a64ad80cfec9ca8e90412fc2b41ee86d7eb35dc8a7611c268bcf1317fe46
languageName: node
linkType: hard
-"@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:7.25.9"
+"@babel/highlight@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/highlight@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/traverse": ^7.25.9
- peerDependencies:
- "@babel/core": ^7.0.0
- checksum: b33d37dacf98a9c74f53959999adc37a258057668b62dba557e6865689433c53764673109eaba9102bf73b2ac4db162f0d9b89a6cca6f1b71d12f5908ec11da9
+ "@babel/helper-validator-identifier": ^7.24.7
+ chalk: ^2.4.2
+ js-tokens: ^4.0.0
+ picocolors: ^1.0.0
+ checksum: 5cd3a89f143671c4ac129960024ba678b669e6fc673ce078030f5175002d1d3d52bc10b22c5b916a6faf644b5028e9a4bd2bb264d053d9b05b6a98690f1d46f1
languageName: node
linkType: hard
-"@babel/plugin-bugfix-safari-class-field-initializer-scope@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-bugfix-safari-class-field-initializer-scope@npm:7.25.9"
+"@babel/parser@npm:^7.20.15, @babel/parser@npm:^7.24.7, @babel/parser@npm:^7.24.8, @babel/parser@npm:^7.4.5":
+ version: 7.24.8
+ resolution: "@babel/parser@npm:7.24.8"
+ bin:
+ parser: ./bin/babel-parser.js
+ checksum: 76f866333bfbd53800ac027419ae523bb0137fc63daa968232eb780e4390136bb6e497cb4a2cf6051a2c318aa335c2e6d2adc17079d60691ae7bde89b28c5688
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-bugfix-firefox-class-in-computed-class-key@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: d3e14ab1cb9cb50246d20cab9539f2fbd1e7ef1ded73980c8ad7c0561b4d5e0b144d362225f0976d47898e04cbd40f2000e208b0913bd788346cf7791b96af91
+ checksum: 68d315642b53af143aa17a71eb976cf431b51339aee584e29514a462b81c998636dd54219c2713b5f13e1df89eaf130dfab59683f9116825608708c81696b96c
languageName: node
linkType: hard
-"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:7.25.9"
+"@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: a9d1ee3fd100d3eb6799a2f2bbd785296f356c531d75c9369f71541811fa324270258a374db103ce159156d006da2f33370330558d0133e6f7584152c34997ca
+ checksum: 7eb4e7ce5e3d6db4b0fdbdfaaa301c2e58f38a7ee39d5a4259a1fda61a612e83d3e4bc90fc36fb0345baf57e1e1a071e0caffeb80218623ad163f2fdc2e53a54
languageName: node
linkType: hard
-"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:7.25.9"
+"@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-skip-transparent-expression-wrappers": ^7.25.9
- "@babel/plugin-transform-optional-chaining": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-skip-transparent-expression-wrappers": ^7.24.7
+ "@babel/plugin-transform-optional-chaining": ^7.24.7
peerDependencies:
"@babel/core": ^7.13.0
- checksum: 5b298b28e156f64de51cdb03a2c5b80c7f978815ef1026f3ae8b9fc48d28bf0a83817d8fbecb61ef8fb94a7201f62cca5103cc6e7b9e8f28e38f766d7905b378
+ checksum: 07b92878ac58a98ea1fdf6a8b4ec3413ba4fa66924e28b694d63ec5b84463123fbf4d7153b56cf3cedfef4a3482c082fe3243c04f8fb2c041b32b0e29b4a9e21
languageName: node
linkType: hard
-"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:7.25.9"
+"@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: c684593952ab1b40dfa4e64e98a07e7227c6db175c21bd0e6d71d2ad5d240fef4e4a984d56f05a494876542a022244fe1c1098f4116109fd90d06615e8a269b1
+ checksum: 8324d458db57060590942c7c2e9603880d07718ccb6450ec935105b8bd3c4393c4b8ada88e178c232258d91f33ffdcf2b1043d54e07a86989e50667ee100a32e
languageName: node
linkType: hard
@@ -407,15 +446,15 @@ __metadata:
linkType: hard
"@babel/plugin-proposal-decorators@npm:^7.13.5, @babel/plugin-proposal-decorators@npm:^7.16.7, @babel/plugin-proposal-decorators@npm:^7.20.13, @babel/plugin-proposal-decorators@npm:^7.23.2":
- version: 7.25.9
- resolution: "@babel/plugin-proposal-decorators@npm:7.25.9"
+ version: 7.24.7
+ resolution: "@babel/plugin-proposal-decorators@npm:7.24.7"
dependencies:
- "@babel/helper-create-class-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/plugin-syntax-decorators": ^7.25.9
+ "@babel/helper-create-class-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-decorators": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: ff598127818ac8e704009f1a9a207766ada5f84f6ca74e9de662cb6ce32bcb846c28fd52d6c5df9c55b4eac9a2a3492aa71fbd5cef0569a14b6f12003df22af2
+ checksum: 75aa5ff5537d5ff77f0e52eb161a2f67c7d2bfd8f2000be710dedb1dd238b43ce53d2f734f84bda95b3f013b69de126403f84167f4eddb1d35e8f26257ee07c8
languageName: node
linkType: hard
@@ -469,47 +508,157 @@ __metadata:
languageName: node
linkType: hard
-"@babel/plugin-syntax-decorators@npm:^7.16.7, @babel/plugin-syntax-decorators@npm:^7.23.3, @babel/plugin-syntax-decorators@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-syntax-decorators@npm:7.25.9"
+"@babel/plugin-syntax-async-generators@npm:^7.8.4":
+ version: 7.8.4
+ resolution: "@babel/plugin-syntax-async-generators@npm:7.8.4"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.8.0
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: 7ed1c1d9b9e5b64ef028ea5e755c0be2d4e5e4e3d6cf7df757b9a8c4cfa4193d268176d0f1f7fbecdda6fe722885c7fda681f480f3741d8a2d26854736f05367
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-class-properties@npm:^7.12.13":
+ version: 7.12.13
+ resolution: "@babel/plugin-syntax-class-properties@npm:7.12.13"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.12.13
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: 24f34b196d6342f28d4bad303612d7ff566ab0a013ce89e775d98d6f832969462e7235f3e7eaf17678a533d4be0ba45d3ae34ab4e5a9dcbda5d98d49e5efa2fc
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-class-static-block@npm:^7.14.5":
+ version: 7.14.5
+ resolution: "@babel/plugin-syntax-class-static-block@npm:7.14.5"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.14.5
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: aaf58b17e6aa08f41f93897daa93c601a486233a0375b4231799fc5c4e7c98480aaad3c1c44cf391a62e428c5f6546f76488a1023a4036bb87cd61fa79f1173b
+ checksum: 3e80814b5b6d4fe17826093918680a351c2d34398a914ce6e55d8083d72a9bdde4fbaf6a2dcea0e23a03de26dc2917ae3efd603d27099e2b98380345703bf948
languageName: node
linkType: hard
-"@babel/plugin-syntax-import-assertions@npm:^7.26.0":
- version: 7.26.0
- resolution: "@babel/plugin-syntax-import-assertions@npm:7.26.0"
+"@babel/plugin-syntax-decorators@npm:^7.16.7, @babel/plugin-syntax-decorators@npm:^7.23.3, @babel/plugin-syntax-decorators@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-syntax-decorators@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: b58f2306df4a690ca90b763d832ec05202c50af787158ff8b50cdf3354359710bce2e1eb2b5135fcabf284756ac8eadf09ca74764aa7e76d12a5cac5f6b21e67
+ checksum: dc303bcc1f5df61638f1eddc69dd55e65574bd43d8a4a098d3589f5a742e93a4ca3a173967b34eb95e4eaa994799b4c72bfed8688036e43c634be7f24db01ac5
languageName: node
linkType: hard
-"@babel/plugin-syntax-import-attributes@npm:^7.26.0":
- version: 7.26.0
- resolution: "@babel/plugin-syntax-import-attributes@npm:7.26.0"
+"@babel/plugin-syntax-dynamic-import@npm:^7.8.3":
+ version: 7.8.3
+ resolution: "@babel/plugin-syntax-dynamic-import@npm:7.8.3"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.8.0
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: ce307af83cf433d4ec42932329fad25fa73138ab39c7436882ea28742e1c0066626d224e0ad2988724c82644e41601cef607b36194f695cb78a1fcdc959637bd
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-export-namespace-from@npm:^7.8.3":
+ version: 7.8.3
+ resolution: "@babel/plugin-syntax-export-namespace-from@npm:7.8.3"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.8.3
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: 85740478be5b0de185228e7814451d74ab8ce0a26fcca7613955262a26e99e8e15e9da58f60c754b84515d4c679b590dbd3f2148f0f58025f4ae706f1c5a5d4a
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-import-assertions@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-syntax-import-assertions@npm:7.24.7"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.24.7
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: c4d67be4eb1d4637e361477dbe01f5b392b037d17c1f861cfa0faa120030e137aab90a9237931b8040fd31d1e5d159e11866fa1165f78beef7a3be876a391a17
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-import-attributes@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-syntax-import-attributes@npm:7.24.7"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.24.7
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: 590dbb5d1a15264f74670b427b8d18527672c3d6c91d7bae7e65f80fd810edbc83d90e68065088644cbad3f2457ed265a54a9956fb789fcb9a5b521822b3a275
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-import-meta@npm:^7.10.4":
+ version: 7.10.4
+ resolution: "@babel/plugin-syntax-import-meta@npm:7.10.4"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.10.4
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: 166ac1125d10b9c0c430e4156249a13858c0366d38844883d75d27389621ebe651115cb2ceb6dc011534d5055719fa1727b59f39e1ab3ca97820eef3dcab5b9b
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-json-strings@npm:^7.8.3":
+ version: 7.8.3
+ resolution: "@babel/plugin-syntax-json-strings@npm:7.8.3"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.8.0
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: bf5aea1f3188c9a507e16efe030efb996853ca3cadd6512c51db7233cc58f3ac89ff8c6bdfb01d30843b161cfe7d321e1bf28da82f7ab8d7e6bc5464666f354a
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-jsx@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-syntax-jsx@npm:7.24.7"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.24.7
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: 7a5ca629d8ca1e1ee78705a78e58c12920d07ed8006d7e7232b31296a384ff5e41d7b649bde5561196041037bbb9f9715be1d1c20975df87ca204f34ad15b965
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-logical-assignment-operators@npm:^7.10.4":
+ version: 7.10.4
+ resolution: "@babel/plugin-syntax-logical-assignment-operators@npm:7.10.4"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.10.4
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: aff33577037e34e515911255cdbb1fd39efee33658aa00b8a5fd3a4b903585112d037cce1cc9e4632f0487dc554486106b79ccd5ea63a2e00df4363f6d4ff886
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-nullish-coalescing-operator@npm:^7.8.3":
+ version: 7.8.3
+ resolution: "@babel/plugin-syntax-nullish-coalescing-operator@npm:7.8.3"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.8.0
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: c122aa577166c80ee67f75aebebeef4150a132c4d3109d25d7fc058bf802946f883e330f20b78c1d3e3a5ada631c8780c263d2d01b5dbaecc69efefeedd42916
+ checksum: 87aca4918916020d1fedba54c0e232de408df2644a425d153be368313fdde40d96088feed6c4e5ab72aac89be5d07fef2ddf329a15109c5eb65df006bf2580d1
languageName: node
linkType: hard
-"@babel/plugin-syntax-jsx@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-syntax-jsx@npm:7.25.9"
+"@babel/plugin-syntax-numeric-separator@npm:^7.10.4":
+ version: 7.10.4
+ resolution: "@babel/plugin-syntax-numeric-separator@npm:7.10.4"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.10.4
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: bb609d1ffb50b58f0c1bac8810d0e46a4f6c922aa171c458f3a19d66ee545d36e782d3bffbbc1fed0dc65a558bdce1caf5279316583c0fff5a2c1658982a8563
+ checksum: 01ec5547bd0497f76cc903ff4d6b02abc8c05f301c88d2622b6d834e33a5651aa7c7a3d80d8d57656a4588f7276eba357f6b7e006482f5b564b7a6488de493a1
languageName: node
linkType: hard
@@ -524,6 +673,28 @@ __metadata:
languageName: node
linkType: hard
+"@babel/plugin-syntax-optional-catch-binding@npm:^7.8.3":
+ version: 7.8.3
+ resolution: "@babel/plugin-syntax-optional-catch-binding@npm:7.8.3"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.8.0
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: 910d90e72bc90ea1ce698e89c1027fed8845212d5ab588e35ef91f13b93143845f94e2539d831dc8d8ededc14ec02f04f7bd6a8179edd43a326c784e7ed7f0b9
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-optional-chaining@npm:^7.8.3":
+ version: 7.8.3
+ resolution: "@babel/plugin-syntax-optional-chaining@npm:7.8.3"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.8.0
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: eef94d53a1453361553c1f98b68d17782861a04a392840341bc91780838dd4e695209c783631cf0de14c635758beafb6a3a65399846ffa4386bff90639347f30
+ languageName: node
+ linkType: hard
+
"@babel/plugin-syntax-private-property-in-object@npm:^7.14.5":
version: 7.14.5
resolution: "@babel/plugin-syntax-private-property-in-object@npm:7.14.5"
@@ -535,14 +706,25 @@ __metadata:
languageName: node
linkType: hard
-"@babel/plugin-syntax-typescript@npm:^7.2.0, @babel/plugin-syntax-typescript@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-syntax-typescript@npm:7.25.9"
+"@babel/plugin-syntax-top-level-await@npm:^7.14.5":
+ version: 7.14.5
+ resolution: "@babel/plugin-syntax-top-level-await@npm:7.14.5"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.14.5
+ peerDependencies:
+ "@babel/core": ^7.0.0-0
+ checksum: bbd1a56b095be7820029b209677b194db9b1d26691fe999856462e66b25b281f031f3dfd91b1619e9dcf95bebe336211833b854d0fb8780d618e35667c2d0d7e
+ languageName: node
+ linkType: hard
+
+"@babel/plugin-syntax-typescript@npm:^7.2.0, @babel/plugin-syntax-typescript@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-syntax-typescript@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 0e9821e8ba7d660c36c919654e4144a70546942ae184e85b8102f2322451eae102cbfadbcadd52ce077a2b44b400ee52394c616feab7b5b9f791b910e933fd33
+ checksum: 56fe84f3044ecbf038977281648db6b63bd1301f2fff6595820dc10ee276c1d1586919d48d52a8d497ecae32c958be38f42c1c8d174dc58aad856c516dc5b35a
languageName: node
linkType: hard
@@ -558,576 +740,565 @@ __metadata:
languageName: node
linkType: hard
-"@babel/plugin-transform-arrow-functions@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-arrow-functions@npm:7.25.9"
+"@babel/plugin-transform-arrow-functions@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-arrow-functions@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: c29f081224859483accf55fb4d091db2aac0dcd0d7954bac5ca889030cc498d3f771aa20eb2e9cd8310084ec394d85fa084b97faf09298b6bc9541182b3eb5bb
+ checksum: 707c209b5331c7dc79bd326128c6a6640dbd62a78da1653c844db20c4f36bf7b68454f1bc4d2d051b3fde9136fa291f276ec03a071bb00ee653069ff82f91010
languageName: node
linkType: hard
-"@babel/plugin-transform-async-generator-functions@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-async-generator-functions@npm:7.25.9"
+"@babel/plugin-transform-async-generator-functions@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-async-generator-functions@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-remap-async-to-generator": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-remap-async-to-generator": ^7.24.7
+ "@babel/plugin-syntax-async-generators": ^7.8.4
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 41e02c18c2a57de9f274fa2c5a1bf81a20ab5f321db29cc3051512b9c5bdf3f1a8c42f1fc282cb62343c6d50849f992eede954d5f7fb5e7df48ae0c59ea7e054
+ checksum: 112e3b18f9c496ebc01209fc27f0b41a3669c479c7bc44f7249383172b432ebaae1e523caa7c6ecbd2d0d7adcb7e5769fe2798f8cb01c08cd57232d1bb6d8ad4
languageName: node
linkType: hard
-"@babel/plugin-transform-async-to-generator@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-async-to-generator@npm:7.25.9"
+"@babel/plugin-transform-async-to-generator@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-async-to-generator@npm:7.24.7"
dependencies:
- "@babel/helper-module-imports": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-remap-async-to-generator": ^7.25.9
+ "@babel/helper-module-imports": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-remap-async-to-generator": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: b3ad50fb93c171644d501864620ed23952a46648c4df10dc9c62cc9ad08031b66bd272cfdd708faeee07c23b6251b16f29ce0350473e4c79f0c32178d38ce3a6
+ checksum: 13704fb3b83effc868db2b71bfb2c77b895c56cb891954fc362e95e200afd523313b0e7cf04ce02f45b05e76017c5b5fa8070c92613727a35131bb542c253a36
languageName: node
linkType: hard
-"@babel/plugin-transform-block-scoped-functions@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-block-scoped-functions@npm:7.25.9"
+"@babel/plugin-transform-block-scoped-functions@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-block-scoped-functions@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: bf31896556b33a80f017af3d445ceb532ec0f5ca9d69bc211a963ac92514d172d5c24c5ac319f384d9dfa7f1a4d8dc23032c2fe3e74f98a59467ecd86f7033ae
+ checksum: 249cdcbff4e778b177245f9652b014ea4f3cd245d83297f10a7bf6d97790074089aa62bcde8c08eb299c5e68f2faed346b587d3ebac44d625ba9a83a4ee27028
languageName: node
linkType: hard
-"@babel/plugin-transform-block-scoping@npm:^7.12.1, @babel/plugin-transform-block-scoping@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-block-scoping@npm:7.25.9"
+"@babel/plugin-transform-block-scoping@npm:^7.12.1, @babel/plugin-transform-block-scoping@npm:^7.21.0, @babel/plugin-transform-block-scoping@npm:^7.22.5, @babel/plugin-transform-block-scoping@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-block-scoping@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: e869500cfb1995e06e64c9608543b56468639809febfcdd6fcf683bc0bf1be2431cacf2981a168a1a14f4766393e37bc9f7c96d25bc5b5f39a64a8a8ad0bf8e0
+ checksum: 039206155533600f079f3a455f85888dd7d4970ff7ffa85ef44760f4f5acb9f19c9d848cc1fec1b9bdbc0dfec9e8a080b90d0ab66ad2bdc7138b5ca4ba96e61c
languageName: node
linkType: hard
-"@babel/plugin-transform-class-properties@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-class-properties@npm:7.25.9"
+"@babel/plugin-transform-class-properties@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-class-properties@npm:7.24.7"
dependencies:
- "@babel/helper-create-class-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-class-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: a8d69e2c285486b63f49193cbcf7a15e1d3a5f632c1c07d7a97f65306df7f554b30270b7378dde143f8b557d1f8f6336c643377943dec8ec405e4cd11e90b9ea
+ checksum: 1348d7ce74da38ba52ea85b3b4289a6a86913748569ef92ef0cff30702a9eb849e5eaf59f1c6f3517059aa68115fb3067e389735dccacca39add4e2b0c67e291
languageName: node
linkType: hard
-"@babel/plugin-transform-class-static-block@npm:^7.16.7, @babel/plugin-transform-class-static-block@npm:^7.22.11, @babel/plugin-transform-class-static-block@npm:^7.26.0":
- version: 7.26.0
- resolution: "@babel/plugin-transform-class-static-block@npm:7.26.0"
+"@babel/plugin-transform-class-static-block@npm:^7.16.7, @babel/plugin-transform-class-static-block@npm:^7.22.11, @babel/plugin-transform-class-static-block@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-class-static-block@npm:7.24.7"
dependencies:
- "@babel/helper-create-class-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-class-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-class-static-block": ^7.14.5
peerDependencies:
"@babel/core": ^7.12.0
- checksum: d779d4d3a6f8d363f67fcbd928c15baa72be8d3b86c6d05e0300b50e66e2c4be9e99398b803d13064bc79d90ae36e37a505e3dc8af11904459804dec07660246
+ checksum: 324049263504f18416f1c3e24033baebfafd05480fdd885c8ebe6f2b415b0fc8e0b98d719360f9e30743cc78ac387fabc0b3c6606d2b54135756ffb92963b382
languageName: node
linkType: hard
-"@babel/plugin-transform-classes@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-classes@npm:7.25.9"
+"@babel/plugin-transform-classes@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/plugin-transform-classes@npm:7.24.8"
dependencies:
- "@babel/helper-annotate-as-pure": ^7.25.9
- "@babel/helper-compilation-targets": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-replace-supers": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-annotate-as-pure": ^7.24.7
+ "@babel/helper-compilation-targets": ^7.24.8
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-function-name": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.8
+ "@babel/helper-replace-supers": ^7.24.7
+ "@babel/helper-split-export-declaration": ^7.24.7
globals: ^11.1.0
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: d12584f72125314cc0fa8c77586ece2888d677788ac75f7393f5da574dfe4e45a556f7e3488fab29c8777ab3e5856d7a2d79f6df02834083aaa9d766440e3c68
+ checksum: 9c0f547d67e255b37055461df9c1a578c29bf59c7055bd5b40b07b92e5448af3ca8d853d50056125b7dae9bfe3a4cf1559d61b9ccbc3d2578dd43f15386f12fe
languageName: node
linkType: hard
-"@babel/plugin-transform-computed-properties@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-computed-properties@npm:7.25.9"
+"@babel/plugin-transform-computed-properties@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-computed-properties@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/template": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/template": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: f77fa4bc0c1e0031068172df28852388db6b0f91c268d037905f459607cf1e8ebab00015f9f179f4ad96e11c5f381b635cd5dc4e147a48c7ac79d195ae7542de
+ checksum: 0cf8c1b1e4ea57dec8d4612460d84fd4cdbf71a7499bb61ee34632cf89018a59eee818ffca88a8d99ee7057c20a4257044d7d463fda6daef9bf1db9fa81563cb
languageName: node
linkType: hard
-"@babel/plugin-transform-destructuring@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-destructuring@npm:7.25.9"
+"@babel/plugin-transform-destructuring@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/plugin-transform-destructuring@npm:7.24.8"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.8
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 965f63077a904828f4adee91393f83644098533442b8217d5a135c23a759a4c252c714074c965676a60d2c33f610f579a4eeb59ffd783724393af61c0ca45fef
+ checksum: 0b4bd3d608979a1e5bd97d9d42acd5ad405c7fffa61efac4c7afd8e86ea6c2d91ab2d94b6a98d63919571363fe76e0b03c4ff161f0f60241b895842596e4a999
languageName: node
linkType: hard
-"@babel/plugin-transform-dotall-regex@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-dotall-regex@npm:7.25.9"
+"@babel/plugin-transform-dotall-regex@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-dotall-regex@npm:7.24.7"
dependencies:
- "@babel/helper-create-regexp-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-regexp-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 8bdf1bb9e6e3a2cc8154ae88a3872faa6dc346d6901994505fb43ac85f858728781f1219f40b67f7bb0687c507450236cb7838ac68d457e65637f98500aa161b
+ checksum: 67b10fc6abb1f61f0e765288eb4c6d63d1d0f9fc0660e69f6f2170c56fa16bc74e49857afc644beda112b41771cd90cf52df0940d11e97e52617c77c7dcff171
languageName: node
linkType: hard
-"@babel/plugin-transform-duplicate-keys@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-duplicate-keys@npm:7.25.9"
+"@babel/plugin-transform-duplicate-keys@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-duplicate-keys@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: b553eebc328797ead6be5ba5bdaf2f1222cea8a5bd33fb4ed625975d4f9b510bfb0d688d97e314cd4b4a48b279bea7b3634ad68c1b41ee143c3082db0ae74037
+ checksum: d1da2ff85ecb56a63f4ccfd9dc9ae69400d85f0dadf44ecddd9e71c6e5c7a9178e74e3a9637555f415a2bb14551e563f09f98534ab54f53d25e8439fdde6ba2d
languageName: node
linkType: hard
-"@babel/plugin-transform-duplicate-named-capturing-groups-regex@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-duplicate-named-capturing-groups-regex@npm:7.25.9"
+"@babel/plugin-transform-dynamic-import@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-dynamic-import@npm:7.24.7"
dependencies:
- "@babel/helper-create-regexp-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- peerDependencies:
- "@babel/core": ^7.0.0
- checksum: f7233cf596be8c6843d31951afaf2464a62a610cb89c72c818c044765827fab78403ab8a7d3a6386f838c8df574668e2a48f6c206b1d7da965aff9c6886cb8e6
- languageName: node
- linkType: hard
-
-"@babel/plugin-transform-dynamic-import@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-dynamic-import@npm:7.25.9"
- dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-dynamic-import": ^7.8.3
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: aaca1ccda819be9b2b85af47ba08ddd2210ff2dbea222f26e4cd33f97ab020884bf81a66197e50872721e9daf36ceb5659502c82199884ea74d5d75ecda5c58b
+ checksum: 776509ff62ab40c12be814a342fc56a5cc09b91fb63032b2633414b635875fd7da03734657be0f6db2891fe6e3033b75d5ddb6f2baabd1a02e4443754a785002
languageName: node
linkType: hard
-"@babel/plugin-transform-exponentiation-operator@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-exponentiation-operator@npm:7.25.9"
+"@babel/plugin-transform-exponentiation-operator@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-exponentiation-operator@npm:7.24.7"
dependencies:
- "@babel/helper-builder-binary-assignment-operator-visitor": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-builder-binary-assignment-operator-visitor": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 57e1bb4135dd16782fe84b49dd360cce8f9bf5f62eb10424dcdaf221e54a8bacdf50f2541c5ac01dea9f833a6c628613d71be915290938a93454389cba4de06b
+ checksum: 23c84a23eb56589fdd35a3540f9a1190615be069110a2270865223c03aee3ba4e0fc68fe14850800cf36f0712b26e4964d3026235261f58f0405a29fe8dac9b1
languageName: node
linkType: hard
-"@babel/plugin-transform-export-namespace-from@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-export-namespace-from@npm:7.25.9"
+"@babel/plugin-transform-export-namespace-from@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-export-namespace-from@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-export-namespace-from": ^7.8.3
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 4dfe8df86c5b1d085d591290874bb2d78a9063090d71567ed657a418010ad333c3f48af2c974b865f53bbb718987a065f89828d43279a7751db1a56c9229078d
+ checksum: 3bd3a10038f10ae0dea1ee42137f3edcf7036b5e9e570a0d1cbd0865f03658990c6c2d84fa2475f87a754e7dc5b46766c16f7ce5c9b32c3040150b6a21233a80
languageName: node
linkType: hard
-"@babel/plugin-transform-for-of@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-for-of@npm:7.25.9"
+"@babel/plugin-transform-for-of@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-for-of@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-skip-transparent-expression-wrappers": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-skip-transparent-expression-wrappers": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 41b56e70256a29fc26ed7fb95ece062d7ec2f3b6ea8f0686349ffd004cd4816132085ee21165b89c502ee7161cb7cfb12510961638851357945dc7bc546475b7
+ checksum: a53b42dc93ab4b7d1ebd3c695b52be22b3d592f6a3dbdb3dc2fea2c8e0a7e1508fe919864c455cde552aec44ce7518625fccbb70c7063373ca228d884f4f49ea
languageName: node
linkType: hard
-"@babel/plugin-transform-function-name@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-function-name@npm:7.25.9"
+"@babel/plugin-transform-function-name@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-function-name@npm:7.24.7"
dependencies:
- "@babel/helper-compilation-targets": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-compilation-targets": ^7.24.7
+ "@babel/helper-function-name": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: a8d7c8d019a6eb57eab5ca1be3e3236f175557d55b1f3b11f8ad7999e3fbb1cf37905fd8cb3a349bffb4163a558e9f33b63f631597fdc97c858757deac1b2fd7
+ checksum: 8eb1a67894a124910b5a67630bed4307757504381f39f0fb5cf82afc7ae8647dbc03b256d13865b73a749b9071b68e9fb8a28cef2369917b4299ebb93fd66146
languageName: node
linkType: hard
-"@babel/plugin-transform-json-strings@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-json-strings@npm:7.25.9"
+"@babel/plugin-transform-json-strings@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-json-strings@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-json-strings": ^7.8.3
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: e2498d84761cfd05aaea53799933d55af309c9d6204e66b38778792d171e4d1311ad34f334259a3aa3407dd0446f6bd3e390a1fcb8ce2e42fe5aabed0e41bee1
+ checksum: 88874d0b7a1ddea66c097fc0abb68801ffae194468aa44b828dde9a0e20ac5d8647943793de86092eabaa2911c96f67a6b373793d4bb9c932ef81b2711c06c2e
languageName: node
linkType: hard
-"@babel/plugin-transform-literals@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-literals@npm:7.25.9"
+"@babel/plugin-transform-literals@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-literals@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 3cca75823a38aab599bc151b0fa4d816b5e1b62d6e49c156aa90436deb6e13649f5505973151a10418b64f3f9d1c3da53e38a186402e0ed7ad98e482e70c0c14
+ checksum: 3c075cc093a3dd9e294b8b7d6656e65f889e7ca2179ca27978dcd65b4dc4885ebbfb327408d7d8f483c55547deed00ba840956196f3ac8a3c3d2308a330a8c23
languageName: node
linkType: hard
-"@babel/plugin-transform-logical-assignment-operators@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-logical-assignment-operators@npm:7.25.9"
+"@babel/plugin-transform-logical-assignment-operators@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-logical-assignment-operators@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-logical-assignment-operators": ^7.10.4
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 8c6febb4ac53852314d28b5e2c23d5dbbff7bf1e57d61f9672e0d97531ef7778b3f0ad698dcf1179f5486e626c77127508916a65eb846a89e98a92f70ed3537b
+ checksum: 3367ce0be243704dc6fce23e86a592c4380f01998ee5dd9f94c54b1ef7b971ac6f8a002901eb51599ac6cbdc0d067af8d1a720224fca1c40fde8bb8aab804aac
languageName: node
linkType: hard
-"@babel/plugin-transform-member-expression-literals@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-member-expression-literals@npm:7.25.9"
+"@babel/plugin-transform-member-expression-literals@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-member-expression-literals@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: db92041ae87b8f59f98b50359e0bb172480f6ba22e5e76b13bdfe07122cbf0daa9cd8ad2e78dcb47939938fed88ad57ab5989346f64b3a16953fc73dea3a9b1f
+ checksum: 2720c57aa3bf70576146ba7d6ea03227f4611852122d76d237924f7b008dafc952e6ae61a19e5024f26c665f44384bbd378466f01b6bd1305b3564a3b7fb1a5d
languageName: node
linkType: hard
-"@babel/plugin-transform-modules-amd@npm:^7.13.0, @babel/plugin-transform-modules-amd@npm:^7.20.11, @babel/plugin-transform-modules-amd@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-modules-amd@npm:7.25.9"
+"@babel/plugin-transform-modules-amd@npm:^7.13.0, @babel/plugin-transform-modules-amd@npm:^7.20.11, @babel/plugin-transform-modules-amd@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-modules-amd@npm:7.24.7"
dependencies:
- "@babel/helper-module-transforms": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-module-transforms": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: baad1f6fd0e0d38e9a9c1086a06abdc014c4c653fd452337cadfe23fb5bd8bf4368d1bc433a5ac8e6421bc0732ebb7c044cf3fb39c1b7ebe967d66e26c4e5cec
+ checksum: f1dd0fb2f46c0f8f21076b8c7ccd5b33a85ce6dcb31518ea4c648d9a5bb2474cd4bd87c9b1b752e68591e24b022e334ba0d07631fef2b6b4d8a4b85cf3d581f5
languageName: node
linkType: hard
-"@babel/plugin-transform-modules-commonjs@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-modules-commonjs@npm:7.25.9"
+"@babel/plugin-transform-modules-commonjs@npm:^7.24.7, @babel/plugin-transform-modules-commonjs@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/plugin-transform-modules-commonjs@npm:7.24.8"
dependencies:
- "@babel/helper-module-transforms": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-simple-access": ^7.25.9
+ "@babel/helper-module-transforms": ^7.24.8
+ "@babel/helper-plugin-utils": ^7.24.8
+ "@babel/helper-simple-access": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 4f101f0ea4a57d1d27a7976d668c63a7d0bbb0d9c1909d8ac43c785fd1496c31e6552ffd9673730c088873df1bc64f1cc4aad7c3c90413ac5e80b33e336d80e4
+ checksum: a4cf95b1639c33382064b44558f73ee5fac023f2a94d16e549d2bb55ceebd5cbc10fcddd505d08cd5bc97f5a64af9fd155512358b7dcf7b1a0082e8945cf21c5
languageName: node
linkType: hard
-"@babel/plugin-transform-modules-systemjs@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-modules-systemjs@npm:7.25.9"
+"@babel/plugin-transform-modules-systemjs@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-modules-systemjs@npm:7.24.7"
dependencies:
- "@babel/helper-module-transforms": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-validator-identifier": ^7.25.9
- "@babel/traverse": ^7.25.9
+ "@babel/helper-hoist-variables": ^7.24.7
+ "@babel/helper-module-transforms": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-validator-identifier": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: bf446202f372ba92dc0db32b24b56225b6e3ad3b227e31074de8b86fdec01c273ae2536873e38dbe3ceb1cd0894209343adeaa37df208e3fa88c0c7dffec7924
+ checksum: 8af7a9db2929991d82cfdf41fb175dee344274d39b39122f8c35f24b5d682f98368e3d8f5130401298bd21412df21d416a7d8b33b59c334fae3d3c762118b1d8
languageName: node
linkType: hard
-"@babel/plugin-transform-modules-umd@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-modules-umd@npm:7.25.9"
+"@babel/plugin-transform-modules-umd@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-modules-umd@npm:7.24.7"
dependencies:
- "@babel/helper-module-transforms": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-module-transforms": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 946db66be5f04ab9ee56c424b00257276ec094aa2f148508927e6085239f76b00304fa1e33026d29eccdbe312efea15ca3d92e74a12689d7f0cdd9a7ba1a6c54
+ checksum: 9ff1c464892efe042952ba778468bda6131b196a2729615bdcc3f24cdc94014f016a4616ee5643c5845bade6ba698f386833e61056d7201314b13a7fd69fac88
languageName: node
linkType: hard
-"@babel/plugin-transform-named-capturing-groups-regex@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-named-capturing-groups-regex@npm:7.25.9"
+"@babel/plugin-transform-named-capturing-groups-regex@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-named-capturing-groups-regex@npm:7.24.7"
dependencies:
- "@babel/helper-create-regexp-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-regexp-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: 434346ba05cf74e3f4704b3bdd439287b95cd2a8676afcdc607810b8c38b6f4798cd69c1419726b2e4c7204e62e4a04d31b0360e91ca57a930521c9211e07789
+ checksum: f1c6c7b5d60a86b6d7e4dd098798e1d393d55e993a0b57a73b53640c7a94985b601a96bdacee063f809a9a700bcea3a2ff18e98fa561554484ac56b761d774bd
languageName: node
linkType: hard
-"@babel/plugin-transform-new-target@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-new-target@npm:7.25.9"
+"@babel/plugin-transform-new-target@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-new-target@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: f8113539919aafce52f07b2bd182c771a476fe1d5d96d813460b33a16f173f038929369c595572cadc1f7bd8cb816ce89439d056e007770ddd7b7a0878e7895f
+ checksum: 3cb94cd1076b270f768f91fdcf9dd2f6d487f8dbfff3df7ca8d07b915900b86d02769a35ba1407d16fe49499012c8f055e1741299e2c880798b953d942a8fa1b
languageName: node
linkType: hard
-"@babel/plugin-transform-nullish-coalescing-operator@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-nullish-coalescing-operator@npm:7.25.9"
+"@babel/plugin-transform-nullish-coalescing-operator@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-nullish-coalescing-operator@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-nullish-coalescing-operator": ^7.8.3
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 26e03b1c2c0408cc300e46d8f8cb639653ff3a7b03456d0d8afbb53c44f33a89323f51d99991dade3a5676921119bbdf869728bb7911799b5ef99ffafa2cdd24
+ checksum: 4a9221356401d87762afbc37a9e8e764afc2daf09c421117537820f8cfbed6876888372ad3a7bcfae2d45c95f026651f050ab4020b777be31d3ffb00908dbdd3
languageName: node
linkType: hard
-"@babel/plugin-transform-numeric-separator@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-numeric-separator@npm:7.25.9"
+"@babel/plugin-transform-numeric-separator@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-numeric-separator@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-numeric-separator": ^7.10.4
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 0528ef041ed88e8c3f51624ee87b8182a7f246fe4013f0572788e0727d20795b558f2b82e3989b5dd416cbd339500f0d88857de41b6d3b6fdacb1d5344bcc5b1
+ checksum: 561b5f1d08b2c3f92ce849f092751558b5e6cfeb7eb55c79e7375c34dd9c3066dce5e630bb439affef6adcf202b6cbcaaa23870070276fa5bb429c8f5b8c7514
languageName: node
linkType: hard
-"@babel/plugin-transform-object-rest-spread@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-object-rest-spread@npm:7.25.9"
+"@babel/plugin-transform-object-rest-spread@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-object-rest-spread@npm:7.24.7"
dependencies:
- "@babel/helper-compilation-targets": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/plugin-transform-parameters": ^7.25.9
+ "@babel/helper-compilation-targets": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-object-rest-spread": ^7.8.3
+ "@babel/plugin-transform-parameters": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: a8ff73e1c46a03056b3a2236bafd6b3a4b83da93afe7ee24a50d0a8088150bf85bc5e5977daa04e66ff5fb7613d02d63ad49b91ebb64cf3f3022598d722e3a7a
+ checksum: 169d257b9800c13e1feb4c37fb05dae84f702e58b342bb76e19e82e6692b7b5337c9923ee89e3916a97c0dd04a3375bdeca14f5e126f110bbacbeb46d1886ca2
languageName: node
linkType: hard
-"@babel/plugin-transform-object-super@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-object-super@npm:7.25.9"
+"@babel/plugin-transform-object-super@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-object-super@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-replace-supers": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-replace-supers": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 1817b5d8b80e451ae1ad9080cca884f4f16df75880a158947df76a2ed8ab404d567a7dce71dd8051ef95f90fbe3513154086a32aba55cc76027f6cbabfbd7f98
+ checksum: f71e607a830ee50a22fa1a2686524d3339440cf9dea63032f6efbd865cfe4e35000e1e3f3492459e5c986f7c0c07dc36938bf3ce61fc9ba5f8ab732d0b64ab37
languageName: node
linkType: hard
-"@babel/plugin-transform-optional-catch-binding@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-optional-catch-binding@npm:7.25.9"
+"@babel/plugin-transform-optional-catch-binding@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-optional-catch-binding@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-optional-catch-binding": ^7.8.3
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: b46a8d1e91829f3db5c252583eb00d05a779b4660abeea5500fda0f8ffa3584fd18299443c22f7fddf0ed9dfdb73c782c43b445dc468d4f89803f2356963b406
+ checksum: 7229f3a5a4facaab40f4fdfc7faabc157dc38a67d66bed7936599f4bc509e0bff636f847ac2aa45294881fce9cf8a0a460b85d2a465b7b977de9739fce9b18f6
languageName: node
linkType: hard
-"@babel/plugin-transform-optional-chaining@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-optional-chaining@npm:7.25.9"
+"@babel/plugin-transform-optional-chaining@npm:^7.24.7, @babel/plugin-transform-optional-chaining@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/plugin-transform-optional-chaining@npm:7.24.8"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-skip-transparent-expression-wrappers": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.8
+ "@babel/helper-skip-transparent-expression-wrappers": ^7.24.7
+ "@babel/plugin-syntax-optional-chaining": ^7.8.3
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: f1642a7094456067e82b176e1e9fd426fda7ed9df54cb6d10109fc512b622bf4b3c83acc5875125732b8622565107fdbe2d60fe3ec8685e1d1c22c38c1b57782
+ checksum: 45e55e3a2fffb89002d3f89aef59c141610f23b60eee41e047380bffc40290b59f64fc649aa7ec5281f73d41b2065410d788acc6afaad2a9f44cad6e8af04442
languageName: node
linkType: hard
-"@babel/plugin-transform-parameters@npm:^7.20.7, @babel/plugin-transform-parameters@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-parameters@npm:7.25.9"
+"@babel/plugin-transform-parameters@npm:^7.20.7, @babel/plugin-transform-parameters@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-parameters@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: d7ba2a7d05edbc85aed741289b0ff3d6289a1c25d82ac4be32c565f88a66391f46631aad59ceeed40824037f7eeaa7a0de1998db491f50e65a565cd964f78786
+ checksum: ab534b03ac2eff94bc79342b8f39a4584666f5305a6c63c1964afda0b1b004e6b861e49d1683548030defe248e3590d3ff6338ee0552cb90c064f7e1479968c3
languageName: node
linkType: hard
-"@babel/plugin-transform-private-methods@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-private-methods@npm:7.25.9"
+"@babel/plugin-transform-private-methods@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-private-methods@npm:7.24.7"
dependencies:
- "@babel/helper-create-class-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-class-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 6e3671b352c267847c53a170a1937210fa8151764d70d25005e711ef9b21969aaf422acc14f9f7fb86bc0e4ec43e7aefcc0ad9196ae02d262ec10f509f126a58
+ checksum: c151548e34909be2adcceb224d8fdd70bafa393bc1559a600906f3f647317575bf40db670470934a360e90ee8084ef36dffa34ec25d387d414afd841e74cf3fe
languageName: node
linkType: hard
-"@babel/plugin-transform-private-property-in-object@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-private-property-in-object@npm:7.25.9"
+"@babel/plugin-transform-private-property-in-object@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-private-property-in-object@npm:7.24.7"
dependencies:
- "@babel/helper-annotate-as-pure": ^7.25.9
- "@babel/helper-create-class-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-annotate-as-pure": ^7.24.7
+ "@babel/helper-create-class-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/plugin-syntax-private-property-in-object": ^7.14.5
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 9ce3e983fea9b9ba677c192aa065c0b42ebdc7774be4c02135df09029ad92a55c35b004650c75952cb64d650872ed18f13ab64422c6fc891d06333762caa8a0a
+ checksum: 8cee9473095305cc787bb653fd681719b49363281feabf677db8a552e8e41c94441408055d7e5fd5c7d41b315e634fa70b145ad0c7c54456216049df4ed57350
languageName: node
linkType: hard
-"@babel/plugin-transform-property-literals@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-property-literals@npm:7.25.9"
+"@babel/plugin-transform-property-literals@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-property-literals@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 436046ab07d54a9b44a384eeffec701d4e959a37a7547dda72e069e751ca7ff753d1782a8339e354b97c78a868b49ea97bf41bf5a44c6d7a3c0a05ad40eeb49c
+ checksum: 9aeefc3aab6c6bf9d1fae1cf3a2d38c7d886fd3c6c81b7c608c477f5758aee2e7abf52f32724310fe861da61af934ee2508b78a5b5f234b9740c9134e1c14437
languageName: node
linkType: hard
-"@babel/plugin-transform-regenerator@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-regenerator@npm:7.25.9"
+"@babel/plugin-transform-regenerator@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-regenerator@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
regenerator-transform: ^0.15.2
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 1c09e8087b476c5967282c9790fb8710e065eda77c60f6cb5da541edd59ded9d003d96f8ef640928faab4a0b35bf997673499a194973da4f0c97f0935807a482
- languageName: node
- linkType: hard
-
-"@babel/plugin-transform-regexp-modifiers@npm:^7.26.0":
- version: 7.26.0
- resolution: "@babel/plugin-transform-regexp-modifiers@npm:7.26.0"
- dependencies:
- "@babel/helper-create-regexp-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- peerDependencies:
- "@babel/core": ^7.0.0
- checksum: 726deca486bbd4b176f8a966eb0f4aabc19d9def3b8dabb8b3a656778eca0df1fda3f3c92b213aa5a184232fdafd5b7bd73b4e24ca4345c498ef6baff2bda4e1
+ checksum: 20c6c3fb6fc9f407829087316653388d311e8c1816b007609bb09aeef254092a7157adace8b3aaa8f34be752503717cb85c88a5fe482180a9b11bcbd676063be
languageName: node
linkType: hard
-"@babel/plugin-transform-reserved-words@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-reserved-words@npm:7.25.9"
+"@babel/plugin-transform-reserved-words@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-reserved-words@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 8beda04481b25767acbd1f6b9ef7b3a9c12fbd9dcb24df45a6ad120e1dc4b247c073db60ac742f9093657d6d8c050501fc0606af042f81a3bb6a3ff862cddc47
+ checksum: 3d5876954d5914d7270819479504f30c4bf5452a65c677f44e2dab2db50b3c9d4b47793c45dfad7abf4f377035dd79e4b3f554ae350df9f422201d370ce9f8dd
languageName: node
linkType: hard
"@babel/plugin-transform-runtime@npm:^7.13.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-runtime@npm:7.25.9"
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-runtime@npm:7.24.7"
dependencies:
- "@babel/helper-module-imports": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-module-imports": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
babel-plugin-polyfill-corejs2: ^0.4.10
- babel-plugin-polyfill-corejs3: ^0.10.6
+ babel-plugin-polyfill-corejs3: ^0.10.1
babel-plugin-polyfill-regenerator: ^0.6.1
semver: ^6.3.1
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: db7f20a7a7324dbfe3b43a09f0095c69dadcf8b08567fa7c7fa6e245d97c66cdcdc330e97733b7589261c0e1046bc5cc36741b932ac5dd7757374495b57e7b02
+ checksum: 98bcbbdc833d5c451189a6325f88820fe92973e119c59ce74bf28681cf4687c8280decb55b6c47f22e98c3973ae3a13521c4f51855a2b8577b230ecb1b4ca5b4
languageName: node
linkType: hard
-"@babel/plugin-transform-shorthand-properties@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-shorthand-properties@npm:7.25.9"
+"@babel/plugin-transform-shorthand-properties@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-shorthand-properties@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: f774995d58d4e3a992b732cf3a9b8823552d471040e280264dd15e0735433d51b468fef04d75853d061309389c66bda10ce1b298297ce83999220eb0ad62741d
+ checksum: 7b524245814607188212b8eb86d8c850e5974203328455a30881b4a92c364b93353fae14bc2af5b614ef16300b75b8c1d3b8f3a08355985b4794a7feb240adc3
languageName: node
linkType: hard
-"@babel/plugin-transform-spread@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-spread@npm:7.25.9"
+"@babel/plugin-transform-spread@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-spread@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-skip-transparent-expression-wrappers": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-skip-transparent-expression-wrappers": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 2403a5d49171b7714d5e5ecb1f598c61575a4dbe5e33e5a5f08c0ea990b75e693ca1ea983b6a96b2e3e5e7da48c8238333f525e47498c53b577c5d094d964c06
+ checksum: 4c4254c8b9cceb1a8f975fa9b92257ddb08380a35c0a3721b8f4b9e13a3d82e403af2e0fba577b9f2452dd8f06bc3dea71cc53b1e2c6af595af5db52a13429d6
languageName: node
linkType: hard
-"@babel/plugin-transform-sticky-regex@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-sticky-regex@npm:7.25.9"
+"@babel/plugin-transform-sticky-regex@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-sticky-regex@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 7454b00844dbe924030dd15e2b3615b36e196500c4c47e98dabc6b37a054c5b1038ecd437e910aabf0e43bf56b973cb148d3437d50f6e2332d8309568e3e979b
+ checksum: 118fc7a7ebf7c20411b670c8a030535fdfe4a88bc5643bb625a584dbc4c8a468da46430a20e6bf78914246962b0f18f1b9d6a62561a7762c4f34a038a5a77179
languageName: node
linkType: hard
-"@babel/plugin-transform-template-literals@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-template-literals@npm:7.25.9"
+"@babel/plugin-transform-template-literals@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-template-literals@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 92eb1d6e2d95bd24abbb74fa7640d02b66ff6214e0bb616d7fda298a7821ce15132a4265d576a3502a347a3c9e94b6c69ed265bb0784664592fa076785a3d16a
+ checksum: ad44e5826f5a98c1575832dbdbd033adfe683cdff195e178528ead62507564bf02f479b282976cfd3caebad8b06d5fd7349c1cdb880dec3c56daea4f1f179619
languageName: node
linkType: hard
-"@babel/plugin-transform-typeof-symbol@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-typeof-symbol@npm:7.25.9"
+"@babel/plugin-transform-typeof-symbol@npm:^7.24.8":
+ version: 7.24.8
+ resolution: "@babel/plugin-transform-typeof-symbol@npm:7.24.8"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.8
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 3f9458840d96f61502f0e9dfaae3efe8325fa0b2151e24ea0d41307f28cdd166905419f5a43447ce0f1ae4bfd001f3906b658839a60269c254168164090b4c73
+ checksum: 8663a8e7347cedf181001d99c88cf794b6598c3d82f324098510fe8fb8bd22113995526a77aa35a3cc5d70ffd0617a59dd0d10311a9bf0e1a3a7d3e59b900c00
languageName: node
linkType: hard
-"@babel/plugin-transform-typescript@npm:^7.13.0, @babel/plugin-transform-typescript@npm:^7.16.8, @babel/plugin-transform-typescript@npm:^7.20.13, @babel/plugin-transform-typescript@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-typescript@npm:7.25.9"
+"@babel/plugin-transform-typescript@npm:^7.13.0, @babel/plugin-transform-typescript@npm:^7.16.8, @babel/plugin-transform-typescript@npm:^7.20.13, @babel/plugin-transform-typescript@npm:^7.24.7":
+ version: 7.24.8
+ resolution: "@babel/plugin-transform-typescript@npm:7.24.8"
dependencies:
- "@babel/helper-annotate-as-pure": ^7.25.9
- "@babel/helper-create-class-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-skip-transparent-expression-wrappers": ^7.25.9
- "@babel/plugin-syntax-typescript": ^7.25.9
+ "@babel/helper-annotate-as-pure": ^7.24.7
+ "@babel/helper-create-class-features-plugin": ^7.24.8
+ "@babel/helper-plugin-utils": ^7.24.8
+ "@babel/plugin-syntax-typescript": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 6dd1303f1b9f314e22c6c54568a8b9709a081ce97be757d4004f960e3e73d6b819e6b49cee6cf1fc8455511e41127a8b580fa34602de62d17ab8a0b2d0ccf183
+ checksum: 4dcdc0ca2b523ccfb216ad7e68d2954576e42d83956e0e65626ad1ece17da85cb1122b6c350c4746db927996060466c879945d40cde156a94019f30587fef41a
languageName: node
linkType: hard
@@ -1156,50 +1327,50 @@ __metadata:
languageName: node
linkType: hard
-"@babel/plugin-transform-unicode-escapes@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-unicode-escapes@npm:7.25.9"
+"@babel/plugin-transform-unicode-escapes@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-unicode-escapes@npm:7.24.7"
dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: be067e07488d804e3e82d7771f23666539d2ae5af03bf6eb8480406adf3dabd776e60c1fd5c6078dc5714b73cd80bbaca70e71d4f5d154c5c57200581602ca2f
+ checksum: 4af0a193e1ddea6ff82b2b15cc2501b872728050bd625740b813c8062fec917d32d530ff6b41de56c15e7296becdf3336a58db81f5ca8e7c445c1306c52f3e01
languageName: node
linkType: hard
-"@babel/plugin-transform-unicode-property-regex@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-unicode-property-regex@npm:7.25.9"
+"@babel/plugin-transform-unicode-property-regex@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-unicode-property-regex@npm:7.24.7"
dependencies:
- "@babel/helper-create-regexp-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-regexp-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 201f6f46c1beb399e79aa208b94c5d54412047511795ce1e790edcd189cef73752e6a099fdfc01b3ad12205f139ae344143b62f21f44bbe02338a95e8506a911
+ checksum: aae13350c50973f5802ca7906d022a6a0cc0e3aebac9122d0450bbd51e78252d4c2032ad69385e2759fcbdd3aac5d571bd7e26258907f51f8e1a51b53be626c2
languageName: node
linkType: hard
-"@babel/plugin-transform-unicode-regex@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-unicode-regex@npm:7.25.9"
+"@babel/plugin-transform-unicode-regex@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-unicode-regex@npm:7.24.7"
dependencies:
- "@babel/helper-create-regexp-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-regexp-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: e8baae867526e179467c6ef5280d70390fa7388f8763a19a27c21302dd59b121032568be080749514b097097ceb9af716bf4b90638f1b3cf689aa837ba20150f
+ checksum: 1cb4e70678906e431da0a05ac3f8350025fee290304ad7482d9cfaa1ca67b2e898654de537c9268efbdad5b80d3ebadf42b4a88ea84609bd8a4cce7b11b48afd
languageName: node
linkType: hard
-"@babel/plugin-transform-unicode-sets-regex@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/plugin-transform-unicode-sets-regex@npm:7.25.9"
+"@babel/plugin-transform-unicode-sets-regex@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/plugin-transform-unicode-sets-regex@npm:7.24.7"
dependencies:
- "@babel/helper-create-regexp-features-plugin": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
+ "@babel/helper-create-regexp-features-plugin": ^7.24.7
+ "@babel/helper-plugin-utils": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0
- checksum: 4445ef20de687cb4dcc95169742a8d9013d680aa5eee9186d8e25875bbfa7ee5e2de26a91177ccf70b1db518e36886abcd44750d28db5d7a9539f0efa6839f4b
+ checksum: 08a2844914f33dacd2ce1ab021ce8c1cc35dc6568521a746d8bf29c21571ee5be78787b454231c4bb3526cbbe280f1893223c82726cec5df2be5dae0a3b51837
languageName: node
linkType: hard
@@ -1214,81 +1385,93 @@ __metadata:
linkType: hard
"@babel/preset-env@npm:^7.16.5, @babel/preset-env@npm:^7.16.7, @babel/preset-env@npm:^7.20.2, @babel/preset-env@npm:^7.24.6":
- version: 7.26.0
- resolution: "@babel/preset-env@npm:7.26.0"
- dependencies:
- "@babel/compat-data": ^7.26.0
- "@babel/helper-compilation-targets": ^7.25.9
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-validator-option": ^7.25.9
- "@babel/plugin-bugfix-firefox-class-in-computed-class-key": ^7.25.9
- "@babel/plugin-bugfix-safari-class-field-initializer-scope": ^7.25.9
- "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": ^7.25.9
- "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": ^7.25.9
- "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": ^7.25.9
+ version: 7.24.8
+ resolution: "@babel/preset-env@npm:7.24.8"
+ dependencies:
+ "@babel/compat-data": ^7.24.8
+ "@babel/helper-compilation-targets": ^7.24.8
+ "@babel/helper-plugin-utils": ^7.24.8
+ "@babel/helper-validator-option": ^7.24.8
+ "@babel/plugin-bugfix-firefox-class-in-computed-class-key": ^7.24.7
+ "@babel/plugin-bugfix-safari-id-destructuring-collision-in-function-expression": ^7.24.7
+ "@babel/plugin-bugfix-v8-spread-parameters-in-optional-chaining": ^7.24.7
+ "@babel/plugin-bugfix-v8-static-class-fields-redefine-readonly": ^7.24.7
"@babel/plugin-proposal-private-property-in-object": 7.21.0-placeholder-for-preset-env.2
- "@babel/plugin-syntax-import-assertions": ^7.26.0
- "@babel/plugin-syntax-import-attributes": ^7.26.0
+ "@babel/plugin-syntax-async-generators": ^7.8.4
+ "@babel/plugin-syntax-class-properties": ^7.12.13
+ "@babel/plugin-syntax-class-static-block": ^7.14.5
+ "@babel/plugin-syntax-dynamic-import": ^7.8.3
+ "@babel/plugin-syntax-export-namespace-from": ^7.8.3
+ "@babel/plugin-syntax-import-assertions": ^7.24.7
+ "@babel/plugin-syntax-import-attributes": ^7.24.7
+ "@babel/plugin-syntax-import-meta": ^7.10.4
+ "@babel/plugin-syntax-json-strings": ^7.8.3
+ "@babel/plugin-syntax-logical-assignment-operators": ^7.10.4
+ "@babel/plugin-syntax-nullish-coalescing-operator": ^7.8.3
+ "@babel/plugin-syntax-numeric-separator": ^7.10.4
+ "@babel/plugin-syntax-object-rest-spread": ^7.8.3
+ "@babel/plugin-syntax-optional-catch-binding": ^7.8.3
+ "@babel/plugin-syntax-optional-chaining": ^7.8.3
+ "@babel/plugin-syntax-private-property-in-object": ^7.14.5
+ "@babel/plugin-syntax-top-level-await": ^7.14.5
"@babel/plugin-syntax-unicode-sets-regex": ^7.18.6
- "@babel/plugin-transform-arrow-functions": ^7.25.9
- "@babel/plugin-transform-async-generator-functions": ^7.25.9
- "@babel/plugin-transform-async-to-generator": ^7.25.9
- "@babel/plugin-transform-block-scoped-functions": ^7.25.9
- "@babel/plugin-transform-block-scoping": ^7.25.9
- "@babel/plugin-transform-class-properties": ^7.25.9
- "@babel/plugin-transform-class-static-block": ^7.26.0
- "@babel/plugin-transform-classes": ^7.25.9
- "@babel/plugin-transform-computed-properties": ^7.25.9
- "@babel/plugin-transform-destructuring": ^7.25.9
- "@babel/plugin-transform-dotall-regex": ^7.25.9
- "@babel/plugin-transform-duplicate-keys": ^7.25.9
- "@babel/plugin-transform-duplicate-named-capturing-groups-regex": ^7.25.9
- "@babel/plugin-transform-dynamic-import": ^7.25.9
- "@babel/plugin-transform-exponentiation-operator": ^7.25.9
- "@babel/plugin-transform-export-namespace-from": ^7.25.9
- "@babel/plugin-transform-for-of": ^7.25.9
- "@babel/plugin-transform-function-name": ^7.25.9
- "@babel/plugin-transform-json-strings": ^7.25.9
- "@babel/plugin-transform-literals": ^7.25.9
- "@babel/plugin-transform-logical-assignment-operators": ^7.25.9
- "@babel/plugin-transform-member-expression-literals": ^7.25.9
- "@babel/plugin-transform-modules-amd": ^7.25.9
- "@babel/plugin-transform-modules-commonjs": ^7.25.9
- "@babel/plugin-transform-modules-systemjs": ^7.25.9
- "@babel/plugin-transform-modules-umd": ^7.25.9
- "@babel/plugin-transform-named-capturing-groups-regex": ^7.25.9
- "@babel/plugin-transform-new-target": ^7.25.9
- "@babel/plugin-transform-nullish-coalescing-operator": ^7.25.9
- "@babel/plugin-transform-numeric-separator": ^7.25.9
- "@babel/plugin-transform-object-rest-spread": ^7.25.9
- "@babel/plugin-transform-object-super": ^7.25.9
- "@babel/plugin-transform-optional-catch-binding": ^7.25.9
- "@babel/plugin-transform-optional-chaining": ^7.25.9
- "@babel/plugin-transform-parameters": ^7.25.9
- "@babel/plugin-transform-private-methods": ^7.25.9
- "@babel/plugin-transform-private-property-in-object": ^7.25.9
- "@babel/plugin-transform-property-literals": ^7.25.9
- "@babel/plugin-transform-regenerator": ^7.25.9
- "@babel/plugin-transform-regexp-modifiers": ^7.26.0
- "@babel/plugin-transform-reserved-words": ^7.25.9
- "@babel/plugin-transform-shorthand-properties": ^7.25.9
- "@babel/plugin-transform-spread": ^7.25.9
- "@babel/plugin-transform-sticky-regex": ^7.25.9
- "@babel/plugin-transform-template-literals": ^7.25.9
- "@babel/plugin-transform-typeof-symbol": ^7.25.9
- "@babel/plugin-transform-unicode-escapes": ^7.25.9
- "@babel/plugin-transform-unicode-property-regex": ^7.25.9
- "@babel/plugin-transform-unicode-regex": ^7.25.9
- "@babel/plugin-transform-unicode-sets-regex": ^7.25.9
+ "@babel/plugin-transform-arrow-functions": ^7.24.7
+ "@babel/plugin-transform-async-generator-functions": ^7.24.7
+ "@babel/plugin-transform-async-to-generator": ^7.24.7
+ "@babel/plugin-transform-block-scoped-functions": ^7.24.7
+ "@babel/plugin-transform-block-scoping": ^7.24.7
+ "@babel/plugin-transform-class-properties": ^7.24.7
+ "@babel/plugin-transform-class-static-block": ^7.24.7
+ "@babel/plugin-transform-classes": ^7.24.8
+ "@babel/plugin-transform-computed-properties": ^7.24.7
+ "@babel/plugin-transform-destructuring": ^7.24.8
+ "@babel/plugin-transform-dotall-regex": ^7.24.7
+ "@babel/plugin-transform-duplicate-keys": ^7.24.7
+ "@babel/plugin-transform-dynamic-import": ^7.24.7
+ "@babel/plugin-transform-exponentiation-operator": ^7.24.7
+ "@babel/plugin-transform-export-namespace-from": ^7.24.7
+ "@babel/plugin-transform-for-of": ^7.24.7
+ "@babel/plugin-transform-function-name": ^7.24.7
+ "@babel/plugin-transform-json-strings": ^7.24.7
+ "@babel/plugin-transform-literals": ^7.24.7
+ "@babel/plugin-transform-logical-assignment-operators": ^7.24.7
+ "@babel/plugin-transform-member-expression-literals": ^7.24.7
+ "@babel/plugin-transform-modules-amd": ^7.24.7
+ "@babel/plugin-transform-modules-commonjs": ^7.24.8
+ "@babel/plugin-transform-modules-systemjs": ^7.24.7
+ "@babel/plugin-transform-modules-umd": ^7.24.7
+ "@babel/plugin-transform-named-capturing-groups-regex": ^7.24.7
+ "@babel/plugin-transform-new-target": ^7.24.7
+ "@babel/plugin-transform-nullish-coalescing-operator": ^7.24.7
+ "@babel/plugin-transform-numeric-separator": ^7.24.7
+ "@babel/plugin-transform-object-rest-spread": ^7.24.7
+ "@babel/plugin-transform-object-super": ^7.24.7
+ "@babel/plugin-transform-optional-catch-binding": ^7.24.7
+ "@babel/plugin-transform-optional-chaining": ^7.24.8
+ "@babel/plugin-transform-parameters": ^7.24.7
+ "@babel/plugin-transform-private-methods": ^7.24.7
+ "@babel/plugin-transform-private-property-in-object": ^7.24.7
+ "@babel/plugin-transform-property-literals": ^7.24.7
+ "@babel/plugin-transform-regenerator": ^7.24.7
+ "@babel/plugin-transform-reserved-words": ^7.24.7
+ "@babel/plugin-transform-shorthand-properties": ^7.24.7
+ "@babel/plugin-transform-spread": ^7.24.7
+ "@babel/plugin-transform-sticky-regex": ^7.24.7
+ "@babel/plugin-transform-template-literals": ^7.24.7
+ "@babel/plugin-transform-typeof-symbol": ^7.24.8
+ "@babel/plugin-transform-unicode-escapes": ^7.24.7
+ "@babel/plugin-transform-unicode-property-regex": ^7.24.7
+ "@babel/plugin-transform-unicode-regex": ^7.24.7
+ "@babel/plugin-transform-unicode-sets-regex": ^7.24.7
"@babel/preset-modules": 0.1.6-no-external-plugins
babel-plugin-polyfill-corejs2: ^0.4.10
- babel-plugin-polyfill-corejs3: ^0.10.6
+ babel-plugin-polyfill-corejs3: ^0.10.4
babel-plugin-polyfill-regenerator: ^0.6.1
- core-js-compat: ^3.38.1
+ core-js-compat: ^3.37.1
semver: ^6.3.1
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 0c3e2b3758cc0347dcf5551b5209db702764183dce66ff20bffceff6486c090bef9175f5f7d1e68cfe5584f0d817b2aab25ab5992058a7998f061f244c8caf5f
+ checksum: efea0039dbb089c9cc0b792b9ac0eef949699584b4c622e2abea062b44b1a0fbcda6ad25e2263ae36a69586889b4a22439a1096aa8152b366e3fedd921ae66ac
languageName: node
linkType: hard
@@ -1306,17 +1489,24 @@ __metadata:
linkType: hard
"@babel/preset-typescript@npm:^7.24.6":
- version: 7.26.0
- resolution: "@babel/preset-typescript@npm:7.26.0"
- dependencies:
- "@babel/helper-plugin-utils": ^7.25.9
- "@babel/helper-validator-option": ^7.25.9
- "@babel/plugin-syntax-jsx": ^7.25.9
- "@babel/plugin-transform-modules-commonjs": ^7.25.9
- "@babel/plugin-transform-typescript": ^7.25.9
+ version: 7.24.7
+ resolution: "@babel/preset-typescript@npm:7.24.7"
+ dependencies:
+ "@babel/helper-plugin-utils": ^7.24.7
+ "@babel/helper-validator-option": ^7.24.7
+ "@babel/plugin-syntax-jsx": ^7.24.7
+ "@babel/plugin-transform-modules-commonjs": ^7.24.7
+ "@babel/plugin-transform-typescript": ^7.24.7
peerDependencies:
"@babel/core": ^7.0.0-0
- checksum: 6d8641fa6efd0e10eec5e8f92cd164b916a06d57131cfa5216c281404289c87d2b4995140a1c1d9c3bad171ff6ef2226be5f0585e09577ffff349706e991ec71
+ checksum: 12929b24757f3bd6548103475f86478eda4c872bc7cefd920b29591eee8f4a4f350561d888e133d632d0c9402b8615fdcec9138e5127a6567dcb22f804ff207f
+ languageName: node
+ linkType: hard
+
+"@babel/regjsgen@npm:^0.8.0":
+ version: 0.8.0
+ resolution: "@babel/regjsgen@npm:0.8.0"
+ checksum: 89c338fee774770e5a487382170711014d49a68eb281e74f2b5eac88f38300a4ad545516a7786a8dd5702e9cf009c94c2f582d200f077ac5decd74c56b973730
languageName: node
linkType: hard
@@ -1330,47 +1520,51 @@ __metadata:
linkType: hard
"@babel/runtime@npm:^7.14.0, @babel/runtime@npm:^7.17.8, @babel/runtime@npm:^7.21.0, @babel/runtime@npm:^7.8.4":
- version: 7.26.0
- resolution: "@babel/runtime@npm:7.26.0"
+ version: 7.24.8
+ resolution: "@babel/runtime@npm:7.24.8"
dependencies:
regenerator-runtime: ^0.14.0
- checksum: c8e2c0504ab271b3467a261a8f119bf2603eb857a0d71e37791f4e3fae00f681365073cc79f141ddaa90c6077c60ba56448004ad5429d07ac73532be9f7cf28a
+ checksum: 6b1e4230580f67a807ad054720812bbefbb024cc2adc1159d050acbb764c4c81c7ac5f7a042c48f578987c5edc2453c71039268df059058e9501fa6023d764b0
languageName: node
linkType: hard
-"@babel/template@npm:^7.25.9":
- version: 7.25.9
- resolution: "@babel/template@npm:7.25.9"
+"@babel/template@npm:^7.24.7":
+ version: 7.24.7
+ resolution: "@babel/template@npm:7.24.7"
dependencies:
- "@babel/code-frame": ^7.25.9
- "@babel/parser": ^7.25.9
- "@babel/types": ^7.25.9
- checksum: 103641fea19c7f4e82dc913aa6b6ac157112a96d7c724d513288f538b84bae04fb87b1f1e495ac1736367b1bc30e10f058b30208fb25f66038e1f1eb4e426472
+ "@babel/code-frame": ^7.24.7
+ "@babel/parser": ^7.24.7
+ "@babel/types": ^7.24.7
+ checksum: ea90792fae708ddf1632e54c25fe1a86643d8c0132311f81265d2bdbdd42f9f4fac65457056c1b6ca87f7aa0d6a795b549566774bba064bdcea2034ab3960ee9
languageName: node
linkType: hard
-"@babel/traverse@npm:^7.25.9, @babel/traverse@npm:^7.4.5":
- version: 7.25.9
- resolution: "@babel/traverse@npm:7.25.9"
+"@babel/traverse@npm:^7.24.7, @babel/traverse@npm:^7.24.8, @babel/traverse@npm:^7.4.5":
+ version: 7.24.8
+ resolution: "@babel/traverse@npm:7.24.8"
dependencies:
- "@babel/code-frame": ^7.25.9
- "@babel/generator": ^7.25.9
- "@babel/parser": ^7.25.9
- "@babel/template": ^7.25.9
- "@babel/types": ^7.25.9
+ "@babel/code-frame": ^7.24.7
+ "@babel/generator": ^7.24.8
+ "@babel/helper-environment-visitor": ^7.24.7
+ "@babel/helper-function-name": ^7.24.7
+ "@babel/helper-hoist-variables": ^7.24.7
+ "@babel/helper-split-export-declaration": ^7.24.7
+ "@babel/parser": ^7.24.8
+ "@babel/types": ^7.24.8
debug: ^4.3.1
globals: ^11.1.0
- checksum: 901d325662ff1dd9bc51de00862e01055fa6bc374f5297d7e3731f2f0e268bbb1d2141f53fa82860aa308ee44afdcf186a948f16c83153927925804b95a9594d
+ checksum: ee7955476ce031613249f2b0ce9e74a3b7787c9d52e84534fcf39ad61aeb0b811a4cd83edc157608be4886f04c6ecf210861e211ba2a3db4fda729cc2048b5ed
languageName: node
linkType: hard
-"@babel/types@npm:^7.12.13, @babel/types@npm:^7.25.9, @babel/types@npm:^7.26.0, @babel/types@npm:^7.4.4, @babel/types@npm:^7.7.2":
- version: 7.26.0
- resolution: "@babel/types@npm:7.26.0"
+"@babel/types@npm:^7.12.13, @babel/types@npm:^7.24.7, @babel/types@npm:^7.24.8, @babel/types@npm:^7.24.9, @babel/types@npm:^7.4.4, @babel/types@npm:^7.7.2, @babel/types@npm:^7.8.3":
+ version: 7.24.9
+ resolution: "@babel/types@npm:7.24.9"
dependencies:
- "@babel/helper-string-parser": ^7.25.9
- "@babel/helper-validator-identifier": ^7.25.9
- checksum: a3dd37dabac693018872da96edb8c1843a605c1bfacde6c3f504fba79b972426a6f24df70aa646356c0c1b19bdd2c722c623c684a996c002381071680602280d
+ "@babel/helper-string-parser": ^7.24.8
+ "@babel/helper-validator-identifier": ^7.24.7
+ to-fast-properties: ^2.0.0
+ checksum: 15cb05c45be5d4c49a749575d3742bd005d0e2e850c13fb462754983a5bc1063fbc8f6566246fc064e3e8b21a5a75a37a948f1b3f27189cc90b236fee93f5e51
languageName: node
linkType: hard
@@ -1483,148 +1677,161 @@ __metadata:
languageName: node
linkType: hard
-"@ember-data/adapter@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/adapter@npm:5.3.9"
+"@ember-data/adapter@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/adapter@npm:4.12.8"
dependencies:
- "@ember/edition-utils": 1.2.0
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
- ember-cli-path-utils: ^1.0.0
- ember-cli-string-utils: ^1.1.0
+ "@ember-data/private-build-infra": 4.12.8
+ "@embroider/macros": ^1.10.0
+ ember-cli-babel: ^7.26.11
ember-cli-test-info: ^1.0.0
peerDependencies:
- "@ember-data/legacy-compat": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/store": 5.3.9
- "@warp-drive/core-types": 0.0.0-beta.12
- checksum: efd1bc0ec877ddd0446e8051f1096e98c6eb2c6db857a2e846557aa4e51bcb643e29f846434fc1ddcf78ec12ace2124fbcd0b6fa6ae426bcdbafdb45198a24ae
+ "@ember-data/store": 4.12.8
+ "@ember/string": ^3.0.1
+ ember-inflector: ^4.0.2
+ checksum: 323ed336a2225d270095e851bb9df4cc5d60679e335f76ddb8a102a16fb952181413aa1e9e7cbd5709a0611d1f7bd5da46856a813b716794facce82859e44798
languageName: node
linkType: hard
-"@ember-data/debug@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/debug@npm:5.3.9"
+"@ember-data/debug@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/debug@npm:4.12.8"
dependencies:
+ "@ember-data/private-build-infra": 4.12.8
"@ember/edition-utils": ^1.2.0
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
+ "@embroider/macros": ^1.10.0
+ ember-auto-import: ^2.6.1
+ ember-cli-babel: ^7.26.11
peerDependencies:
- "@ember-data/model": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/store": 5.3.9
- "@warp-drive/core-types": 0.0.0-beta.12
- checksum: c6b3c18b2697301db2c654b56edac16d71c7d9ad155dc638a08e0b5719989fc243cfa01c865d21f45eef02bddf9b8e6516c73b46b130be17fde39dcc9689af37
+ "@ember-data/store": 4.12.8
+ "@ember/string": ^3.0.1
+ checksum: ac0578c3e5791e4a5ef0fccdd19c93e0db8e6a070998d7bfafccbaaf3b8b1e02ea1d3a4098daec832a1b049ad62763f991743772bc9a292e8e32bca8df902902
languageName: node
linkType: hard
-"@ember-data/graph@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/graph@npm:5.3.9"
+"@ember-data/graph@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/graph@npm:4.12.8"
dependencies:
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
+ "@ember-data/private-build-infra": 4.12.8
+ "@ember/edition-utils": ^1.2.0
+ "@embroider/macros": ^1.10.0
+ ember-cli-babel: ^7.26.11
peerDependencies:
- "@ember-data/store": 5.3.9
- "@warp-drive/core-types": 0.0.0-beta.12
- checksum: f827b04d63012cbf4fb931e5d750027be9ddad2c78a6de722c2e0387bb53400faf73ca4db84fe2c2444ae3a41dc688d6c27e829a6c943429fa7967d396d1e297
+ "@ember-data/store": 4.12.8
+ checksum: 1ae38648d8b010a4cafdd6315ebc113d5a418c9f812d59da0da3a0b9913383b655b6f0f861f527b8d26f1341fdc5ac32b9ac8551dd02837613c985f3ad51b96e
languageName: node
linkType: hard
-"@ember-data/json-api@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/json-api@npm:5.3.9"
+"@ember-data/json-api@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/json-api@npm:4.12.8"
dependencies:
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
+ "@ember-data/private-build-infra": 4.12.8
+ "@ember/edition-utils": ^1.2.0
+ "@embroider/macros": ^1.10.0
+ ember-cli-babel: ^7.26.11
peerDependencies:
- "@ember-data/graph": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/store": 5.3.9
- "@warp-drive/core-types": 0.0.0-beta.12
- checksum: dc4c097ef9c5c58bb1485662fdab4f43eda6036ff54d4c308c93d9a083abba315d2a48a232768317b2cfb9b0f419b0534f7be375547df971f2a0bdb990eccd1b
+ "@ember-data/graph": 4.12.8
+ "@ember-data/store": 4.12.8
+ checksum: f8a5f8e42f3ffbd3fae8cf694c08ef541ee4abad3efc016e9c5473603e0b681d6a7aad2d7b2333faebdb413ec8361a83b29ea39e47a10356769c9fa0fa3e9704
languageName: node
linkType: hard
-"@ember-data/legacy-compat@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/legacy-compat@npm:5.3.9"
+"@ember-data/legacy-compat@npm:4.12.8, @ember-data/legacy-compat@npm:~4.12.4":
+ version: 4.12.8
+ resolution: "@ember-data/legacy-compat@npm:4.12.8"
dependencies:
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
+ "@ember-data/private-build-infra": 4.12.8
+ "@embroider/macros": ^1.10.0
+ ember-cli-babel: ^7.26.11
peerDependencies:
- "@ember-data/graph": 5.3.9
- "@ember-data/json-api": 5.3.9
- "@ember-data/request": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/store": 5.3.9
- "@ember/test-waiters": ^3.1.0
- "@warp-drive/core-types": 0.0.0-beta.12
+ "@ember-data/graph": 4.12.8
+ "@ember-data/json-api": 4.12.8
+ "@ember/string": ^3.0.1
peerDependenciesMeta:
"@ember-data/graph":
optional: true
"@ember-data/json-api":
optional: true
- checksum: c945a0cfd83ef79a174b5f9ceb44948a0347b05fe9073906e9dae4bfd31b0b67673b60d4abfb5afc14b921893fcbf65729591915dd82d0e2a29fa9f2a6836a22
+ checksum: 634e745a28c9cd515e191b899e3c8f8eb85dc6bde77087bc6bed8d4381d3ccb4bee5ab70beb27fcd223a5130bcb01463c3eaa7924e7bb3bb99c3b0f02556992c
languageName: node
linkType: hard
-"@ember-data/model@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/model@npm:5.3.9"
+"@ember-data/model@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/model@npm:4.12.8"
dependencies:
+ "@ember-data/private-build-infra": 4.12.8
"@ember/edition-utils": ^1.2.0
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
+ "@embroider/macros": ^1.10.0
+ ember-cached-decorator-polyfill: ^1.0.1
+ ember-cli-babel: ^7.26.11
ember-cli-string-utils: ^1.1.0
ember-cli-test-info: ^1.0.0
- inflection: ~3.0.0
+ inflection: ~2.0.1
peerDependencies:
- "@ember-data/graph": 5.3.9
- "@ember-data/json-api": 5.3.9
- "@ember-data/legacy-compat": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/store": 5.3.9
- "@ember-data/tracking": 5.3.9
- "@warp-drive/core-types": 0.0.0-beta.12
+ "@ember-data/debug": 4.12.8
+ "@ember-data/graph": 4.12.8
+ "@ember-data/json-api": 4.12.8
+ "@ember-data/legacy-compat": 4.12.8
+ "@ember-data/store": 4.12.8
+ "@ember-data/tracking": 4.12.8
+ "@ember/string": ^3.0.1
+ ember-inflector: ^4.0.2
peerDependenciesMeta:
+ "@ember-data/debug":
+ optional: true
"@ember-data/graph":
optional: true
"@ember-data/json-api":
optional: true
- checksum: 27eecc9d0e3913e831142520debb1d74a3ece374ed81641607a3ac7dc27910479cbf8b805288910e2c9aea5316fb4a4927b2a442dbe01220b37be9ba00cc9611
+ checksum: b53aef0b093ff2f0ab5505fb715b3a21cb2ee5aaf5e49b85167577779eaf99d96e8ee87b97c33ff9a6bfd11c7ac1cd32e4aa8b15480f496e98470384a83991d5
languageName: node
linkType: hard
-"@ember-data/request-utils@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/request-utils@npm:5.3.9"
+"@ember-data/private-build-infra@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/private-build-infra@npm:4.12.8"
dependencies:
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
- peerDependencies:
- "@ember/string": ^3.1.1 || ^4.0.0
- "@warp-drive/core-types": 0.0.0-beta.12
- ember-inflector: ^4.0.2 || ^5.0.0
- peerDependenciesMeta:
- "@ember/string":
- optional: true
- ember-inflector:
- optional: true
- checksum: 09a204d536cc64f17699fbeb1539635c2a5cedc47d90362f1dccff3369e45f76ecdc0c174077943607df16d2a23d4a167fc43915ebeabca895504ae07ea6fe2b
+ "@babel/core": ^7.21.4
+ "@babel/plugin-transform-block-scoping": ^7.21.0
+ "@babel/runtime": ^7.21.0
+ "@ember/edition-utils": ^1.2.0
+ "@embroider/macros": ^1.10.0
+ babel-import-util: ^1.3.0
+ babel-plugin-debug-macros: ^0.3.4
+ babel-plugin-filter-imports: ^4.0.0
+ babel6-plugin-strip-class-callcheck: ^6.0.0
+ broccoli-debug: ^0.6.5
+ broccoli-file-creator: ^2.1.1
+ broccoli-funnel: ^3.0.8
+ broccoli-merge-trees: ^4.2.0
+ broccoli-rollup: ^5.0.0
+ calculate-cache-key-for-tree: ^2.0.0
+ chalk: ^4.1.2
+ ember-cli-babel: ^7.26.11
+ ember-cli-path-utils: ^1.0.0
+ ember-cli-string-utils: ^1.1.0
+ ember-cli-version-checker: ^5.1.2
+ git-repo-info: ^2.1.1
+ glob: ^9.3.4
+ npm-git-info: ^1.0.3
+ semver: ^7.3.8
+ silent-error: ^1.1.1
+ checksum: 6bdd5a9b32fdd093f199fed10d68f57e38dfbdf35beac9e886b459fb84afbb70cb4fa39b8f1e40d813aa627cd00430b72070cf2e83b1a5687999e52326d34201
languageName: node
linkType: hard
-"@ember-data/request@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/request@npm:5.3.9"
+"@ember-data/request@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/request@npm:4.12.8"
dependencies:
- "@ember/test-waiters": ^3.1.0
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
- peerDependencies:
- "@warp-drive/core-types": 0.0.0-beta.12
- checksum: 8af032ad628dc39427b981cb47a6369399d9d86367a4c529551020c8a2f888a6703063315375728d3088c2c3577742915a4d2d4857446c5b75c0aa0d464f246d
+ "@ember-data/private-build-infra": 4.12.8
+ "@ember/test-waiters": ^3.0.2
+ "@embroider/macros": ^1.10.0
+ ember-cli-babel: ^7.26.11
+ checksum: 9423f86f04c5e6b461ce53c2fc1ac5eca33da42c393ab170f4fa494cd4cd4ceb2673ddb7a8fa5b189073dc26d4165481c5b7c6437815ac8be130e03ce2253e66
languageName: node
linkType: hard
@@ -1635,50 +1842,59 @@ __metadata:
languageName: node
linkType: hard
-"@ember-data/serializer@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/serializer@npm:5.3.9"
+"@ember-data/serializer@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/serializer@npm:4.12.8"
dependencies:
- "@ember/edition-utils": 1.2.0
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
- ember-cli-path-utils: ^1.0.0
- ember-cli-string-utils: ^1.1.0
+ "@ember-data/private-build-infra": 4.12.8
+ "@embroider/macros": ^1.10.0
+ ember-cli-babel: ^7.26.11
ember-cli-test-info: ^1.0.0
peerDependencies:
- "@ember-data/legacy-compat": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/store": 5.3.9
- "@warp-drive/core-types": 0.0.0-beta.12
- checksum: 30686ddf8fb1c09122d2c33acd44f3a51e7e1730ac9bd71b1c44f4eab27d72bc1c0eb384ff397685b6ddaef9782d9031a19a413368a0cef2ac913ef9fdd74d07
+ "@ember-data/store": 4.12.8
+ "@ember/string": ^3.0.1
+ ember-inflector: ^4.0.2
+ checksum: 2707f8c7b23a7b7390540ee2314b506203b944274485d83070041afa2fe13c52ce0b3a3ac1efbb62a13772fdf0efd70a71488796a6b24098d5614f9343a8b3d1
languageName: node
linkType: hard
-"@ember-data/store@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/store@npm:5.3.9"
+"@ember-data/store@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/store@npm:4.12.8"
dependencies:
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
+ "@ember-data/private-build-infra": 4.12.8
+ "@embroider/macros": ^1.10.0
+ ember-cached-decorator-polyfill: ^1.0.1
+ ember-cli-babel: ^7.26.11
peerDependencies:
- "@ember-data/request": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/tracking": 5.3.9
- "@warp-drive/core-types": 0.0.0-beta.12
- checksum: 44731b3d7b085b32a922766badcd7d8ccbcb74f0f97812c2d86e2192d487140aacdf7eb08d09ac8526d84f821ebe70dd6d2f2b8e7a36c46cfc14f055c7c35cf7
+ "@ember-data/graph": 4.12.8
+ "@ember-data/json-api": 4.12.8
+ "@ember-data/legacy-compat": 4.12.8
+ "@ember-data/model": 4.12.8
+ "@ember-data/tracking": 4.12.8
+ "@ember/string": ^3.0.1
+ "@glimmer/tracking": ^1.1.2
+ peerDependenciesMeta:
+ "@ember-data/graph":
+ optional: true
+ "@ember-data/json-api":
+ optional: true
+ "@ember-data/legacy-compat":
+ optional: true
+ "@ember-data/model":
+ optional: true
+ checksum: 4e16d731b81f239188b6c078df699cd47344b5496880a66bfc86e25b33fef74ae09d96aef70e4ed809604ba0db0874858cfe2d14d91cca70a38ecac9c7b403a6
languageName: node
linkType: hard
-"@ember-data/tracking@npm:5.3.9":
- version: 5.3.9
- resolution: "@ember-data/tracking@npm:5.3.9"
+"@ember-data/tracking@npm:4.12.8":
+ version: 4.12.8
+ resolution: "@ember-data/tracking@npm:4.12.8"
dependencies:
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
- peerDependencies:
- "@warp-drive/core-types": 0.0.0-beta.12
- ember-source: ">= 3.28.12"
- checksum: 5b9006351718d16f5bf0319c598b6aef2c8e033f6d505abca617266b92b47b0bd2331fe83ed27c7328b19133a852a314ee8eecb9064cf6a3faa4711faa80f49b
+ "@ember-data/private-build-infra": 4.12.8
+ "@embroider/macros": ^1.10.0
+ ember-cli-babel: ^7.26.11
+ checksum: 4510ddf623442e86ab52ea957ab03ff4944f7251754afb261c1c9a36433e19778f76bab0436ad3874732ce3f7036243c1ac189270574c7f3ea02226071a8fd62
languageName: node
linkType: hard
@@ -1711,7 +1927,7 @@ __metadata:
languageName: node
linkType: hard
-"@ember/edition-utils@npm:1.2.0, @ember/edition-utils@npm:^1.2.0":
+"@ember/edition-utils@npm:^1.2.0":
version: 1.2.0
resolution: "@ember/edition-utils@npm:1.2.0"
checksum: eb1eeb08b66344054acb34a4695a80dbfaeb678d10cb11327eec4c0d03b2324ad7c05d0239a7d0298d5f8931e1698d85cba23dac37bff18ab52104f678a0e8c7
@@ -1733,8 +1949,8 @@ __metadata:
linkType: hard
"@ember/optional-features@npm:^2.0.0":
- version: 2.2.0
- resolution: "@ember/optional-features@npm:2.2.0"
+ version: 2.1.0
+ resolution: "@ember/optional-features@npm:2.1.0"
dependencies:
chalk: ^4.1.2
ember-cli-version-checker: ^5.1.2
@@ -1742,7 +1958,7 @@ __metadata:
inquirer: ^7.3.3
mkdirp: ^1.0.4
silent-error: ^1.1.1
- checksum: f4a1e91e9a735b746de830d90388edcffd6034fcb9551e22d8ce8ed1455944e270d31777e83d269c9ad5cf6376efe396de52dc096f8b96d4c31ac8d37f32919c
+ checksum: b1f1be8179b750de50db70ebc449a77b9590d341230cffc1f03324bc4ae1e24eb71a27f8503da0639096012b65ef4a540cea22b2ec5f8beff3deec56d0c5bd22
languageName: node
linkType: hard
@@ -1783,8 +1999,8 @@ __metadata:
linkType: hard
"@ember/test-helpers@npm:^3.2.0":
- version: 3.3.1
- resolution: "@ember/test-helpers@npm:3.3.1"
+ version: 3.3.0
+ resolution: "@ember/test-helpers@npm:3.3.0"
dependencies:
"@ember/test-waiters": ^3.0.2
"@embroider/macros": ^1.10.0
@@ -1793,11 +2009,11 @@ __metadata:
broccoli-funnel: ^3.0.8
dom-element-descriptors: ^0.5.0
ember-auto-import: ^2.6.0
- ember-cli-babel: ^8.2.0
+ ember-cli-babel: ^7.26.11
ember-cli-htmlbars: ^6.2.0
peerDependencies:
ember-source: ^4.0.0 || ^5.0.0
- checksum: fcb6594814944e1d871f74f5ff4f610484cc9276274383cf9e437bc81c43d1ee3afccf674ae905b0ed71b4d208cde8413e6093f62dac0015956dcc410c0ff26e
+ checksum: 711a37f143533dd43f0da0d94800feebaf1883a1db64b242f9f9f70cc0c1b6c8c0c00ac26d9e5e0ee800e1646251d19fcc25648f20a4b1a64c775591b86620a9
languageName: node
linkType: hard
@@ -1813,23 +2029,23 @@ __metadata:
languageName: node
linkType: hard
-"@embroider/addon-shim@npm:^1.0.0, @embroider/addon-shim@npm:^1.2.0, @embroider/addon-shim@npm:^1.6.0, @embroider/addon-shim@npm:^1.8.0, @embroider/addon-shim@npm:^1.8.3, @embroider/addon-shim@npm:^1.8.6, @embroider/addon-shim@npm:^1.8.7, @embroider/addon-shim@npm:^1.8.9":
- version: 1.9.0
- resolution: "@embroider/addon-shim@npm:1.9.0"
+"@embroider/addon-shim@npm:^1.0.0, @embroider/addon-shim@npm:^1.2.0, @embroider/addon-shim@npm:^1.6.0, @embroider/addon-shim@npm:^1.8.0, @embroider/addon-shim@npm:^1.8.3, @embroider/addon-shim@npm:^1.8.4, @embroider/addon-shim@npm:^1.8.6, @embroider/addon-shim@npm:^1.8.7, @embroider/addon-shim@npm:^1.8.9":
+ version: 1.8.9
+ resolution: "@embroider/addon-shim@npm:1.8.9"
dependencies:
- "@embroider/shared-internals": ^2.8.1
+ "@embroider/shared-internals": ^2.6.0
broccoli-funnel: ^3.0.8
common-ancestor-path: ^1.0.1
semver: ^7.3.8
- checksum: 189c08a9f79ff31baf0c0ae17bca9c5eccb04aaf439c309719f1c8216e96d289d969b2a0b3c4a3e8817bcb8b968a953dbdf9d305ec1d0361b63c8ff4d708a914
+ checksum: 487bca1e8c7de93427d2c7221cace571ea5d75a5bd6bb838861a2071dc6d06a45e844fe6875cc8fb8c003112df4f7df38ac7f702aa60d79ccf62b2ad73218c28
languageName: node
linkType: hard
"@embroider/macros@npm:^1.15.0":
- version: 1.16.9
- resolution: "@embroider/macros@npm:1.16.9"
+ version: 1.16.5
+ resolution: "@embroider/macros@npm:1.16.5"
dependencies:
- "@embroider/shared-internals": 2.8.1
+ "@embroider/shared-internals": 2.6.2
assert-never: ^1.2.1
babel-import-util: ^2.0.0
ember-cli-babel: ^7.26.6
@@ -1842,31 +2058,29 @@ __metadata:
peerDependenciesMeta:
"@glint/template":
optional: true
- checksum: 116294314d80f08c4d5b2d71bcaf48d761f879b55e3552241c3260e8ec574a3d07090c84af0c3334e653bbc3574ec692716f27c51b81e744bc78629e12c73179
+ checksum: 9fc4c96c8e1e26f9d0ddc0aef41951e88a0d7266f37ccaf9f45c9303419586ff607f8fc78f9c216152899382d42a07b24105c3014848fb41b4301ae856e3491e
languageName: node
linkType: hard
-"@embroider/shared-internals@npm:2.8.1, @embroider/shared-internals@npm:^2.0.0, @embroider/shared-internals@npm:^2.8.1":
- version: 2.8.1
- resolution: "@embroider/shared-internals@npm:2.8.1"
+"@embroider/shared-internals@npm:2.6.2, @embroider/shared-internals@npm:^2.0.0, @embroider/shared-internals@npm:^2.6.0":
+ version: 2.6.2
+ resolution: "@embroider/shared-internals@npm:2.6.2"
dependencies:
babel-import-util: ^2.0.0
debug: ^4.3.2
ember-rfc176-data: ^0.3.17
fs-extra: ^9.1.0
- is-subdir: ^1.2.0
js-string-escape: ^1.0.1
lodash: ^4.17.21
minimatch: ^3.0.4
- pkg-entry-points: ^1.1.0
resolve-package-path: ^4.0.1
semver: ^7.3.5
typescript-memoize: ^1.0.1
- checksum: ec70adf4a21c93d92b9fd88bf2b988c124021510645185f64d9e30c7cbf5ce4455b0445c783f42e89ef80df02cb4706473de325852e31087e5b8b34c117641a7
+ checksum: 391b4a9fd6aae640533862c50ffdc459083ec891bca294d4487340124de54436d487d2680e7eecde0ae1ff58b8ca61288a2314e15afcf17642b30a63ff835c6f
languageName: node
linkType: hard
-"@embroider/util@npm:^1.0.0, @embroider/util@npm:^1.13.2":
+"@embroider/util@npm:^1.0.0, @embroider/util@npm:^1.13.1":
version: 1.13.2
resolution: "@embroider/util@npm:1.13.2"
dependencies:
@@ -1887,20 +2101,20 @@ __metadata:
linkType: hard
"@eslint-community/eslint-utils@npm:^4.1.2, @eslint-community/eslint-utils@npm:^4.2.0, @eslint-community/eslint-utils@npm:^4.4.0":
- version: 4.4.1
- resolution: "@eslint-community/eslint-utils@npm:4.4.1"
+ version: 4.4.0
+ resolution: "@eslint-community/eslint-utils@npm:4.4.0"
dependencies:
- eslint-visitor-keys: ^3.4.3
+ eslint-visitor-keys: ^3.3.0
peerDependencies:
eslint: ^6.0.0 || ^7.0.0 || >=8.0.0
- checksum: a7ffc838eb6a9ef594cda348458ccf38f34439ac77dc090fa1c120024bcd4eb911dfd74d5ef44d42063e7949fa7c5123ce714a015c4abb917d4124be1bd32bfe
+ checksum: cdfe3ae42b4f572cbfb46d20edafe6f36fc5fb52bf2d90875c58aefe226892b9677fef60820e2832caf864a326fe4fc225714c46e8389ccca04d5f9288aabd22
languageName: node
linkType: hard
"@eslint-community/regexpp@npm:^4.11.0, @eslint-community/regexpp@npm:^4.4.0, @eslint-community/regexpp@npm:^4.6.1":
- version: 4.12.1
- resolution: "@eslint-community/regexpp@npm:4.12.1"
- checksum: 0d628680e204bc316d545b4993d3658427ca404ae646ce541fcc65306b8c712c340e5e573e30fb9f85f4855c0c5f6dca9868931f2fcced06417fbe1a0c6cd2d6
+ version: 4.11.0
+ resolution: "@eslint-community/regexpp@npm:4.11.0"
+ checksum: 97d2fe46690b69417a551bd19a3dc53b6d9590d2295c43cc4c4e44e64131af541e2f4a44d5c12e87de990403654d3dae9d33600081f3a2f0386b368abc9111ec
languageName: node
linkType: hard
@@ -1921,49 +2135,49 @@ __metadata:
languageName: node
linkType: hard
-"@eslint/js@npm:8.57.1":
- version: 8.57.1
- resolution: "@eslint/js@npm:8.57.1"
- checksum: 2afb77454c06e8316793d2e8e79a0154854d35e6782a1217da274ca60b5044d2c69d6091155234ed0551a1e408f86f09dd4ece02752c59568fa403e60611e880
+"@eslint/js@npm:8.57.0":
+ version: 8.57.0
+ resolution: "@eslint/js@npm:8.57.0"
+ checksum: 315dc65b0e9893e2bff139bddace7ea601ad77ed47b4550e73da8c9c2d2766c7a575c3cddf17ef85b8fd6a36ff34f91729d0dcca56e73ca887c10df91a41b0bb
languageName: node
linkType: hard
"@floating-ui/core@npm:^1.6.0":
- version: 1.6.8
- resolution: "@floating-ui/core@npm:1.6.8"
+ version: 1.6.5
+ resolution: "@floating-ui/core@npm:1.6.5"
dependencies:
- "@floating-ui/utils": ^0.2.8
- checksum: 82faa6ea9d57e466779324e51308d6d49c098fb9d184a08d9bb7f4fad83f08cc070fc491f8d56f0cad44a16215fb43f9f829524288413e6c33afcb17303698de
+ "@floating-ui/utils": ^0.2.5
+ checksum: 8e6c62a6e9223fba9afbcaca8afe408788a2bc8ab1b2f5734a26d5b02d4017a2baffc7176a938a610fd243e6a983ada605f259b35c88813e2230dd29906a78fd
languageName: node
linkType: hard
"@floating-ui/dom@npm:^1.6.3":
- version: 1.6.12
- resolution: "@floating-ui/dom@npm:1.6.12"
+ version: 1.6.8
+ resolution: "@floating-ui/dom@npm:1.6.8"
dependencies:
"@floating-ui/core": ^1.6.0
- "@floating-ui/utils": ^0.2.8
- checksum: 956514ed100c0c853e73ace9e3c877b7e535444d7c31326f687a7690d49cb1e59ef457e9c93b76141aea0d280e83ed5a983bb852718b62eea581f755454660f6
+ "@floating-ui/utils": ^0.2.5
+ checksum: bab6954bdde69afeaf8dbbf335818fe710c6eae1c62856ae1e09fa6abdc056bf5995e053638b76fa6661b8384c363ca2af874ab0448c3f6943808f4f8f77f3ea
languageName: node
linkType: hard
-"@floating-ui/utils@npm:^0.2.8":
- version: 0.2.8
- resolution: "@floating-ui/utils@npm:0.2.8"
- checksum: deb98bba017c4e073c7ad5740d4dec33a4d3e0942d412e677ac0504f3dade15a68fc6fd164d43c93c0bb0bcc5dc5015c1f4080dfb1a6161140fe660624f7c875
+"@floating-ui/utils@npm:^0.2.5":
+ version: 0.2.5
+ resolution: "@floating-ui/utils@npm:0.2.5"
+ checksum: 32834fe0fec5ee89187f8defd0b10813d725dab7dc6ed1545ded6655630bac5d438f0c991d019d675585e118846f12391236fc2886a5c73a57576e7de3eca3f9
languageName: node
linkType: hard
-"@glimmer/compiler@npm:0.87.1":
- version: 0.87.1
- resolution: "@glimmer/compiler@npm:0.87.1"
+"@glimmer/compiler@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/compiler@npm:0.84.3"
dependencies:
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/syntax": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/vm": ^0.87.1
- "@glimmer/wire-format": ^0.87.1
- checksum: cce261bcc862a6f73552c2c465076b1da16693d04acf7fe40b3ce079360d06a8c079ff3dbcc8c88996c5af0c447502e46c8c49a1866df6a78834e8f4c7e03eee
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/syntax": 0.84.3
+ "@glimmer/util": 0.84.3
+ "@glimmer/wire-format": 0.84.3
+ "@simple-dom/interface": ^1.4.0
+ checksum: 5d0e9f5eee87f6e3f2a7ee4d05177b19f9e3e519010f27ae2abbbbb016fd96a6d92b3686593e76d9a0df71764022069e97b8a47cc6e2bbd7a7cac82cf379d74f
languageName: node
linkType: hard
@@ -1989,26 +2203,15 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/debug@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/debug@npm:0.87.1"
- dependencies:
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/vm": ^0.87.1
- checksum: 5522fc922a287182e2e4453337c3758e54501bb3f64bd77e8289811108cbf5590709cd2b09ae8221fcf0aa86b4e1044a16a37a5688fabaf79d09bd85cad2f0ae
- languageName: node
- linkType: hard
-
-"@glimmer/destroyable@npm:0.87.1, @glimmer/destroyable@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/destroyable@npm:0.87.1"
+"@glimmer/destroyable@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/destroyable@npm:0.84.3"
dependencies:
"@glimmer/env": 0.1.7
- "@glimmer/global-context": ^0.87.1
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/util": ^0.87.1
- checksum: 14baba57435837e232ff06e8a30e3e23650626362b6b1f956f2b6d9741c83fa0b13b65df22956d4d431dd2679a5fb6e123efcd9b8dc34d457d5b781f1e72a1e5
+ "@glimmer/global-context": 0.84.3
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/util": 0.84.3
+ checksum: 4c162eb35b942e9949b918d641cefe8616fc5e1a1d89737c20252942fc1dd85f7a42667bc87f09729b8d2bd592b4d8dc4ba5326e14b7faa97ee78c1204bc051e
languageName: node
linkType: hard
@@ -2019,13 +2222,14 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/encoder@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/encoder@npm:0.87.1"
+"@glimmer/encoder@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/encoder@npm:0.84.3"
dependencies:
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/vm": ^0.87.1
- checksum: 463f57aa7b9b48844889139f8711cff5d4942e673bcac13a58531647f0da9fcfa4b119cb2098adf402d4ab56bc9ba1ffa65316c2e9e2653840b2dacb3decd873
+ "@glimmer/env": 0.1.7
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/vm": 0.84.3
+ checksum: 54581ce831590e73e411ab0d6d9bf28fb9a7977601ff27f4804cf52d5344a698ff2d4125f402c99fdb73192028a43b3d8c1051785bd0884743cde3b5c69b3d28
languageName: node
linkType: hard
@@ -2045,13 +2249,6 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/global-context@npm:0.87.1, @glimmer/global-context@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/global-context@npm:0.87.1"
- checksum: 1436b35eb12a9bb3689303f2ea98aeb127daa2e6eaa51ea0518b0a7b4585b7d5847d2ed680615fcffc2cad400d4e7f7b2ae03d6a5df440e4f30f70b7a8f2b427
- languageName: node
- linkType: hard
-
"@glimmer/interfaces@npm:0.84.3":
version: 0.84.3
resolution: "@glimmer/interfaces@npm:0.84.3"
@@ -2061,101 +2258,80 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/interfaces@npm:0.87.1, @glimmer/interfaces@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/interfaces@npm:0.87.1"
- dependencies:
- "@simple-dom/interface": ^1.4.0
- checksum: 5218fd6b555fa33be4673fed43f42e81c7490aaa081461ac6fbca8b18230ba3e3a96932386d5a6890ce1ef85687bf1911b0e2a8c7d905b023cafacb8e52fb289
+"@glimmer/low-level@npm:0.78.2":
+ version: 0.78.2
+ resolution: "@glimmer/low-level@npm:0.78.2"
+ checksum: 9c2b20f054dda48f2da8243c5a06b82c36a20b4ea0a6159e0f9cce334380fceeadbbdc34c7f287001c939d773a986429032ad742702f2988de075717eeafc5fd
languageName: node
linkType: hard
-"@glimmer/manager@npm:0.87.1, @glimmer/manager@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/manager@npm:0.87.1"
+"@glimmer/manager@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/manager@npm:0.84.3"
dependencies:
- "@glimmer/debug": ^0.87.1
- "@glimmer/destroyable": ^0.87.1
+ "@glimmer/destroyable": 0.84.3
"@glimmer/env": 0.1.7
- "@glimmer/global-context": ^0.87.1
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/reference": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/validator": ^0.87.1
- "@glimmer/vm": ^0.87.1
- checksum: 920a660c463f645e392092d297d83fbd70c694afefce02884d5dcebf454ba601de291d3fb6db9a70bf44f61f2ac12f246b4bfa57e31198175f1d567b8d0e3648
+ "@glimmer/global-context": 0.84.3
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/reference": 0.84.3
+ "@glimmer/util": 0.84.3
+ "@glimmer/validator": 0.84.3
+ checksum: 736653029bc34071492b75d5cc88334a523041885f3130473f072b98b631c1fc7a4217ff948744107b5c0c1b03b9610eb4dea10d904a684f00e21823612ea582
languageName: node
linkType: hard
-"@glimmer/node@npm:0.87.1":
- version: 0.87.1
- resolution: "@glimmer/node@npm:0.87.1"
+"@glimmer/node@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/node@npm:0.84.3"
dependencies:
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/runtime": ^0.87.1
- "@glimmer/util": ^0.87.1
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/runtime": 0.84.3
+ "@glimmer/util": 0.84.3
"@simple-dom/document": ^1.4.0
- checksum: 6e41dabeb584d6056c2f57dfbe089f0cdc796256185462b94d037de392393c820770ccb3c0612a86dfb23354c733b3f10e3673544c37b05620eaa31befabc2d9
+ "@simple-dom/interface": ^1.4.0
+ checksum: f7c5eaaa2133b73adb7b398b40fa443167e5a2c23c67202707bed5a6d930f2299cbac5c4bc2fd2c2ccad56985bc0b19bc80dae09c97646ee96180f529c8fa94b
languageName: node
linkType: hard
-"@glimmer/opcode-compiler@npm:0.87.1, @glimmer/opcode-compiler@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/opcode-compiler@npm:0.87.1"
+"@glimmer/opcode-compiler@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/opcode-compiler@npm:0.84.3"
dependencies:
- "@glimmer/debug": ^0.87.1
- "@glimmer/encoder": ^0.87.1
+ "@glimmer/encoder": 0.84.3
"@glimmer/env": 0.1.7
- "@glimmer/global-context": ^0.87.1
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/manager": ^0.87.1
- "@glimmer/reference": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/vm": ^0.87.1
- "@glimmer/wire-format": ^0.87.1
- checksum: 22943e0bfe7e65306e7dc337cbb4a9a7490282bcd0ea3b53ad74a8918715b928e1ad613070387712967cdcaef06690289df14e9bb1ccc5846fbfbffbbdc62b82
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/reference": 0.84.3
+ "@glimmer/util": 0.84.3
+ "@glimmer/vm": 0.84.3
+ "@glimmer/wire-format": 0.84.3
+ checksum: f0d66298b8f4722e9a010eb3d88c607c7b7ca0b7eae5a74657ba07d7e66e0fd2e77b7ab9a7429713213d189292bba15ee824388018d95a4d233de1356b171cd7
languageName: node
linkType: hard
-"@glimmer/owner@npm:0.87.1, @glimmer/owner@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/owner@npm:0.87.1"
+"@glimmer/owner@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/owner@npm:0.84.3"
dependencies:
- "@glimmer/util": ^0.87.1
- checksum: 296e575ebf9e807bbea3ede47a9bee5fbe32a3ad65ab75b0bcd3dbd29c00132ce2772abed1aba39e02447e7e8cffba5f8d94848f83d6187dc15df849d34f063f
+ "@glimmer/util": 0.84.3
+ checksum: ba5196b1144d94d189a4674b2493b6ecfc05877d1b0a2e1171c2742d6914cdc4cae36377e215f01265d2bb7d49aa4b07488b561e5358fb1268cabcaf859f1e7b
languageName: node
linkType: hard
-"@glimmer/program@npm:0.87.1, @glimmer/program@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/program@npm:0.87.1"
+"@glimmer/program@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/program@npm:0.84.3"
dependencies:
- "@glimmer/encoder": ^0.87.1
+ "@glimmer/encoder": 0.84.3
"@glimmer/env": 0.1.7
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/manager": ^0.87.1
- "@glimmer/opcode-compiler": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/vm": ^0.87.1
- "@glimmer/wire-format": ^0.87.1
- checksum: 40d247fa7c931dfabdfb1354818a32260bb23ad1ad342d75c9128211e519b4f3e2f4c7d154c38ca5e3b47f347b2c169269ad6504abf14312837b9c59d559a3d3
- languageName: node
- linkType: hard
-
-"@glimmer/reference@npm:0.87.1, @glimmer/reference@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/reference@npm:0.87.1"
- dependencies:
- "@glimmer/env": ^0.1.7
- "@glimmer/global-context": ^0.87.1
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/validator": ^0.87.1
- checksum: 8232ff681a88d629cdbb08c13f8fdf708a8a415a98d5b4d939553310bbf3f8b2fe15109b3016f3ad8469fb81497d358af8e80dae8f14ac2ba5cfe0585ea3126c
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/manager": 0.84.3
+ "@glimmer/opcode-compiler": 0.84.3
+ "@glimmer/util": 0.84.3
+ checksum: 0890297248910862686c42c9038f3fd7c13baa464b0f54609668016661216fea07c4ba076a30233ea9d2c48d586baa7eb62e60b30da5e46a9713c2ebd033da7a
languageName: node
linkType: hard
-"@glimmer/reference@npm:^0.84.3":
+"@glimmer/reference@npm:0.84.3, @glimmer/reference@npm:^0.84.3":
version: 0.84.3
resolution: "@glimmer/reference@npm:0.84.3"
dependencies:
@@ -2168,40 +2344,28 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/runtime@npm:0.87.1, @glimmer/runtime@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/runtime@npm:0.87.1"
+"@glimmer/runtime@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/runtime@npm:0.84.3"
dependencies:
- "@glimmer/destroyable": ^0.87.1
+ "@glimmer/destroyable": 0.84.3
"@glimmer/env": 0.1.7
- "@glimmer/global-context": ^0.87.1
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/manager": ^0.87.1
- "@glimmer/owner": ^0.87.1
- "@glimmer/program": ^0.87.1
- "@glimmer/reference": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/validator": ^0.87.1
- "@glimmer/vm": ^0.87.1
- "@glimmer/wire-format": ^0.87.1
- checksum: ed7c28d839eef383b2283f8a51cfcc6cb7e566070952b0388b0e333ddb791c5b3b895863e5e28a04917d8e553eb186d16172beb10abd4ce3fba09b6c47b546b3
- languageName: node
- linkType: hard
-
-"@glimmer/syntax@npm:0.87.1, @glimmer/syntax@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/syntax@npm:0.87.1"
- dependencies:
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/util": ^0.87.1
- "@glimmer/wire-format": ^0.87.1
- "@handlebars/parser": ~2.0.0
- simple-html-tokenizer: ^0.5.11
- checksum: 887ea4a4c975d9ae0d48b17725d55b5c6c886b49356d73ab60d322f5aa91fa8c7a3544f7f877babbf656c924e4655a75480d408e253f3a13200398522986d56f
+ "@glimmer/global-context": 0.84.3
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/low-level": 0.78.2
+ "@glimmer/owner": 0.84.3
+ "@glimmer/program": 0.84.3
+ "@glimmer/reference": 0.84.3
+ "@glimmer/util": 0.84.3
+ "@glimmer/validator": 0.84.3
+ "@glimmer/vm": 0.84.3
+ "@glimmer/wire-format": 0.84.3
+ "@simple-dom/interface": ^1.4.0
+ checksum: 9af668d7f5a6d6be161ce2a9431cfb372fe04a8ec2c22f17d68f8beb1ab8099176006d9e5aad677bace538453436c1520feac281195ddf94b4b5fbb867db327a
languageName: node
linkType: hard
-"@glimmer/syntax@npm:^0.84.2, @glimmer/syntax@npm:^0.84.3":
+"@glimmer/syntax@npm:0.84.3, @glimmer/syntax@npm:^0.84.2, @glimmer/syntax@npm:^0.84.3":
version: 0.84.3
resolution: "@glimmer/syntax@npm:0.84.3"
dependencies:
@@ -2234,16 +2398,6 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/util@npm:0.87.1, @glimmer/util@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/util@npm:0.87.1"
- dependencies:
- "@glimmer/env": 0.1.7
- "@glimmer/interfaces": ^0.87.1
- checksum: c3f74f6e00d8f5b7391f3c26921bb56b1c220afe2703164f1579a45dc6f154b7671e045729e0a36b8bd5ad28cdf045ef79a77e87a11905054b536f55e7fe83d1
- languageName: node
- linkType: hard
-
"@glimmer/util@npm:^0.44.0":
version: 0.44.0
resolution: "@glimmer/util@npm:0.44.0"
@@ -2261,18 +2415,6 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/validator@npm:0.87.1, @glimmer/validator@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/validator@npm:0.87.1"
- dependencies:
- "@glimmer/env": ^0.1.7
- "@glimmer/global-context": ^0.87.1
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/util": ^0.87.1
- checksum: c2d6f883e78795bf776206c6d5c53e001005547e72614b9f0b7aa7394133dd12683d93f0f4351851d436dc8efe5598e0ab70db6c8a4a6383e4d96a7649230d8b
- languageName: node
- linkType: hard
-
"@glimmer/validator@npm:^0.44.0":
version: 0.44.0
resolution: "@glimmer/validator@npm:0.44.0"
@@ -2280,32 +2422,32 @@ __metadata:
languageName: node
linkType: hard
-"@glimmer/vm-babel-plugins@npm:0.87.1":
- version: 0.87.1
- resolution: "@glimmer/vm-babel-plugins@npm:0.87.1"
+"@glimmer/vm-babel-plugins@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/vm-babel-plugins@npm:0.84.3"
dependencies:
babel-plugin-debug-macros: ^0.3.4
- checksum: 8f123d38a537e6602e837f4a8ea7d6d5dd0f00ff45f7ee776d7f7fa23ec25e7b563cffb38e8fb08e1fbb13f43e7aa51a720aff7d3560f3d8ca17e6964b34973f
+ checksum: e299d6083845425bb8ae1da454836dfa8244181c3729b8e764b5d88681e4c988d01e42896f596c2c7b0fde0ca583341541ee17fc45a2567c8bc16e0dd8d15f8c
languageName: node
linkType: hard
-"@glimmer/vm@npm:0.87.1, @glimmer/vm@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/vm@npm:0.87.1"
+"@glimmer/vm@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/vm@npm:0.84.3"
dependencies:
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/util": ^0.87.1
- checksum: e2d4f6ab21c6cf3d5c2045955ce14194daa806cdc49af5f8aa68b99210516e1a7b535059db3006983ae50e97bdbc3f69bce88497fbf5fbed7992cd03c87cf0bb
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/util": 0.84.3
+ checksum: d47cbb9440d6c146f74ef1c072e7c429b936e0678e3d57a5988f74ee18e0552a4b966407cac4bb041d3efc1c90a23916a7bac5153425dae26b5c396761a9fdca
languageName: node
linkType: hard
-"@glimmer/wire-format@npm:^0.87.1":
- version: 0.87.1
- resolution: "@glimmer/wire-format@npm:0.87.1"
+"@glimmer/wire-format@npm:0.84.3":
+ version: 0.84.3
+ resolution: "@glimmer/wire-format@npm:0.84.3"
dependencies:
- "@glimmer/interfaces": ^0.87.1
- "@glimmer/util": ^0.87.1
- checksum: 0695a515b5c45c9cea78ecceeb5ee47dc76c59601c5594a7294af309037cf704e1d6007b26173c75503e58b58649ff8275986d7b8a98663033874c6ddd956c79
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/util": 0.84.3
+ checksum: a453d23dbf79d771d3c99a7ef9cc1e0f2a79d1cfaab2e6ee246e5b1e594538987fd19db0c07ebb5708e290914af7d63b842f04cccba4c0ceb5d7f33be3f4ffee
languageName: node
linkType: hard
@@ -2316,42 +2458,43 @@ __metadata:
languageName: node
linkType: hard
-"@hashicorp/design-system-components@npm:~4.13.0":
- version: 4.13.1
- resolution: "@hashicorp/design-system-components@npm:4.13.1"
+"@hashicorp/design-system-components@npm:~4.7.0":
+ version: 4.7.0
+ resolution: "@hashicorp/design-system-components@npm:4.7.0"
dependencies:
"@ember/render-modifiers": ^2.0.5
"@ember/string": ^3.1.1
"@ember/test-waiters": ^3.1.0
"@embroider/addon-shim": ^1.8.7
"@floating-ui/dom": ^1.6.3
- "@hashicorp/design-system-tokens": ^2.2.1
- "@hashicorp/flight-icons": ^3.7.0
+ "@hashicorp/design-system-tokens": ^2.1.0
+ "@hashicorp/ember-flight-icons": ^5.1.3
+ "@hashicorp/flight-icons": ^3.5.0
decorator-transforms: ^1.1.0
- ember-a11y-refocus: ^4.1.3
+ ember-a11y-refocus: ^4.1.0
ember-cli-sass: ^11.0.1
ember-composable-helpers: ^5.0.0
ember-element-helper: ^0.8.5
ember-focus-trap: ^1.1.0
- ember-get-config: ^2.1.1
+ ember-keyboard: ^8.2.1
ember-modifier: ^4.1.0
ember-power-select: ^8.2.0
ember-stargate: ^0.4.3
- ember-style-modifier: ^4.4.0
+ ember-style-modifier: ^3.0.1
ember-truth-helpers: ^4.0.3
prismjs: ^1.29.0
sass: ^1.69.5
tippy.js: ^6.3.7
peerDependencies:
ember-source: ^3.28.0 || ^4.0.0 || ^5.3.0
- checksum: d8a07a37f0be0ca8b3cbaf4e96c6f41fcdbbf1dd5b8ac927b5607b537908de7d0dd3e8e31b9132761bd87f0a0ad0d4a41ccc6aae71d2e7916e62d91a51b62ff1
+ checksum: 88cafa21f11d761a226d9b065af715d632b1ea38699aac8294d035a7f7ab5518263c894813d0d4dc23f11dea440aaf6e58408531c7bbb3d9b2455a9dc9a9ba5c
languageName: node
linkType: hard
-"@hashicorp/design-system-tokens@npm:^2.2.1":
- version: 2.2.1
- resolution: "@hashicorp/design-system-tokens@npm:2.2.1"
- checksum: 0e4348ab27b2da4725068b5dab83474ad496895d2b422708b2c08cfc39289830f3756a1b352aff6e6095f64e3476ac227034ab02c7d2e0cc49d13a81ef69f6f3
+"@hashicorp/design-system-tokens@npm:^2.1.0":
+ version: 2.2.0
+ resolution: "@hashicorp/design-system-tokens@npm:2.2.0"
+ checksum: 654978be98a94c1f478e472793b9c1b62762bb30f9e0a097c2e1effd4fb2b4619eeef347e39599aea4dd63c451e0f41e698d887827ca2496de83a9e1e1dd8525
languageName: node
linkType: hard
@@ -2367,21 +2510,21 @@ __metadata:
languageName: node
linkType: hard
-"@hashicorp/flight-icons@npm:^3.5.0, @hashicorp/flight-icons@npm:^3.7.0":
- version: 3.7.0
- resolution: "@hashicorp/flight-icons@npm:3.7.0"
- checksum: 9d043a8df428ce47475a8f2605ad31119a9da766b2310eeba207fd311ae5c0422c19fb91b636b744c590a5c0d19075cb295787961d3e46fdd5e21ef17b2df606
+"@hashicorp/flight-icons@npm:^3.5.0":
+ version: 3.5.0
+ resolution: "@hashicorp/flight-icons@npm:3.5.0"
+ checksum: a06f6606d4df682d2756eddebf92765b18774cdcc8e2050e440c85cff16126ae72455869b967e2aef15d97a5caf517578d66cf6fc098f1d3564a9deec9a95ebf
languageName: node
linkType: hard
-"@humanwhocodes/config-array@npm:^0.13.0":
- version: 0.13.0
- resolution: "@humanwhocodes/config-array@npm:0.13.0"
+"@humanwhocodes/config-array@npm:^0.11.14":
+ version: 0.11.14
+ resolution: "@humanwhocodes/config-array@npm:0.11.14"
dependencies:
- "@humanwhocodes/object-schema": ^2.0.3
+ "@humanwhocodes/object-schema": ^2.0.2
debug: ^4.3.1
minimatch: ^3.0.5
- checksum: eae69ff9134025dd2924f0b430eb324981494be26f0fddd267a33c28711c4db643242cf9fddf7dadb9d16c96b54b2d2c073e60a56477df86e0173149313bd5d6
+ checksum: 861ccce9eaea5de19546653bccf75bf09fe878bc39c3aab00aeee2d2a0e654516adad38dd1098aab5e3af0145bbcbf3f309bdf4d964f8dab9dcd5834ae4c02f2
languageName: node
linkType: hard
@@ -2392,7 +2535,7 @@ __metadata:
languageName: node
linkType: hard
-"@humanwhocodes/object-schema@npm:^2.0.3":
+"@humanwhocodes/object-schema@npm:^2.0.2":
version: 2.0.3
resolution: "@humanwhocodes/object-schema@npm:2.0.3"
checksum: d3b78f6c5831888c6ecc899df0d03bcc25d46f3ad26a11d7ea52944dc36a35ef543fad965322174238d677a43d5c694434f6607532cff7077062513ad7022631
@@ -2407,9 +2550,9 @@ __metadata:
linkType: hard
"@inquirer/figures@npm:^1.0.3":
- version: 1.0.7
- resolution: "@inquirer/figures@npm:1.0.7"
- checksum: 82edc998d0ace2f147eb332177f451c02e6a4a6e829d47817f5a4b3341c12cd0850b92ee3187d483328cce5824b870ed75e868850b6ac819447b9d56501f01cb
+ version: 1.0.5
+ resolution: "@inquirer/figures@npm:1.0.5"
+ checksum: 01dc7b95fe7b030b0577d59f45c4fa5c002dccb43ac75ff106d7142825e09dee63a6f9c42b044da2bc964bf38c40229a112a26505a68f3912b15dc8304106bbc
languageName: node
linkType: hard
@@ -2462,7 +2605,7 @@ __metadata:
languageName: node
linkType: hard
-"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.14, @jridgewell/sourcemap-codec@npm:^1.5.0":
+"@jridgewell/sourcemap-codec@npm:^1.4.10, @jridgewell/sourcemap-codec@npm:^1.4.14, @jridgewell/sourcemap-codec@npm:^1.4.15":
version: 1.5.0
resolution: "@jridgewell/sourcemap-codec@npm:1.5.0"
checksum: 05df4f2538b3b0f998ea4c1cd34574d0feba216fa5d4ccaef0187d12abf82eafe6021cec8b49f9bb4d90f2ba4582ccc581e72986a5fcf4176ae0cfeb04cf52ec
@@ -2519,15 +2662,15 @@ __metadata:
linkType: hard
"@mdn/browser-compat-data@npm:^5.2.34, @mdn/browser-compat-data@npm:^5.3.13":
- version: 5.6.14
- resolution: "@mdn/browser-compat-data@npm:5.6.14"
- checksum: 8990ee469aca8e7d79b3ae79cc0670fe2ba42982ede71c0c70d161a58b445f9a642e3bbe74d2da1db3ef10943e8b9458ee63a7d992fc06bf868f9c452cc5dd52
+ version: 5.5.40
+ resolution: "@mdn/browser-compat-data@npm:5.5.40"
+ checksum: 58030a5b95b4d92aa695e72cfe8319e4605e985d11e52fedf8bb7469ff8496ccb4c0501bcb68ae5caeb3f69439f3422611996c6de9e4f4377dbb0b3af12fe55c
languageName: node
linkType: hard
"@messageformat/core@npm:^3.0.1":
- version: 3.4.0
- resolution: "@messageformat/core@npm:3.4.0"
+ version: 3.3.0
+ resolution: "@messageformat/core@npm:3.3.0"
dependencies:
"@messageformat/date-skeleton": ^1.0.0
"@messageformat/number-skeleton": ^1.0.0
@@ -2535,14 +2678,14 @@ __metadata:
"@messageformat/runtime": ^3.0.1
make-plural: ^7.0.0
safe-identifier: ^0.4.1
- checksum: c9b75b510a8b0193a4ee6d600c97f7adacbb98125bd7c0ef60c9ebfb97591d0ecf8247868373919e79adcff1fc6aecd04a905160e212976d2846e4f0dc3d705f
+ checksum: 9c4e8cb878567711b132305ba798358751f5db43cdbcd450469dd6bc880fcf7edb313bf3f0eb8ad3e78c86aeaa7c32050b611dfd3b8ec2ae4e58373973082ea0
languageName: node
linkType: hard
"@messageformat/date-skeleton@npm:^1.0.0":
- version: 1.1.0
- resolution: "@messageformat/date-skeleton@npm:1.1.0"
- checksum: 599adc2aba1639b9505420bce61035ce8cbf1a38bf77b39be5de7b9ecb4c4a2290ae3ebc2ee94ba5aabf52d22654b5febb961694dc52d9a4e6fb859d758aaea8
+ version: 1.0.1
+ resolution: "@messageformat/date-skeleton@npm:1.0.1"
+ checksum: 0832029a18ae54c81d4473eaa764cebbabe084d1a3253a6d4975e5802bff7416a51d43522aad9292eb9663735282a7667e2818efc92905c497ca87424d822ceb
languageName: node
linkType: hard
@@ -2643,150 +2786,6 @@ __metadata:
languageName: node
linkType: hard
-"@parcel/watcher-android-arm64@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-android-arm64@npm:2.5.0"
- conditions: os=android & cpu=arm64
- languageName: node
- linkType: hard
-
-"@parcel/watcher-darwin-arm64@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-darwin-arm64@npm:2.5.0"
- conditions: os=darwin & cpu=arm64
- languageName: node
- linkType: hard
-
-"@parcel/watcher-darwin-x64@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-darwin-x64@npm:2.5.0"
- conditions: os=darwin & cpu=x64
- languageName: node
- linkType: hard
-
-"@parcel/watcher-freebsd-x64@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-freebsd-x64@npm:2.5.0"
- conditions: os=freebsd & cpu=x64
- languageName: node
- linkType: hard
-
-"@parcel/watcher-linux-arm-glibc@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-linux-arm-glibc@npm:2.5.0"
- conditions: os=linux & cpu=arm & libc=glibc
- languageName: node
- linkType: hard
-
-"@parcel/watcher-linux-arm-musl@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-linux-arm-musl@npm:2.5.0"
- conditions: os=linux & cpu=arm & libc=musl
- languageName: node
- linkType: hard
-
-"@parcel/watcher-linux-arm64-glibc@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-linux-arm64-glibc@npm:2.5.0"
- conditions: os=linux & cpu=arm64 & libc=glibc
- languageName: node
- linkType: hard
-
-"@parcel/watcher-linux-arm64-musl@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-linux-arm64-musl@npm:2.5.0"
- conditions: os=linux & cpu=arm64 & libc=musl
- languageName: node
- linkType: hard
-
-"@parcel/watcher-linux-x64-glibc@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-linux-x64-glibc@npm:2.5.0"
- conditions: os=linux & cpu=x64 & libc=glibc
- languageName: node
- linkType: hard
-
-"@parcel/watcher-linux-x64-musl@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-linux-x64-musl@npm:2.5.0"
- conditions: os=linux & cpu=x64 & libc=musl
- languageName: node
- linkType: hard
-
-"@parcel/watcher-win32-arm64@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-win32-arm64@npm:2.5.0"
- conditions: os=win32 & cpu=arm64
- languageName: node
- linkType: hard
-
-"@parcel/watcher-win32-ia32@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-win32-ia32@npm:2.5.0"
- conditions: os=win32 & cpu=ia32
- languageName: node
- linkType: hard
-
-"@parcel/watcher-win32-x64@npm:2.5.0":
- version: 2.5.0
- resolution: "@parcel/watcher-win32-x64@npm:2.5.0"
- conditions: os=win32 & cpu=x64
- languageName: node
- linkType: hard
-
-"@parcel/watcher@npm:^2.4.1":
- version: 2.5.0
- resolution: "@parcel/watcher@npm:2.5.0"
- dependencies:
- "@parcel/watcher-android-arm64": 2.5.0
- "@parcel/watcher-darwin-arm64": 2.5.0
- "@parcel/watcher-darwin-x64": 2.5.0
- "@parcel/watcher-freebsd-x64": 2.5.0
- "@parcel/watcher-linux-arm-glibc": 2.5.0
- "@parcel/watcher-linux-arm-musl": 2.5.0
- "@parcel/watcher-linux-arm64-glibc": 2.5.0
- "@parcel/watcher-linux-arm64-musl": 2.5.0
- "@parcel/watcher-linux-x64-glibc": 2.5.0
- "@parcel/watcher-linux-x64-musl": 2.5.0
- "@parcel/watcher-win32-arm64": 2.5.0
- "@parcel/watcher-win32-ia32": 2.5.0
- "@parcel/watcher-win32-x64": 2.5.0
- detect-libc: ^1.0.3
- is-glob: ^4.0.3
- micromatch: ^4.0.5
- node-addon-api: ^7.0.0
- node-gyp: latest
- dependenciesMeta:
- "@parcel/watcher-android-arm64":
- optional: true
- "@parcel/watcher-darwin-arm64":
- optional: true
- "@parcel/watcher-darwin-x64":
- optional: true
- "@parcel/watcher-freebsd-x64":
- optional: true
- "@parcel/watcher-linux-arm-glibc":
- optional: true
- "@parcel/watcher-linux-arm-musl":
- optional: true
- "@parcel/watcher-linux-arm64-glibc":
- optional: true
- "@parcel/watcher-linux-arm64-musl":
- optional: true
- "@parcel/watcher-linux-x64-glibc":
- optional: true
- "@parcel/watcher-linux-x64-musl":
- optional: true
- "@parcel/watcher-win32-arm64":
- optional: true
- "@parcel/watcher-win32-ia32":
- optional: true
- "@parcel/watcher-win32-x64":
- optional: true
- checksum: 253f93c5f443dfbb638df58712df077fe46ff7e01e7c78df0c4ceb001e8f5b31db01eb7ddac3ae4159722c4d1525894cd4ce5be49f5e6c14a3a52cbbf9f41cbf
- languageName: node
- linkType: hard
-
"@pkgjs/parseargs@npm:^0.11.0":
version: 0.11.0
resolution: "@pkgjs/parseargs@npm:0.11.0"
@@ -2896,13 +2895,6 @@ __metadata:
languageName: node
linkType: hard
-"@scarf/scarf@npm:=1.4.0":
- version: 1.4.0
- resolution: "@scarf/scarf@npm:1.4.0"
- checksum: def62aa403f7e63165ccb219efd2c420fc0b7357b0ba43397f635e4aa813ace1cdf3855a93fc559b4619bcc0469ae4767b8cb72af30ea5c0522bf4a2ecb18198
- languageName: node
- linkType: hard
-
"@simple-dom/document@npm:^1.4.0":
version: 1.4.0
resolution: "@simple-dom/document@npm:1.4.0"
@@ -2926,7 +2918,16 @@ __metadata:
languageName: node
linkType: hard
-"@sinonjs/commons@npm:^3.0.0, @sinonjs/commons@npm:^3.0.1":
+"@sinonjs/commons@npm:^2.0.0":
+ version: 2.0.0
+ resolution: "@sinonjs/commons@npm:2.0.0"
+ dependencies:
+ type-detect: 4.0.8
+ checksum: 5023ba17edf2b85ed58262313b8e9b59e23c6860681a9af0200f239fe939e2b79736d04a260e8270ddd57196851dde3ba754d7230be5c5234e777ae2ca8af137
+ languageName: node
+ linkType: hard
+
+"@sinonjs/commons@npm:^3.0.0":
version: 3.0.1
resolution: "@sinonjs/commons@npm:3.0.1"
dependencies:
@@ -2936,29 +2937,29 @@ __metadata:
linkType: hard
"@sinonjs/fake-timers@npm:^11.2.2":
- version: 11.3.1
- resolution: "@sinonjs/fake-timers@npm:11.3.1"
+ version: 11.2.2
+ resolution: "@sinonjs/fake-timers@npm:11.2.2"
dependencies:
- "@sinonjs/commons": ^3.0.1
- checksum: 173376bb02e870467705829b003c996bcac958f34238875458961ac6483c6029cd9623950d20c68b648499635a0e6d04c26aac822e4f5c120cc7c217aeba6553
+ "@sinonjs/commons": ^3.0.0
+ checksum: 68c29b0e1856fdc280df03ddbf57c726420b78e9f943a241b471edc018fb14ff36fdc1daafd6026cba08c3c7f50c976fb7ae11b88ff44cd7f609692ca7d25158
languageName: node
linkType: hard
"@sinonjs/samsam@npm:^8.0.0":
- version: 8.0.2
- resolution: "@sinonjs/samsam@npm:8.0.2"
+ version: 8.0.0
+ resolution: "@sinonjs/samsam@npm:8.0.0"
dependencies:
- "@sinonjs/commons": ^3.0.1
+ "@sinonjs/commons": ^2.0.0
lodash.get: ^4.4.2
- type-detect: ^4.1.0
- checksum: 7dc24a388ea108e513c88edaaacf98cf4ebcbda8c715551b02954ce50db0e26d6071d98ba9594e737da7fe750079a2af94633d7d46ff1481cb940383b441f29b
+ type-detect: ^4.0.8
+ checksum: 95e40d0bb9f7288e27c379bee1b03c3dc51e7e78b9d5ea6aef66a690da7e81efc4715145b561b449cefc5361a171791e3ce30fb1a46ab247d4c0766024c60a60
languageName: node
linkType: hard
"@sinonjs/text-encoding@npm:^0.7.2":
- version: 0.7.3
- resolution: "@sinonjs/text-encoding@npm:0.7.3"
- checksum: d53f3a3fc94d872b171f7f0725662f4d863e32bca8b44631be4fe67708f13058925ad7297524f882ea232144d7ab978c7fe62c5f79218fca7544cf91be3d233d
+ version: 0.7.2
+ resolution: "@sinonjs/text-encoding@npm:0.7.2"
+ checksum: fe690002a32ba06906cf87e2e8fe84d1590294586f2a7fd180a65355b53660c155c3273d8011a5f2b77209b819aa7306678ae6e4aea0df014bd7ffd4bbbcf1ab
languageName: node
linkType: hard
@@ -3010,6 +3011,15 @@ __metadata:
languageName: node
linkType: hard
+"@types/broccoli-plugin@npm:^3.0.0":
+ version: 3.0.0
+ resolution: "@types/broccoli-plugin@npm:3.0.0"
+ dependencies:
+ broccoli-plugin: "*"
+ checksum: c5daf3b3ff689a00fa18c90c08e2998c373b7ee11235fcd63ad5ad03ff5d9b844f2b84fca966682490853a443714db4d2f0b389208478a0c1d4e7666f85ca04f
+ languageName: node
+ linkType: hard
+
"@types/chai-as-promised@npm:^7.1.2":
version: 7.1.8
resolution: "@types/chai-as-promised@npm:7.1.8"
@@ -3019,19 +3029,10 @@ __metadata:
languageName: node
linkType: hard
-"@types/chai@npm:*":
- version: 5.0.1
- resolution: "@types/chai@npm:5.0.1"
- dependencies:
- "@types/deep-eql": "*"
- checksum: 53d813cbca3755c025381ad4ac8b51b17897df90316350247f9527bdba3adb48b3b1315308fbd717d9013d8e60375c0ab4bd004dc72330133486ff5db4cb0b2c
- languageName: node
- linkType: hard
-
-"@types/chai@npm:^4.2.9":
- version: 4.3.20
- resolution: "@types/chai@npm:4.3.20"
- checksum: 7c5b0c9148f1a844a8d16cb1e16c64f2e7749cab2b8284155b9e494a6b34054846e22fb2b38df6b290f9bf57e6beebb2e121940c5896bc086ad7bab7ed429f06
+"@types/chai@npm:*, @types/chai@npm:^4.2.9":
+ version: 4.3.16
+ resolution: "@types/chai@npm:4.3.16"
+ checksum: bb5f52d1b70534ed8b4bf74bd248add003ffe1156303802ea367331607c06b494da885ffbc2b674a66b4f90c9ee88759790a5f243879f6759f124f22328f5e95
languageName: node
linkType: hard
@@ -3067,39 +3068,52 @@ __metadata:
languageName: node
linkType: hard
-"@types/deep-eql@npm:*":
- version: 4.0.2
- resolution: "@types/deep-eql@npm:4.0.2"
- checksum: 249a27b0bb22f6aa28461db56afa21ec044fa0e303221a62dff81831b20c8530502175f1a49060f7099e7be06181078548ac47c668de79ff9880241968d43d0c
+"@types/eslint-scope@npm:^3.7.3":
+ version: 3.7.7
+ resolution: "@types/eslint-scope@npm:3.7.7"
+ dependencies:
+ "@types/eslint": "*"
+ "@types/estree": "*"
+ checksum: e2889a124aaab0b89af1bab5959847c5bec09809209255de0e63b9f54c629a94781daa04adb66bffcdd742f5e25a17614fb933965093c0eea64aacda4309380e
+ languageName: node
+ linkType: hard
+
+"@types/eslint@npm:*":
+ version: 9.6.0
+ resolution: "@types/eslint@npm:9.6.0"
+ dependencies:
+ "@types/estree": "*"
+ "@types/json-schema": "*"
+ checksum: 7be4b1d24f3df30b28e9cbaac6a5fa14ec1ceca7c173d9605c0ec6e0d1dcdba0452d326dd695dd980f5c14b42aa09fe41675c4f09ffc82db4f466588d3f837cb
languageName: node
linkType: hard
"@types/eslint@npm:^8.4.2, @types/eslint@npm:^8.4.9":
- version: 8.56.12
- resolution: "@types/eslint@npm:8.56.12"
+ version: 8.56.11
+ resolution: "@types/eslint@npm:8.56.11"
dependencies:
"@types/estree": "*"
"@types/json-schema": "*"
- checksum: 0f7710ee02a256c499514251f527f84de964bb29487db840408e4cde79283124a38935597636d2265756c34dd1d902e1b00ae78930d4a0b55111909cb7b80d84
+ checksum: 181a7f11bdc70523142554e4751b8571fa546f71f25fdc363298744857a01e830c9c009a61e81c1a0fd4f01a46f91d6d7098f582142fec94da8f86b94bb50b7a
languageName: node
linkType: hard
-"@types/estree@npm:*, @types/estree@npm:^1.0.5":
- version: 1.0.6
- resolution: "@types/estree@npm:1.0.6"
- checksum: 8825d6e729e16445d9a1dd2fb1db2edc5ed400799064cd4d028150701031af012ba30d6d03fe9df40f4d7a437d0de6d2b256020152b7b09bde9f2e420afdffd9
+"@types/estree@npm:*, @types/estree@npm:^1.0.0":
+ version: 1.0.5
+ resolution: "@types/estree@npm:1.0.5"
+ checksum: dd8b5bed28e6213b7acd0fb665a84e693554d850b0df423ac8076cc3ad5823a6bc26b0251d080bdc545af83179ede51dd3f6fa78cad2c46ed1f29624ddf3e41a
languageName: node
linkType: hard
"@types/express-serve-static-core@npm:^4.17.33":
- version: 4.19.6
- resolution: "@types/express-serve-static-core@npm:4.19.6"
+ version: 4.19.5
+ resolution: "@types/express-serve-static-core@npm:4.19.5"
dependencies:
"@types/node": "*"
"@types/qs": "*"
"@types/range-parser": "*"
"@types/send": "*"
- checksum: b0576eddc2d25ccdf10e68ba09598b87a4d7b2ad04a81dc847cb39fe56beb0b6a5cc017b1e00aa0060cb3b38e700384ce96d291a116a0f1e54895564a104aae9
+ checksum: 72076c2f8df55e89136d4343fc874050d56c0f4afd885772a8aa506b98c3f4f3ddc7dcba42295a8b931c61000234fd679aec79ef50db15f376bf37d46234939a
languageName: node
linkType: hard
@@ -3170,11 +3184,11 @@ __metadata:
linkType: hard
"@types/jquery@npm:^3.5.14":
- version: 3.5.32
- resolution: "@types/jquery@npm:3.5.32"
+ version: 3.5.30
+ resolution: "@types/jquery@npm:3.5.30"
dependencies:
"@types/sizzle": "*"
- checksum: 7fe7251bda3fc9d80417ef7c88ad963ccb591706bca2fb84ea5d9600498a8f9911c32302f28fd1910bb212c34193f38f63818757a9833b70e1f46c3a98a17625
+ checksum: 4594d10fa9b347062883d254a23c9259ae814ef5989ce1985f093dcc7ad4475e324ac3343aef10599c478ea4951726f0e7f79d8ed471ab04de394b7e724d6d13
languageName: node
linkType: hard
@@ -3193,12 +3207,12 @@ __metadata:
linkType: hard
"@types/markdown-it@npm:^14.1.1":
- version: 14.1.2
- resolution: "@types/markdown-it@npm:14.1.2"
+ version: 14.1.1
+ resolution: "@types/markdown-it@npm:14.1.1"
dependencies:
"@types/linkify-it": ^5
"@types/mdurl": ^2
- checksum: ad66e0b377d6af09a155bb65f675d1e2cb27d20a3d407377fe4508eb29cde1e765430b99d5129f89012e2524abb5525d629f7057a59ff9fd0967e1ff645b9ec6
+ checksum: 48279558c7c8a836d2cc598e90937a23a43516716005b4697374d320cea80ad37950d8b378839b8d449f70c296e7377db60c118deda3aaae551474617d350d15
languageName: node
linkType: hard
@@ -3247,11 +3261,11 @@ __metadata:
linkType: hard
"@types/node@npm:*, @types/node@npm:>=10.0.0":
- version: 22.9.0
- resolution: "@types/node@npm:22.9.0"
+ version: 20.14.11
+ resolution: "@types/node@npm:20.14.11"
dependencies:
- undici-types: ~6.19.8
- checksum: c014eb3b8a110f1b87b614a40ef288d13e6b08ae9d5dafbd38951a2eebc24d352dc55330ed9d00c97ee9e64483c3cc14c4aa914c5df7ca7b9eaa1a30b2340dbd
+ undici-types: ~5.26.4
+ checksum: 24396dea2bc803c2d2ebfdd31a3e6e93818ba1a5933d63cd0f64fad1e2955a8280ba09338a48ffe68cd84748eec8bee27135045f15661aa389656f67fe0b0924
languageName: node
linkType: hard
@@ -3284,16 +3298,16 @@ __metadata:
linkType: hard
"@types/qs@npm:*":
- version: 6.9.17
- resolution: "@types/qs@npm:6.9.17"
- checksum: fc3beda0be70e820ddabaa361e8dfec5e09b482b8f6cf1515615479a027dd06cd5ba0ffbd612b654c2605523f45f484c8905a475623d6cd0c4cadcf5d0c517f5
+ version: 6.9.15
+ resolution: "@types/qs@npm:6.9.15"
+ checksum: 97d8208c2b82013b618e7a9fc14df6bd40a73e1385ac479b6896bafc7949a46201c15f42afd06e86a05e914f146f495f606b6fb65610cc60cf2e0ff743ec38a2
languageName: node
linkType: hard
"@types/qunit@npm:^2.19.4":
- version: 2.19.12
- resolution: "@types/qunit@npm:2.19.12"
- checksum: 00f29247692f3a59977d272474e7d3682d0149cbd4c26abe31025e51763f7cae0918e20545a1dad76c213fe6187473fbb41457c052c358fdde9ec4c002544b3e
+ version: 2.19.10
+ resolution: "@types/qunit@npm:2.19.10"
+ checksum: 039d53d6397361da440616215202b42d4595d22ee9907c96d3ab9c10d01b78d5efaaf788f2d3d7e36f80bc8d3a3bc78e8d169861efad501d64c0d1c287bcfa6e
languageName: node
linkType: hard
@@ -3373,9 +3387,9 @@ __metadata:
linkType: hard
"@types/sizzle@npm:*":
- version: 2.3.9
- resolution: "@types/sizzle@npm:2.3.9"
- checksum: 413811a79e7e9f1d8f47e6047ae0aea1530449d612304cdda1c30018e3d053b8544861ec2c70bdeca75a0a010192e6bb78efc6fb4caaafdd65c4eee90066686a
+ version: 2.3.8
+ resolution: "@types/sizzle@npm:2.3.8"
+ checksum: 2ac62443dc917f5f903cbd9afc51c7d6cc1c6569b4e1a15faf04aea5b13b486e7f208650014c3dc4fed34653eded3e00fe5abffe0e6300cbf0e8a01beebf11a6
languageName: node
linkType: hard
@@ -3394,9 +3408,9 @@ __metadata:
linkType: hard
"@types/unist@npm:^2, @types/unist@npm:^2.0.0, @types/unist@npm:^2.0.2, @types/unist@npm:^2.0.3":
- version: 2.0.11
- resolution: "@types/unist@npm:2.0.11"
- checksum: 6d436e832bc35c6dde9f056ac515ebf2b3384a1d7f63679d12358766f9b313368077402e9c1126a14d827f10370a5485e628bf61aa91117cf4fc882423191a4e
+ version: 2.0.10
+ resolution: "@types/unist@npm:2.0.10"
+ checksum: e2924e18dedf45f68a5c6ccd6015cd62f1643b1b43baac1854efa21ae9e70505db94290434a23da1137d9e31eb58e54ca175982005698ac37300a1c889f6c4aa
languageName: node
linkType: hard
@@ -3528,177 +3542,154 @@ __metadata:
languageName: node
linkType: hard
-"@warp-drive/build-config@npm:0.0.0-beta.7":
- version: 0.0.0-beta.7
- resolution: "@warp-drive/build-config@npm:0.0.0-beta.7"
- dependencies:
- "@embroider/addon-shim": ^1.8.9
- "@embroider/macros": ^1.16.6
- babel-import-util: ^2.1.1
- broccoli-funnel: ^3.0.8
- semver: ^7.6.3
- checksum: dccd601024de2a7e5f0c28aa1ab193d21a85a66ecd6f54ab08d28585e23a60374dcf8f41490c13426ebd347d898443f380cbbe22f8621a81ff9fb2c1c762c56c
- languageName: node
- linkType: hard
-
-"@warp-drive/core-types@npm:0.0.0-beta.12":
- version: 0.0.0-beta.12
- resolution: "@warp-drive/core-types@npm:0.0.0-beta.12"
- dependencies:
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
- checksum: beb6273048cb328aa41b48e06018b4fd594c15c8f8bb5212ca3046d08efc6ef83e4dcfedf6e3ded515655e8bdbdd69a04f01dd5c7c86b79c457f7c798f291bfa
- languageName: node
- linkType: hard
-
-"@webassemblyjs/ast@npm:1.14.1, @webassemblyjs/ast@npm:^1.12.1":
- version: 1.14.1
- resolution: "@webassemblyjs/ast@npm:1.14.1"
+"@webassemblyjs/ast@npm:1.12.1, @webassemblyjs/ast@npm:^1.11.5":
+ version: 1.12.1
+ resolution: "@webassemblyjs/ast@npm:1.12.1"
dependencies:
- "@webassemblyjs/helper-numbers": 1.13.2
- "@webassemblyjs/helper-wasm-bytecode": 1.13.2
- checksum: f9154ad9ea14f6f2374ebe918c221fd69a4d4514126a1acc6fa4966e8d27ab28cb550a5e6880032cf620e19640578658a7e5a55bd2aad1e3db4e9d598b8f2099
+ "@webassemblyjs/helper-numbers": 1.11.6
+ "@webassemblyjs/helper-wasm-bytecode": 1.11.6
+ checksum: 31bcc64147236bd7b1b6d29d1f419c1f5845c785e1e42dc9e3f8ca2e05a029e9393a271b84f3a5bff2a32d35f51ff59e2181a6e5f953fe88576acd6750506202
languageName: node
linkType: hard
-"@webassemblyjs/floating-point-hex-parser@npm:1.13.2":
- version: 1.13.2
- resolution: "@webassemblyjs/floating-point-hex-parser@npm:1.13.2"
- checksum: e866ec8433f4a70baa511df5e8f2ebcd6c24f4e2cc6274c7c5aabe2bcce3459ea4680e0f35d450e1f3602acf3913b6b8e4f15069c8cfd34ae8609fb9a7d01795
+"@webassemblyjs/floating-point-hex-parser@npm:1.11.6":
+ version: 1.11.6
+ resolution: "@webassemblyjs/floating-point-hex-parser@npm:1.11.6"
+ checksum: 29b08758841fd8b299c7152eda36b9eb4921e9c584eb4594437b5cd90ed6b920523606eae7316175f89c20628da14326801090167cc7fbffc77af448ac84b7e2
languageName: node
linkType: hard
-"@webassemblyjs/helper-api-error@npm:1.13.2":
- version: 1.13.2
- resolution: "@webassemblyjs/helper-api-error@npm:1.13.2"
- checksum: 48b5df7fd3095bb252f59a139fe2cbd999a62ac9b488123e9a0da3906ad8a2f2da7b2eb21d328c01a90da987380928706395c2897d1f3ed9e2125b6d75a920d0
+"@webassemblyjs/helper-api-error@npm:1.11.6":
+ version: 1.11.6
+ resolution: "@webassemblyjs/helper-api-error@npm:1.11.6"
+ checksum: e8563df85161096343008f9161adb138a6e8f3c2cc338d6a36011aa55eabb32f2fd138ffe63bc278d009ada001cc41d263dadd1c0be01be6c2ed99076103689f
languageName: node
linkType: hard
-"@webassemblyjs/helper-buffer@npm:1.14.1":
- version: 1.14.1
- resolution: "@webassemblyjs/helper-buffer@npm:1.14.1"
- checksum: b611e981dfd6a797c3d8fc3a772de29a6e55033737c2c09c31bb66c613bdbb2d25f915df1dee62a602c6acc057ca71128432fa8c3e22a893e1219dc454f14ede
+"@webassemblyjs/helper-buffer@npm:1.12.1":
+ version: 1.12.1
+ resolution: "@webassemblyjs/helper-buffer@npm:1.12.1"
+ checksum: c3ffb723024130308db608e86e2bdccd4868bbb62dffb0a9a1530606496f79c87f8565bd8e02805ce64912b71f1a70ee5fb00307258b0c082c3abf961d097eca
languageName: node
linkType: hard
-"@webassemblyjs/helper-numbers@npm:1.13.2":
- version: 1.13.2
- resolution: "@webassemblyjs/helper-numbers@npm:1.13.2"
+"@webassemblyjs/helper-numbers@npm:1.11.6":
+ version: 1.11.6
+ resolution: "@webassemblyjs/helper-numbers@npm:1.11.6"
dependencies:
- "@webassemblyjs/floating-point-hex-parser": 1.13.2
- "@webassemblyjs/helper-api-error": 1.13.2
+ "@webassemblyjs/floating-point-hex-parser": 1.11.6
+ "@webassemblyjs/helper-api-error": 1.11.6
"@xtuc/long": 4.2.2
- checksum: 49e2c9bf9b66997e480f6b44d80f895b3cde4de52ac135921d28e144565edca6903a519f627f4089b5509de1d7f9e5023f0e1a94ff78a36c9e2eb30e7c18ffd2
+ checksum: f4b562fa219f84368528339e0f8d273ad44e047a07641ffcaaec6f93e5b76fd86490a009aa91a294584e1436d74b0a01fa9fde45e333a4c657b58168b04da424
languageName: node
linkType: hard
-"@webassemblyjs/helper-wasm-bytecode@npm:1.13.2":
- version: 1.13.2
- resolution: "@webassemblyjs/helper-wasm-bytecode@npm:1.13.2"
- checksum: 8e059e1c1f0294f4fc3df8e4eaff3c5ef6e2e1358f34ebc118eaf5070ed59e56ed7fc92b28be734ebde17c8d662d5d27e06ade686c282445135da083ae11c128
+"@webassemblyjs/helper-wasm-bytecode@npm:1.11.6":
+ version: 1.11.6
+ resolution: "@webassemblyjs/helper-wasm-bytecode@npm:1.11.6"
+ checksum: 3535ef4f1fba38de3475e383b3980f4bbf3de72bbb631c2b6584c7df45be4eccd62c6ff48b5edd3f1bcff275cfd605a37679ec199fc91fd0a7705d7f1e3972dc
languageName: node
linkType: hard
-"@webassemblyjs/helper-wasm-section@npm:1.14.1":
- version: 1.14.1
- resolution: "@webassemblyjs/helper-wasm-section@npm:1.14.1"
+"@webassemblyjs/helper-wasm-section@npm:1.12.1":
+ version: 1.12.1
+ resolution: "@webassemblyjs/helper-wasm-section@npm:1.12.1"
dependencies:
- "@webassemblyjs/ast": 1.14.1
- "@webassemblyjs/helper-buffer": 1.14.1
- "@webassemblyjs/helper-wasm-bytecode": 1.13.2
- "@webassemblyjs/wasm-gen": 1.14.1
- checksum: 0a08d454a63192cd66abf91b6f060ac4b466cef341262246e9dcc828dd4c8536195dea9b46a1244b1eac65b59b8b502164a771a190052a92ff0a0a2ded0f8f53
+ "@webassemblyjs/ast": 1.12.1
+ "@webassemblyjs/helper-buffer": 1.12.1
+ "@webassemblyjs/helper-wasm-bytecode": 1.11.6
+ "@webassemblyjs/wasm-gen": 1.12.1
+ checksum: c19810cdd2c90ff574139b6d8c0dda254d42d168a9e5b3d353d1bc085f1d7164ccd1b3c05592a45a939c47f7e403dc8d03572bb686642f06a3d02932f6f0bc8f
languageName: node
linkType: hard
-"@webassemblyjs/ieee754@npm:1.13.2":
- version: 1.13.2
- resolution: "@webassemblyjs/ieee754@npm:1.13.2"
+"@webassemblyjs/ieee754@npm:1.11.6":
+ version: 1.11.6
+ resolution: "@webassemblyjs/ieee754@npm:1.11.6"
dependencies:
"@xtuc/ieee754": ^1.2.0
- checksum: d7e3520baa37a7309fa7db4d73d69fb869878853b1ebd4b168821bd03fcc4c0e1669c06231315b0039035d9a7a462e53de3ad982da4a426a4b0743b5888e8673
+ checksum: 13574b8e41f6ca39b700e292d7edf102577db5650fe8add7066a320aa4b7a7c09a5056feccac7a74eb68c10dea9546d4461412af351f13f6b24b5f32379b49de
languageName: node
linkType: hard
-"@webassemblyjs/leb128@npm:1.13.2":
- version: 1.13.2
- resolution: "@webassemblyjs/leb128@npm:1.13.2"
+"@webassemblyjs/leb128@npm:1.11.6":
+ version: 1.11.6
+ resolution: "@webassemblyjs/leb128@npm:1.11.6"
dependencies:
"@xtuc/long": 4.2.2
- checksum: 64083507f7cff477a6d71a9e325d95665cea78ec8df99ca7c050e1cfbe300fbcf0842ca3dcf3b4fa55028350135588a4f879398d3dd2b6a8de9913ce7faf5333
+ checksum: 7ea942dc9777d4b18a5ebfa3a937b30ae9e1d2ce1fee637583ed7f376334dd1d4274f813d2e250056cca803e0952def4b954913f1a3c9068bcd4ab4ee5143bf0
languageName: node
linkType: hard
-"@webassemblyjs/utf8@npm:1.13.2":
- version: 1.13.2
- resolution: "@webassemblyjs/utf8@npm:1.13.2"
- checksum: 95ec6052f30eefa8d50c9b2a3394d08b17d53a4aa52821451d41d774c126fa8f39b988fbf5bff56da86852a87c16d676e576775a4071e5e5ccf020cc85a4b281
+"@webassemblyjs/utf8@npm:1.11.6":
+ version: 1.11.6
+ resolution: "@webassemblyjs/utf8@npm:1.11.6"
+ checksum: 807fe5b5ce10c390cfdd93e0fb92abda8aebabb5199980681e7c3743ee3306a75729bcd1e56a3903980e96c885ee53ef901fcbaac8efdfa480f9c0dae1d08713
languageName: node
linkType: hard
-"@webassemblyjs/wasm-edit@npm:^1.12.1":
- version: 1.14.1
- resolution: "@webassemblyjs/wasm-edit@npm:1.14.1"
+"@webassemblyjs/wasm-edit@npm:^1.11.5":
+ version: 1.12.1
+ resolution: "@webassemblyjs/wasm-edit@npm:1.12.1"
dependencies:
- "@webassemblyjs/ast": 1.14.1
- "@webassemblyjs/helper-buffer": 1.14.1
- "@webassemblyjs/helper-wasm-bytecode": 1.13.2
- "@webassemblyjs/helper-wasm-section": 1.14.1
- "@webassemblyjs/wasm-gen": 1.14.1
- "@webassemblyjs/wasm-opt": 1.14.1
- "@webassemblyjs/wasm-parser": 1.14.1
- "@webassemblyjs/wast-printer": 1.14.1
- checksum: 9341c3146bb1b7863f03d6050c2a66990f20384ca137388047bbe1feffacb599e94fca7b7c18287d17e2449ffb4005fdc7f41f674a6975af9ad8522756f8ffff
+ "@webassemblyjs/ast": 1.12.1
+ "@webassemblyjs/helper-buffer": 1.12.1
+ "@webassemblyjs/helper-wasm-bytecode": 1.11.6
+ "@webassemblyjs/helper-wasm-section": 1.12.1
+ "@webassemblyjs/wasm-gen": 1.12.1
+ "@webassemblyjs/wasm-opt": 1.12.1
+ "@webassemblyjs/wasm-parser": 1.12.1
+ "@webassemblyjs/wast-printer": 1.12.1
+ checksum: ae23642303f030af888d30c4ef37b08dfec7eab6851a9575a616e65d1219f880d9223913a39056dd654e49049d76e97555b285d1f7e56935047abf578cce0692
languageName: node
linkType: hard
-"@webassemblyjs/wasm-gen@npm:1.14.1":
- version: 1.14.1
- resolution: "@webassemblyjs/wasm-gen@npm:1.14.1"
+"@webassemblyjs/wasm-gen@npm:1.12.1":
+ version: 1.12.1
+ resolution: "@webassemblyjs/wasm-gen@npm:1.12.1"
dependencies:
- "@webassemblyjs/ast": 1.14.1
- "@webassemblyjs/helper-wasm-bytecode": 1.13.2
- "@webassemblyjs/ieee754": 1.13.2
- "@webassemblyjs/leb128": 1.13.2
- "@webassemblyjs/utf8": 1.13.2
- checksum: 401b12bec7431c4fc29d9414bbe40d3c6dc5be04d25a116657c42329f5481f0129f3b5834c293f26f0e42681ceac9157bf078ce9bdb6a7f78037c650373f98b2
+ "@webassemblyjs/ast": 1.12.1
+ "@webassemblyjs/helper-wasm-bytecode": 1.11.6
+ "@webassemblyjs/ieee754": 1.11.6
+ "@webassemblyjs/leb128": 1.11.6
+ "@webassemblyjs/utf8": 1.11.6
+ checksum: 5787626bb7f0b033044471ddd00ce0c9fe1ee4584e8b73e232051e3a4c99ba1a102700d75337151c8b6055bae77eefa4548960c610a5e4a504e356bd872138ff
languageName: node
linkType: hard
-"@webassemblyjs/wasm-opt@npm:1.14.1":
- version: 1.14.1
- resolution: "@webassemblyjs/wasm-opt@npm:1.14.1"
+"@webassemblyjs/wasm-opt@npm:1.12.1":
+ version: 1.12.1
+ resolution: "@webassemblyjs/wasm-opt@npm:1.12.1"
dependencies:
- "@webassemblyjs/ast": 1.14.1
- "@webassemblyjs/helper-buffer": 1.14.1
- "@webassemblyjs/wasm-gen": 1.14.1
- "@webassemblyjs/wasm-parser": 1.14.1
- checksum: 60c697a9e9129d8d23573856df0791ba33cea4a3bc2339044cae73128c0983802e5e50a42157b990eeafe1237eb8e7653db6de5f02b54a0ae7b81b02dcdf2ae9
+ "@webassemblyjs/ast": 1.12.1
+ "@webassemblyjs/helper-buffer": 1.12.1
+ "@webassemblyjs/wasm-gen": 1.12.1
+ "@webassemblyjs/wasm-parser": 1.12.1
+ checksum: 0e8fa8a0645304a1e18ff40d3db5a2e9233ebaa169b19fcc651d6fc9fe2cac0ce092ddee927318015ae735d9cd9c5d97c0cafb6a51dcd2932ac73587b62df991
languageName: node
linkType: hard
-"@webassemblyjs/wasm-parser@npm:1.14.1, @webassemblyjs/wasm-parser@npm:^1.12.1":
- version: 1.14.1
- resolution: "@webassemblyjs/wasm-parser@npm:1.14.1"
+"@webassemblyjs/wasm-parser@npm:1.12.1, @webassemblyjs/wasm-parser@npm:^1.11.5":
+ version: 1.12.1
+ resolution: "@webassemblyjs/wasm-parser@npm:1.12.1"
dependencies:
- "@webassemblyjs/ast": 1.14.1
- "@webassemblyjs/helper-api-error": 1.13.2
- "@webassemblyjs/helper-wasm-bytecode": 1.13.2
- "@webassemblyjs/ieee754": 1.13.2
- "@webassemblyjs/leb128": 1.13.2
- "@webassemblyjs/utf8": 1.13.2
- checksum: 93f1fe2676da465b4e824419d9812a3d7218de4c3addd4e916c04bc86055fa134416c1b67e4b7cbde8d728c0dce2721d06cc0bfe7a7db7c093a0898009937405
+ "@webassemblyjs/ast": 1.12.1
+ "@webassemblyjs/helper-api-error": 1.11.6
+ "@webassemblyjs/helper-wasm-bytecode": 1.11.6
+ "@webassemblyjs/ieee754": 1.11.6
+ "@webassemblyjs/leb128": 1.11.6
+ "@webassemblyjs/utf8": 1.11.6
+ checksum: 176015de3551ac068cd4505d837414f258d9ade7442bd71efb1232fa26c9f6d7d4e11a5c816caeed389943f409af7ebff6899289a992d7a70343cb47009d21a8
languageName: node
linkType: hard
-"@webassemblyjs/wast-printer@npm:1.14.1":
- version: 1.14.1
- resolution: "@webassemblyjs/wast-printer@npm:1.14.1"
+"@webassemblyjs/wast-printer@npm:1.12.1":
+ version: 1.12.1
+ resolution: "@webassemblyjs/wast-printer@npm:1.12.1"
dependencies:
- "@webassemblyjs/ast": 1.14.1
+ "@webassemblyjs/ast": 1.12.1
"@xtuc/long": 4.2.2
- checksum: 517881a0554debe6945de719d100b2d8883a2d24ddf47552cdeda866341e2bb153cd824a864bc7e2a61190a4b66b18f9899907e0074e9e820d2912ac0789ea60
+ checksum: 2974b5dda8d769145ba0efd886ea94a601e61fb37114c14f9a9a7606afc23456799af652ac3052f284909bd42edc3665a76bc9b50f95f0794c053a8a1757b713
languageName: node
linkType: hard
@@ -3744,7 +3735,7 @@ __metadata:
languageName: node
linkType: hard
-"accepts@npm:~1.3.4, accepts@npm:~1.3.7, accepts@npm:~1.3.8":
+"accepts@npm:~1.3.4, accepts@npm:~1.3.5, accepts@npm:~1.3.7, accepts@npm:~1.3.8":
version: 1.3.8
resolution: "accepts@npm:1.3.8"
dependencies:
@@ -3754,12 +3745,12 @@ __metadata:
languageName: node
linkType: hard
-"acorn-import-attributes@npm:^1.9.5":
- version: 1.9.5
- resolution: "acorn-import-attributes@npm:1.9.5"
+"acorn-import-assertions@npm:^1.9.0":
+ version: 1.9.0
+ resolution: "acorn-import-assertions@npm:1.9.0"
peerDependencies:
acorn: ^8
- checksum: 1c0c49b6a244503964ae46ae850baccf306e84caf99bc2010ed6103c69a423987b07b520a6c619f075d215388bd4923eccac995886a54309eda049ab78a4be95
+ checksum: 944fb2659d0845c467066bdcda2e20c05abe3aaf11972116df457ce2627628a81764d800dd55031ba19de513ee0d43bb771bc679cc0eda66dc8b4fade143bc0c
languageName: node
linkType: hard
@@ -3782,11 +3773,11 @@ __metadata:
linkType: hard
"acorn@npm:^8.5.0, acorn@npm:^8.7.1, acorn@npm:^8.8.2, acorn@npm:^8.9.0":
- version: 8.14.0
- resolution: "acorn@npm:8.14.0"
+ version: 8.12.1
+ resolution: "acorn@npm:8.12.1"
bin:
acorn: bin/acorn
- checksum: 8755074ba55fff94e84e81c72f1013c2d9c78e973c31231c8ae505a5f966859baf654bddd75046bffd73ce816b149298977fff5077a3033dedba0ae2aad152d4
+ checksum: 677880034aee5bdf7434cc2d25b641d7bedb0b5ef47868a78dadabedccf58e1c5457526d9d8249cd253f2df087e081c3fe7d903b448d8e19e5131a3065b83c07
languageName: node
linkType: hard
@@ -3981,9 +3972,9 @@ __metadata:
linkType: hard
"ansi-regex@npm:^6.0.1":
- version: 6.1.0
- resolution: "ansi-regex@npm:6.1.0"
- checksum: 495834a53b0856c02acd40446f7130cb0f8284f4a39afdab20d5dc42b2e198b1196119fe887beed8f9055c4ff2055e3b2f6d4641d0be018cdfb64fedf6fc1aac
+ version: 6.0.1
+ resolution: "ansi-regex@npm:6.0.1"
+ checksum: 1ff8b7667cded1de4fa2c9ae283e979fc87036864317da86a2e546725f96406746411d0d85e87a2d12fa5abd715d90006de7fa4fa0477c92321ad3b4c7d4e169
languageName: node
linkType: hard
@@ -4105,9 +4096,25 @@ __metadata:
linkType: hard
"aria-query@npm:^5.3.0":
- version: 5.3.2
- resolution: "aria-query@npm:5.3.2"
- checksum: d971175c85c10df0f6d14adfe6f1292409196114ab3c62f238e208b53103686f46cc70695a4f775b73bc65f6a09b6a092fd963c4f3a5a7d690c8fc5094925717
+ version: 5.3.0
+ resolution: "aria-query@npm:5.3.0"
+ dependencies:
+ dequal: ^2.0.3
+ checksum: 305bd73c76756117b59aba121d08f413c7ff5e80fa1b98e217a3443fcddb9a232ee790e24e432b59ae7625aebcf4c47cb01c2cac872994f0b426f5bdfcd96ba9
+ languageName: node
+ linkType: hard
+
+"arr-diff@npm:^4.0.0":
+ version: 4.0.0
+ resolution: "arr-diff@npm:4.0.0"
+ checksum: ea7c8834842ad3869297f7915689bef3494fd5b102ac678c13ffccab672d3d1f35802b79e90c4cfec2f424af3392e44112d1ccf65da34562ed75e049597276a0
+ languageName: node
+ linkType: hard
+
+"arr-union@npm:^3.1.0":
+ version: 3.1.0
+ resolution: "arr-union@npm:3.1.0"
+ checksum: b5b0408c6eb7591143c394f3be082fee690ddd21f0fdde0a0a01106799e847f67fcae1b7e56b0a0c173290e29c6aca9562e82b300708a268bc8f88f3d6613cb9
languageName: node
linkType: hard
@@ -4204,6 +4211,13 @@ __metadata:
languageName: node
linkType: hard
+"array-unique@npm:^0.3.2":
+ version: 0.3.2
+ resolution: "array-unique@npm:0.3.2"
+ checksum: da344b89cfa6b0a5c221f965c21638bfb76b57b45184a01135382186924f55973cd9b171d4dad6bf606c6d9d36b0d721d091afdc9791535ead97ccbe78f8a888
+ languageName: node
+ linkType: hard
+
"array.prototype.reduce@npm:^1.0.6":
version: 1.0.7
resolution: "array.prototype.reduce@npm:1.0.7"
@@ -4276,6 +4290,13 @@ __metadata:
languageName: node
linkType: hard
+"assign-symbols@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "assign-symbols@npm:1.0.0"
+ checksum: c0eb895911d05b6b2d245154f70461c5e42c107457972e5ebba38d48967870dee53bcdf6c7047990586daa80fab8dab3cc6300800fbd47b454247fdedd859a2c
+ languageName: node
+ linkType: hard
+
"ast-metadata-inferer@npm:^0.8.0":
version: 0.8.0
resolution: "ast-metadata-inferer@npm:0.8.0"
@@ -4355,6 +4376,15 @@ __metadata:
languageName: node
linkType: hard
+"atob@npm:^2.1.2":
+ version: 2.1.2
+ resolution: "atob@npm:2.1.2"
+ bin:
+ atob: bin/atob.js
+ checksum: dfeeeb70090c5ebea7be4b9f787f866686c645d9f39a0d184c817252d0cf08455ed25267d79c03254d3be1f03ac399992a792edcd5ffb9c91e097ab5ef42833a
+ languageName: node
+ linkType: hard
+
"autosize@npm:^4.0.0":
version: 4.0.4
resolution: "autosize@npm:4.0.4"
@@ -4372,9 +4402,9 @@ __metadata:
linkType: hard
"axe-core@npm:^4.6.3":
- version: 4.10.2
- resolution: "axe-core@npm:4.10.2"
- checksum: 2b9b1c93ea73ea9f206604e4e17bd771d2d835f077bde54517d73028b8865c69b209460e73d5b109968cbdb39ab3d28943efa5695189bd79e16421ce1706719e
+ version: 4.9.1
+ resolution: "axe-core@npm:4.9.1"
+ checksum: 41d9227871781f96c2952e2a777fca73624959dd0e98864f6d82806a77602f82b4fc490852082a7e524d8cd864e50d8b4d9931819b4a150112981d8c932110c5
languageName: node
linkType: hard
@@ -4385,14 +4415,14 @@ __metadata:
languageName: node
linkType: hard
-"babel-import-util@npm:^1.2.2":
+"babel-import-util@npm:^1.2.2, babel-import-util@npm:^1.3.0":
version: 1.4.1
resolution: "babel-import-util@npm:1.4.1"
checksum: a7f43a10141f62f05c0b4b3c80ee31f5f362d5f7a7d614fcef836f3eca2ec3ae0a732f8550674542d1821e87c3fd3d986fea94ed12e2a283a1571b6d1249380a
languageName: node
linkType: hard
-"babel-import-util@npm:^2.0.0, babel-import-util@npm:^2.0.1, babel-import-util@npm:^2.1.1":
+"babel-import-util@npm:^2.0.0, babel-import-util@npm:^2.0.1":
version: 2.1.1
resolution: "babel-import-util@npm:2.1.1"
checksum: a2aa8fd8c20ae2a9a89bd729791223761f565b1d717098546545021b0d6fcac6f20cc0d85fdcb06d19d0dd4b60d431c2e032dfcb8b137a114a9b554290b86fae
@@ -4407,17 +4437,17 @@ __metadata:
linkType: hard
"babel-loader@npm:^8.0.6":
- version: 8.4.1
- resolution: "babel-loader@npm:8.4.1"
+ version: 8.3.0
+ resolution: "babel-loader@npm:8.3.0"
dependencies:
find-cache-dir: ^3.3.1
- loader-utils: ^2.0.4
+ loader-utils: ^2.0.0
make-dir: ^3.1.0
schema-utils: ^2.6.5
peerDependencies:
"@babel/core": ^7.0.0
webpack: ">=2"
- checksum: fa02db1a7d3ebb7b4aab83e926fb51e627a00427943c9dd1b3302c8099c67fa6a242a2adeed37d95abcd39ba619edf558a1dec369ce0849c5a87dc290c90fe2f
+ checksum: d48bcf9e030e598656ad3ff5fb85967db2eaaf38af5b4a4b99d25618a2057f9f100e6b231af2a46c1913206db506115ca7a8cbdf52c9c73d767070dae4352ab5
languageName: node
linkType: hard
@@ -4468,13 +4498,13 @@ __metadata:
languageName: node
linkType: hard
-"babel-plugin-ember-template-compilation@npm:^2.0.0, babel-plugin-ember-template-compilation@npm:^2.0.1, babel-plugin-ember-template-compilation@npm:^2.1.1":
- version: 2.3.0
- resolution: "babel-plugin-ember-template-compilation@npm:2.3.0"
+"babel-plugin-ember-template-compilation@npm:^2.0.0, babel-plugin-ember-template-compilation@npm:^2.0.1":
+ version: 2.2.5
+ resolution: "babel-plugin-ember-template-compilation@npm:2.2.5"
dependencies:
"@glimmer/syntax": ^0.84.3
babel-import-util: ^3.0.0
- checksum: f6045a8b91823a8fe12a016f49367dab986e3b75f1bade5fc709d04f02d3b1a632596247cbccff034882e3c65ae4f3e1029e3b19aa5d92cdec46b977c1878da9
+ checksum: 6f2ca068c53cf7ef97f9935c7dc239b99c8c23fba855630f7f2822df82574cc0140430738b48b0571ed4cf2e80658101e45cc557c3e478efacab52deed1f8f10
languageName: node
linkType: hard
@@ -4549,15 +4579,15 @@ __metadata:
languageName: node
linkType: hard
-"babel-plugin-polyfill-corejs3@npm:^0.10.6":
- version: 0.10.6
- resolution: "babel-plugin-polyfill-corejs3@npm:0.10.6"
+"babel-plugin-polyfill-corejs3@npm:^0.10.1, babel-plugin-polyfill-corejs3@npm:^0.10.4":
+ version: 0.10.4
+ resolution: "babel-plugin-polyfill-corejs3@npm:0.10.4"
dependencies:
- "@babel/helper-define-polyfill-provider": ^0.6.2
- core-js-compat: ^3.38.0
+ "@babel/helper-define-polyfill-provider": ^0.6.1
+ core-js-compat: ^3.36.1
peerDependencies:
"@babel/core": ^7.4.0 || ^8.0.0-0 <8.0.0
- checksum: f762f29f7acca576897c63149c850f0a72babd3fb9ea436a2e36f0c339161c4b912a77828541d8188ce8a91e50965c6687120cf36071eabb1b7aa92f279e2164
+ checksum: b96a54495f7cc8b3797251c8c15f5ed015edddc3110fc122f6b32c94bec33af1e8bc56fa99091808f500bde0cccaaa266889cdc5935d9e6e9cf09898214f02dd
languageName: node
linkType: hard
@@ -4579,6 +4609,13 @@ __metadata:
languageName: node
linkType: hard
+"babel6-plugin-strip-class-callcheck@npm:^6.0.0":
+ version: 6.0.0
+ resolution: "babel6-plugin-strip-class-callcheck@npm:6.0.0"
+ checksum: e765ff21f9aee4e488a270cd8515a456c0de4f56739c2c285a7d8c72712fcca10cd89d24eec47dec0ce8b77a7e27257ece2674c781169a01a90bcb18c825b369
+ languageName: node
+ linkType: hard
+
"backbone@npm:^1.1.2":
version: 1.6.0
resolution: "backbone@npm:1.6.0"
@@ -4630,6 +4667,21 @@ __metadata:
languageName: node
linkType: hard
+"base@npm:^0.11.1":
+ version: 0.11.2
+ resolution: "base@npm:0.11.2"
+ dependencies:
+ cache-base: ^1.0.1
+ class-utils: ^0.3.5
+ component-emitter: ^1.2.1
+ define-property: ^1.0.0
+ isobject: ^3.0.1
+ mixin-deep: ^1.2.0
+ pascalcase: ^0.1.1
+ checksum: a4a146b912e27eea8f66d09cb0c9eab666f32ce27859a7dfd50f38cd069a2557b39f16dba1bc2aecb3b44bf096738dd207b7970d99b0318423285ab1b1994edd
+ languageName: node
+ linkType: hard
+
"basic-auth@npm:~2.0.1":
version: 2.0.1
resolution: "basic-auth@npm:2.0.1"
@@ -4639,15 +4691,6 @@ __metadata:
languageName: node
linkType: hard
-"better-path-resolve@npm:1.0.0":
- version: 1.0.0
- resolution: "better-path-resolve@npm:1.0.0"
- dependencies:
- is-windows: ^1.0.0
- checksum: 5392dbe04e7fe68b944eb37961d9dfa147aaac3ee9ee3f6e13d42e2c9fbe949e68d16e896c14ee9016fa5f8e6e53ec7fd8b5f01b50a32067a7d94ac9cfb9a050
- languageName: node
- linkType: hard
-
"big.js@npm:^5.2.2":
version: 5.2.2
resolution: "big.js@npm:5.2.2"
@@ -5203,6 +5246,21 @@ __metadata:
languageName: node
linkType: hard
+"broccoli-plugin@npm:*, broccoli-plugin@npm:^4.0.0, broccoli-plugin@npm:^4.0.2, broccoli-plugin@npm:^4.0.3, broccoli-plugin@npm:^4.0.5, broccoli-plugin@npm:^4.0.7":
+ version: 4.0.7
+ resolution: "broccoli-plugin@npm:4.0.7"
+ dependencies:
+ broccoli-node-api: ^1.7.0
+ broccoli-output-wrapper: ^3.2.5
+ fs-merger: ^3.2.1
+ promise-map-series: ^0.3.0
+ quick-temp: ^0.1.8
+ rimraf: ^3.0.2
+ symlink-or-copy: ^1.3.1
+ checksum: 49d6a55ebfe1880e73956dc8bf23104ad81c1272d4a06755823e6e1eec5255583d2913de99427b3e0a620e3b56178fdd8ea03c832b7452f0440c166044aa555c
+ languageName: node
+ linkType: hard
+
"broccoli-plugin@npm:1.1.0":
version: 1.1.0
resolution: "broccoli-plugin@npm:1.1.0"
@@ -5239,21 +5297,6 @@ __metadata:
languageName: node
linkType: hard
-"broccoli-plugin@npm:^4.0.0, broccoli-plugin@npm:^4.0.2, broccoli-plugin@npm:^4.0.3, broccoli-plugin@npm:^4.0.5, broccoli-plugin@npm:^4.0.7":
- version: 4.0.7
- resolution: "broccoli-plugin@npm:4.0.7"
- dependencies:
- broccoli-node-api: ^1.7.0
- broccoli-output-wrapper: ^3.2.5
- fs-merger: ^3.2.1
- promise-map-series: ^0.3.0
- quick-temp: ^0.1.8
- rimraf: ^3.0.2
- symlink-or-copy: ^1.3.1
- checksum: 49d6a55ebfe1880e73956dc8bf23104ad81c1272d4a06755823e6e1eec5255583d2913de99427b3e0a620e3b56178fdd8ea03c832b7452f0440c166044aa555c
- languageName: node
- linkType: hard
-
"broccoli-rollup@npm:4.0.0":
version: 4.0.0
resolution: "broccoli-rollup@npm:4.0.0"
@@ -5272,14 +5315,31 @@ __metadata:
languageName: node
linkType: hard
+"broccoli-rollup@npm:^5.0.0":
+ version: 5.0.0
+ resolution: "broccoli-rollup@npm:5.0.0"
+ dependencies:
+ "@types/broccoli-plugin": ^3.0.0
+ broccoli-plugin: ^4.0.7
+ fs-tree-diff: ^2.0.1
+ heimdalljs: ^0.2.6
+ node-modules-path: ^1.0.1
+ rollup: ^2.50.0
+ rollup-pluginutils: ^2.8.1
+ symlink-or-copy: ^1.2.0
+ walk-sync: ^2.2.0
+ checksum: 752725e1b78b8dc6b228b0156b44e293feb948c00bf3b505254ba1f9a706a14334727eb1505c8e01ee22500387cc3059f8cb0ecf7857935346768bdcf1939b1e
+ languageName: node
+ linkType: hard
+
"broccoli-sass-source-maps@npm:^4.0.0":
- version: 4.3.0
- resolution: "broccoli-sass-source-maps@npm:4.3.0"
+ version: 4.2.4
+ resolution: "broccoli-sass-source-maps@npm:4.2.4"
dependencies:
broccoli-caching-writer: ^3.0.3
include-path-searcher: ^0.1.0
rsvp: ^4.8.5
- checksum: a1ccdedd39a3a07891f8162984fa70dda890b8f9cd5b22e99b6d2341659438926dce31a3cd0d74019a583fe81797265f327786374eb8c0a3c790d8c54cf9e5ff
+ checksum: 8a6d3190e09bb19a7953792ebc2f147ec39b8ee263d8e9ffd63f9c59e91e072928ed0e058ff91f275dc43c2d752c663f9bcd5cfd18645be815acb8ff8e1f59ab
languageName: node
linkType: hard
@@ -5494,17 +5554,17 @@ __metadata:
languageName: node
linkType: hard
-"browserslist@npm:^4.21.10, browserslist@npm:^4.24.0, browserslist@npm:^4.24.2":
- version: 4.24.2
- resolution: "browserslist@npm:4.24.2"
+"browserslist@npm:^4.14.5, browserslist@npm:^4.21.10, browserslist@npm:^4.23.0, browserslist@npm:^4.23.1":
+ version: 4.23.2
+ resolution: "browserslist@npm:4.23.2"
dependencies:
- caniuse-lite: ^1.0.30001669
- electron-to-chromium: ^1.5.41
- node-releases: ^2.0.18
- update-browserslist-db: ^1.1.1
+ caniuse-lite: ^1.0.30001640
+ electron-to-chromium: ^1.4.820
+ node-releases: ^2.0.14
+ update-browserslist-db: ^1.1.0
bin:
browserslist: cli.js
- checksum: cf64085f12132d38638f38937a255edb82c7551b164a98577b055dd79719187a816112f7b97b9739e400c4954cd66479c0d7a843cb816e346f4795dc24fd5d97
+ checksum: 8212af37f6ca6355da191cf2d4ad49bd0b82854888b9a7e103638fada70d38cbe36d28feeeaa98344cb15d9128f9f74bcc8ce1bfc9011b5fd14381c1c6fb542c
languageName: node
linkType: hard
@@ -5557,6 +5617,13 @@ __metadata:
languageName: node
linkType: hard
+"bytes@npm:3.0.0":
+ version: 3.0.0
+ resolution: "bytes@npm:3.0.0"
+ checksum: a2b386dd8188849a5325f58eef69c3b73c51801c08ffc6963eddc9be244089ba32d19347caf6d145c86f315ae1b1fc7061a32b0c1aa6379e6a719090287ed101
+ languageName: node
+ linkType: hard
+
"bytes@npm:3.1.2":
version: 3.1.2
resolution: "bytes@npm:3.1.2"
@@ -5591,6 +5658,23 @@ __metadata:
languageName: node
linkType: hard
+"cache-base@npm:^1.0.1":
+ version: 1.0.1
+ resolution: "cache-base@npm:1.0.1"
+ dependencies:
+ collection-visit: ^1.0.0
+ component-emitter: ^1.2.1
+ get-value: ^2.0.6
+ has-value: ^1.0.0
+ isobject: ^3.0.1
+ set-value: ^2.0.0
+ to-object-path: ^0.3.0
+ union-value: ^1.0.0
+ unset-value: ^1.0.0
+ checksum: 9114b8654fe2366eedc390bad0bcf534e2f01b239a888894e2928cb58cdc1e6ea23a73c6f3450dcfd2058aa73a8a981e723cd1e7c670c047bf11afdc65880107
+ languageName: node
+ linkType: hard
+
"cache-point@npm:^2.0.0":
version: 2.0.0
resolution: "cache-point@npm:2.0.0"
@@ -5675,10 +5759,10 @@ __metadata:
languageName: node
linkType: hard
-"caniuse-lite@npm:^1.0.30001524, caniuse-lite@npm:^1.0.30001669":
- version: 1.0.30001679
- resolution: "caniuse-lite@npm:1.0.30001679"
- checksum: 6fca375c6c3a749aaf1246f28c9b182f94b2aa0fb30a98d4c5a3df1a22054fba852df625be50b5ffa1c772a0c53f1aea14dead920d6c5d54030dd4dede0dba98
+"caniuse-lite@npm:^1.0.30001524, caniuse-lite@npm:^1.0.30001640":
+ version: 1.0.30001643
+ resolution: "caniuse-lite@npm:1.0.30001643"
+ checksum: e39991c13a0fd8f5c2aa99c9128188e4c4e9d6a203c3da6270c36285460ef152c5e9410ee4db560aa723904668946afe50541dce9636ab5e61434ba71dc22955
languageName: node
linkType: hard
@@ -5829,26 +5913,22 @@ __metadata:
languageName: node
linkType: hard
-"cheerio@npm:^1.0.0":
- version: 1.0.0
- resolution: "cheerio@npm:1.0.0"
+"cheerio@npm:^1.0.0-rc.12":
+ version: 1.0.0-rc.12
+ resolution: "cheerio@npm:1.0.0-rc.12"
dependencies:
cheerio-select: ^2.1.0
dom-serializer: ^2.0.0
domhandler: ^5.0.3
- domutils: ^3.1.0
- encoding-sniffer: ^0.2.0
- htmlparser2: ^9.1.0
- parse5: ^7.1.2
+ domutils: ^3.0.1
+ htmlparser2: ^8.0.1
+ parse5: ^7.0.0
parse5-htmlparser2-tree-adapter: ^7.0.0
- parse5-parser-stream: ^7.1.2
- undici: ^6.19.5
- whatwg-mimetype: ^4.0.0
- checksum: ade4344811dcad5b5d78392506ef6bab1900c13a65222c869e745a38370d287f4b94838ac6d752883a84d937edb62b5bd0deaf70e6f38054acbfe3da4881574a
+ checksum: 5d4c1b7a53cf22d3a2eddc0aff70cf23cbb30d01a4c79013e703a012475c02461aa1fcd99127e8d83a02216386ed6942b2c8103845fd0812300dd199e6e7e054
languageName: node
linkType: hard
-"chokidar@npm:^3.6.0":
+"chokidar@npm:>=3.0.0 <4.0.0, chokidar@npm:^3.4.0":
version: 3.6.0
resolution: "chokidar@npm:3.6.0"
dependencies:
@@ -5867,15 +5947,6 @@ __metadata:
languageName: node
linkType: hard
-"chokidar@npm:^4.0.0":
- version: 4.0.1
- resolution: "chokidar@npm:4.0.1"
- dependencies:
- readdirp: ^4.0.1
- checksum: 193da9786b0422a895d59c7552195d15c6c636e6a2293ae43d09e34e243e24ccd02d693f007c767846a65abbeae5fea6bfacb8fc2ddec4ea4d397620d552010d
- languageName: node
- linkType: hard
-
"chownr@npm:^2.0.0":
version: 2.0.0
resolution: "chownr@npm:2.0.0"
@@ -5904,6 +5975,18 @@ __metadata:
languageName: node
linkType: hard
+"class-utils@npm:^0.3.5":
+ version: 0.3.6
+ resolution: "class-utils@npm:0.3.6"
+ dependencies:
+ arr-union: ^3.1.0
+ define-property: ^0.2.5
+ isobject: ^3.0.0
+ static-extend: ^0.1.1
+ checksum: be108900801e639e50f96a7e4bfa8867c753a7750a7603879f3981f8b0a89cba657497a2d5f40cd4ea557ff15d535a100818bb486baf6e26fe5d7872e75f1078
+ languageName: node
+ linkType: hard
+
"clean-base-url@npm:^1.0.0":
version: 1.0.0
resolution: "clean-base-url@npm:1.0.0"
@@ -6083,9 +6166,9 @@ __metadata:
linkType: hard
"codemirror@npm:^5.58.2":
- version: 5.65.18
- resolution: "codemirror@npm:5.65.18"
- checksum: 950015d587e0790cceae157423bbc70bf1da8256050c8f6739fe967045b050e22c63b332de6388ed6d9526d253a834806ace76c875006fc8078e2c15c9f275a7
+ version: 5.65.17
+ resolution: "codemirror@npm:5.65.17"
+ checksum: 8bc853524c6416826364d776b012f488b3f4736899e5c8026062f43927e09de773051dd1b34e8cfd25642d7e358679ca5b113f0034fdd6a295f4193b04f8c528
languageName: node
linkType: hard
@@ -6099,6 +6182,16 @@ __metadata:
languageName: node
linkType: hard
+"collection-visit@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "collection-visit@npm:1.0.0"
+ dependencies:
+ map-visit: ^1.0.0
+ object-visit: ^1.0.0
+ checksum: 15d9658fe6eb23594728346adad5433b86bb7a04fd51bbab337755158722f9313a5376ef479de5b35fbc54140764d0d39de89c339f5d25b959ed221466981da9
+ languageName: node
+ linkType: hard
+
"color-convert@npm:^1.9.0":
version: 1.9.3
resolution: "color-convert@npm:1.9.3"
@@ -6294,7 +6387,14 @@ __metadata:
languageName: node
linkType: hard
-"compressible@npm:~2.0.18":
+"component-emitter@npm:^1.2.1":
+ version: 1.3.1
+ resolution: "component-emitter@npm:1.3.1"
+ checksum: 94550aa462c7bd5a61c1bc480e28554aa306066930152d1b1844a0dd3845d4e5db7e261ddec62ae184913b3e59b55a2ad84093b9d3596a8f17c341514d6c483d
+ languageName: node
+ linkType: hard
+
+"compressible@npm:~2.0.16":
version: 2.0.18
resolution: "compressible@npm:2.0.18"
dependencies:
@@ -6304,17 +6404,17 @@ __metadata:
linkType: hard
"compression@npm:^1.7.4":
- version: 1.7.5
- resolution: "compression@npm:1.7.5"
+ version: 1.7.4
+ resolution: "compression@npm:1.7.4"
dependencies:
- bytes: 3.1.2
- compressible: ~2.0.18
+ accepts: ~1.3.5
+ bytes: 3.0.0
+ compressible: ~2.0.16
debug: 2.6.9
- negotiator: ~0.6.4
on-headers: ~1.0.2
- safe-buffer: 5.2.1
+ safe-buffer: 5.1.2
vary: ~1.1.2
- checksum: d624b5562492518eee82c4f1381ea36f69f1f10b4283bfc2dcafd7d4d7eeed17c3f0e8f2951798594b7064db7ac5a6198df34816bde2d56bb7c75ce1570880e9
+ checksum: 35c0f2eb1f28418978615dc1bc02075b34b1568f7f56c62d60f4214d4b7cc00d0f6d282b5f8a954f59872396bd770b6b15ffd8aa94c67d4bce9b8887b906999b
languageName: node
linkType: hard
@@ -6418,13 +6518,6 @@ __metadata:
languageName: node
linkType: hard
-"content-tag@npm:^1.2.2":
- version: 1.2.2
- resolution: "content-tag@npm:1.2.2"
- checksum: 38087560728d3e2a0ea62786fb795a1b34d77e83e2729af0ef84f9f67a45a080a2ee2efac7f452c9e8bb95c81ef2f5f92c0dd00e5653799bfc48c997bca84ef2
- languageName: node
- linkType: hard
-
"content-type@npm:~1.0.4, content-type@npm:~1.0.5":
version: 1.0.5
resolution: "content-type@npm:1.0.5"
@@ -6453,17 +6546,17 @@ __metadata:
languageName: node
linkType: hard
-"cookie@npm:0.7.1":
- version: 0.7.1
- resolution: "cookie@npm:0.7.1"
- checksum: cec5e425549b3650eb5c3498a9ba3cde0b9cd419e3b36e4b92739d30b4d89e0b678b98c1ddc209ce7cf958cd3215671fd6ac47aec21f10c2a0cc68abd399d8a7
+"cookie@npm:0.6.0":
+ version: 0.6.0
+ resolution: "cookie@npm:0.6.0"
+ checksum: f56a7d32a07db5458e79c726b77e3c2eff655c36792f2b6c58d351fb5f61531e5b1ab7f46987150136e366c65213cbe31729e02a3eaed630c3bf7334635fb410
languageName: node
linkType: hard
-"cookie@npm:~0.7.2":
- version: 0.7.2
- resolution: "cookie@npm:0.7.2"
- checksum: 9bf8555e33530affd571ea37b615ccad9b9a34febbf2c950c86787088eb00a8973690833b0f8ebd6b69b753c62669ea60cec89178c1fb007bf0749abed74f93e
+"cookie@npm:~0.4.1":
+ version: 0.4.2
+ resolution: "cookie@npm:0.4.2"
+ checksum: a00833c998bedf8e787b4c342defe5fa419abd96b32f4464f718b91022586b8f1bafbddd499288e75c037642493c83083da426c6a9080d309e3bd90fd11baa9b
languageName: node
linkType: hard
@@ -6474,12 +6567,19 @@ __metadata:
languageName: node
linkType: hard
-"core-js-compat@npm:^3.38.0, core-js-compat@npm:^3.38.1":
- version: 3.39.0
- resolution: "core-js-compat@npm:3.39.0"
+"copy-descriptor@npm:^0.1.0":
+ version: 0.1.1
+ resolution: "copy-descriptor@npm:0.1.1"
+ checksum: d4b7b57b14f1d256bb9aa0b479241048afd7f5bcf22035fc7b94e8af757adeae247ea23c1a774fe44869fd5694efba4a969b88d966766c5245fdee59837fe45b
+ languageName: node
+ linkType: hard
+
+"core-js-compat@npm:^3.36.1, core-js-compat@npm:^3.37.1":
+ version: 3.37.1
+ resolution: "core-js-compat@npm:3.37.1"
dependencies:
- browserslist: ^4.24.2
- checksum: 2d7d087c3271d711d03a55203d4756f6288317a1ce35cdc8bafaf1833ef21fd67a92a50cff8dcf7df1325ac63720906ab3cf514c85b238c95f65fca1040f6ad6
+ browserslist: ^4.23.0
+ checksum: 5e7430329358bced08c30950512d2081aea0a5652b4c5892cbb3c4a6db05b0d3893a191a955162a07fdb5f4fe74e61b6429fdb503f54e062336d76e43c9555d9
languageName: node
linkType: hard
@@ -6491,9 +6591,9 @@ __metadata:
linkType: hard
"core-js@npm:^3.24.1":
- version: 3.39.0
- resolution: "core-js@npm:3.39.0"
- checksum: 7a3670e9a2a89e0a049daa288d742d09f6e16d27a8945c5e2ef6fc45dc57e5c4bc5db589da05947486f54ae978d14cf27bd3fb1db0b9907000a611e8af37355b
+ version: 3.37.1
+ resolution: "core-js@npm:3.37.1"
+ checksum: 2d58a5c599f05c3e04abc8bc5e64b88eb17d914c0f552f670fb800afa74ec54b4fcc7f231ad6bd45badaf62c0fb0ce30e6fe89cedb6bb6d54e6f19115c3c17ff
languageName: node
linkType: hard
@@ -6567,13 +6667,13 @@ __metadata:
linkType: hard
"cross-spawn@npm:^7.0.0, cross-spawn@npm:^7.0.2, cross-spawn@npm:^7.0.3":
- version: 7.0.5
- resolution: "cross-spawn@npm:7.0.5"
+ version: 7.0.3
+ resolution: "cross-spawn@npm:7.0.3"
dependencies:
path-key: ^3.1.0
shebang-command: ^2.0.0
which: ^2.0.1
- checksum: 55c50004cb6bbea3649784caac6e7b8ddd03fa8c1e14dbd5a1f15896708378006eb7526a52a0f48770c768c9b8aed48a5888eb8e785ff59ff7749e74f66cd96b
+ checksum: 671cc7c7288c3a8406f3c69a3ae2fc85555c04169e9d611def9a675635472614f1c0ed0ef80955d5b6d4e724f6ced67f0ad1bb006c2ea643488fcfef994d7f52
languageName: node
linkType: hard
@@ -6585,9 +6685,9 @@ __metadata:
linkType: hard
"css-functions-list@npm:^3.2.1":
- version: 3.2.3
- resolution: "css-functions-list@npm:3.2.3"
- checksum: 25f12fb0ef1384b1cf45a6e7e0afd596a19bee90b90316d9e50f7820888f4a8f265be7a6a96b10a5c81e403bd7a5ff8010fa936144f84959d9d91c9350cda0d4
+ version: 3.2.2
+ resolution: "css-functions-list@npm:3.2.2"
+ checksum: b8a564118b93b87b63236a57132a3ef581416896a70c1d0df73360a9ec43dc582f7c2a586b578feb8476179518e557c6657570a8b6185b16300c7232a84d43e3
languageName: node
linkType: hard
@@ -6870,7 +6970,7 @@ __metadata:
languageName: node
linkType: hard
-"debug@npm:2.6.9, debug@npm:^2.1.1, debug@npm:^2.1.3, debug@npm:^2.2.0, debug@npm:^2.6.8":
+"debug@npm:2.6.9, debug@npm:^2.1.1, debug@npm:^2.1.3, debug@npm:^2.2.0, debug@npm:^2.3.3, debug@npm:^2.6.8":
version: 2.6.9
resolution: "debug@npm:2.6.9"
dependencies:
@@ -6889,14 +6989,14 @@ __metadata:
linkType: hard
"debug@npm:^4.0.0, debug@npm:^4.1.0, debug@npm:^4.1.1, debug@npm:^4.2.0, debug@npm:^4.3.1, debug@npm:^4.3.2, debug@npm:^4.3.3, debug@npm:^4.3.4, debug@npm:~4.3.1, debug@npm:~4.3.2, debug@npm:~4.3.4":
- version: 4.3.7
- resolution: "debug@npm:4.3.7"
+ version: 4.3.5
+ resolution: "debug@npm:4.3.5"
dependencies:
- ms: ^2.1.3
+ ms: 2.1.2
peerDependenciesMeta:
supports-color:
optional: true
- checksum: 822d74e209cd910ef0802d261b150314bbcf36c582ccdbb3e70f0894823c17e49a50d3e66d96b633524263975ca16b6a833f3e3b7e030c157169a5fabac63160
+ checksum: 7c002b51e256257f936dda09eb37167df952758c57badf6bf44bdc40b89a4bcb8e5a0a2e4c7b53f97c69e2970dd5272d33a757378a12c8f8e64ea7bf99e8e86e
languageName: node
linkType: hard
@@ -6933,6 +7033,13 @@ __metadata:
languageName: node
linkType: hard
+"decode-uri-component@npm:^0.2.0":
+ version: 0.2.2
+ resolution: "decode-uri-component@npm:0.2.2"
+ checksum: 95476a7d28f267292ce745eac3524a9079058bbb35767b76e3ee87d42e34cd0275d2eb19d9d08c3e167f97556e8a2872747f5e65cbebcac8b0c98d83e285f139
+ languageName: node
+ linkType: hard
+
"decorator-transforms@npm:^1.0.1, decorator-transforms@npm:^1.1.0":
version: 1.2.1
resolution: "decorator-transforms@npm:1.2.1"
@@ -6944,12 +7051,12 @@ __metadata:
linkType: hard
"decorator-transforms@npm:^2.0.0":
- version: 2.3.0
- resolution: "decorator-transforms@npm:2.3.0"
+ version: 2.0.0
+ resolution: "decorator-transforms@npm:2.0.0"
dependencies:
"@babel/plugin-syntax-decorators": ^7.23.3
babel-import-util: ^3.0.0
- checksum: 00f9755366cede42cdb41e177e134492d24edbda624aadac28057c10480e7b2c4b3b6a8b362c8aac7e595f64b01c2e2264f8ca779f1d12677391e86fee3f51c6
+ checksum: 1736a83181be2484e7eb5f1e7b60543712b6cbf25711dfc55e4a948ea4d10a7be8aef3d8011fb3f733ae61e983446ffa7ae88b02ae445c113406527097c70e1a
languageName: node
linkType: hard
@@ -7012,6 +7119,34 @@ __metadata:
languageName: node
linkType: hard
+"define-property@npm:^0.2.5":
+ version: 0.2.5
+ resolution: "define-property@npm:0.2.5"
+ dependencies:
+ is-descriptor: ^0.1.0
+ checksum: 85af107072b04973b13f9e4128ab74ddfda48ec7ad2e54b193c0ffb57067c4ce5b7786a7b4ae1f24bd03e87c5d18766b094571810b314d7540f86d4354dbd394
+ languageName: node
+ linkType: hard
+
+"define-property@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "define-property@npm:1.0.0"
+ dependencies:
+ is-descriptor: ^1.0.0
+ checksum: 5fbed11dace44dd22914035ba9ae83ad06008532ca814d7936a53a09e897838acdad5b108dd0688cc8d2a7cf0681acbe00ee4136cf36743f680d10517379350a
+ languageName: node
+ linkType: hard
+
+"define-property@npm:^2.0.2":
+ version: 2.0.2
+ resolution: "define-property@npm:2.0.2"
+ dependencies:
+ is-descriptor: ^1.0.2
+ isobject: ^3.0.1
+ checksum: 3217ed53fc9eed06ba8da6f4d33e28c68a82e2f2a8ab4d562c4920d8169a166fe7271453675e6c69301466f36a65d7f47edf0cf7f474b9aa52a5ead9c1b13c99
+ languageName: node
+ linkType: hard
+
"delegates@npm:^1.0.0":
version: 1.0.0
resolution: "delegates@npm:1.0.0"
@@ -7033,6 +7168,13 @@ __metadata:
languageName: node
linkType: hard
+"dequal@npm:^2.0.3":
+ version: 2.0.3
+ resolution: "dequal@npm:2.0.3"
+ checksum: 8679b850e1a3d0ebbc46ee780d5df7b478c23f335887464023a631d1b9af051ad4a6595a44220f9ff8ff95a8ddccf019b5ad778a976fd7bbf77383d36f412f90
+ languageName: node
+ linkType: hard
+
"destroy@npm:1.2.0":
version: 1.2.0
resolution: "destroy@npm:1.2.0"
@@ -7054,15 +7196,6 @@ __metadata:
languageName: node
linkType: hard
-"detect-libc@npm:^1.0.3":
- version: 1.0.3
- resolution: "detect-libc@npm:1.0.3"
- bin:
- detect-libc: ./bin/detect-libc.js
- checksum: daaaed925ffa7889bd91d56e9624e6c8033911bb60f3a50a74a87500680652969dbaab9526d1e200a4c94acf80fc862a22131841145a0a8482d60a99c24f4a3e
- languageName: node
- linkType: hard
-
"detect-newline@npm:3.1.0":
version: 3.1.0
resolution: "detect-newline@npm:3.1.0"
@@ -7100,14 +7233,14 @@ __metadata:
languageName: node
linkType: hard
-"dmd@npm:^6.2.3":
- version: 6.2.3
- resolution: "dmd@npm:6.2.3"
+"dmd@npm:^6.2.1":
+ version: 6.2.2
+ resolution: "dmd@npm:6.2.2"
dependencies:
array-back: ^6.2.2
cache-point: ^2.0.0
common-sequence: ^2.0.2
- file-set: ^4.0.2
+ fast-glob: ^3.3.2
handlebars: ^4.7.8
marked: ^4.3.0
object-get: ^2.1.1
@@ -7116,7 +7249,7 @@ __metadata:
reduce-without: ^1.0.1
test-value: ^3.0.0
walk-back: ^5.1.0
- checksum: e3cdb5731361ab2b20eb1bb4476a50158c90ae35d46868d48ab50e31f60c2510fe57c120eed2fc0225d7863b5d80c18dcf7bc363cfbf05ae4b3bdb83c5c471b0
+ checksum: 4e84d3a0d3fcee163d8705f09d0607a022af177e0655eef41d16e325f9fab809d6d96340b73e7979ce94aed3e6889552cb9e446dbb2b2857c59f3f325c69c9bd
languageName: node
linkType: hard
@@ -7217,9 +7350,9 @@ __metadata:
linkType: hard
"dompurify@npm:^3.0.2":
- version: 3.1.7
- resolution: "dompurify@npm:3.1.7"
- checksum: 0a9b811bbc94f3dba60cf6486962362b0f1a5b4ab789f5e1cbd4749b6ba1a1fad190a677a962dc8850ce28764424765fe425e9d6508e4e93ba648ef15d54bc24
+ version: 3.1.6
+ resolution: "dompurify@npm:3.1.6"
+ checksum: cc4fc4ccd9261fbceb2a1627a985c70af231274a26ddd3f643fd0616a0a44099bd9e4480940ce3655612063be4a1fe9f5e9309967526f8c0a99f931602323866
languageName: node
linkType: hard
@@ -7244,7 +7377,7 @@ __metadata:
languageName: node
linkType: hard
-"domutils@npm:^3.0.1, domutils@npm:^3.1.0":
+"domutils@npm:^3.0.1":
version: 3.1.0
resolution: "domutils@npm:3.1.0"
dependencies:
@@ -7305,26 +7438,26 @@ __metadata:
languageName: node
linkType: hard
-"electron-to-chromium@npm:^1.5.41":
- version: 1.5.55
- resolution: "electron-to-chromium@npm:1.5.55"
- checksum: 7c797418ef30b0021b586d023eec1e87400632add6f37285e993716292f2f3451a36d6266c8502df34c776000bca01ad8e17b736b1f7b3de022a82a45d0dc06f
+"electron-to-chromium@npm:^1.4.820":
+ version: 1.4.832
+ resolution: "electron-to-chromium@npm:1.4.832"
+ checksum: a1f71cf7665441d28cfe5ff31415d7a64036d83226c40322c1412de118091ad5010fd0da831dc04de115d978e91074756b7fbc9e7788e4f98888f0e194b5bdac
languageName: node
linkType: hard
-"ember-a11y-refocus@npm:^4.1.3":
- version: 4.1.4
- resolution: "ember-a11y-refocus@npm:4.1.4"
+"ember-a11y-refocus@npm:^4.1.0":
+ version: 4.1.1
+ resolution: "ember-a11y-refocus@npm:4.1.1"
dependencies:
ember-cli-babel: ^7.26.11
ember-cli-htmlbars: ^6.0.1
- checksum: c1a79f9b7792f3bac674e010b231561da4ca5fc2f3f6a9a6e2263908a6c781546e1baccdb84838f367ac9104e2c76b7f688a57f06f0bd6c194a6f1a71910e422
+ checksum: b9f861f1359e8c720bf844161da3eecbe2218149739211961d216b6fcaec5e78dfd51debe5ec2707ae0d31fdbbd9cf692349e3f0f5b47d1f12bd963c021494ac
languageName: node
linkType: hard
"ember-a11y-testing@npm:^7.0.1":
- version: 7.0.2
- resolution: "ember-a11y-testing@npm:7.0.2"
+ version: 7.0.1
+ resolution: "ember-a11y-testing@npm:7.0.1"
dependencies:
"@ember/test-waiters": ^2.4.3 || ^3.0.0
"@scalvert/ember-setup-middleware-reporter": ^0.1.1
@@ -7344,7 +7477,7 @@ __metadata:
peerDependenciesMeta:
qunit:
optional: true
- checksum: 3d8bd7ced90d69a93ec4fd6b2484da33dbbc67cbf2b41752ea21604a83778d22939de498528d9b8516a5faa833818c9382250e57bc9a4e4a36bdd9bf30101f0a
+ checksum: d546eecd628c34161b435a7fe877a5c5e15b98d2635d4b5215510832d687e41a3bdcfdd0e56ff9dd54f574d0a679431a45b29004482a4f9dbbac1c22f715aba0
languageName: node
linkType: hard
@@ -7374,9 +7507,9 @@ __metadata:
languageName: node
linkType: hard
-"ember-auto-import@npm:^2.2.4, ember-auto-import@npm:^2.4.1, ember-auto-import@npm:^2.6.0, ember-auto-import@npm:^2.6.3, ember-auto-import@npm:^2.7.0, ember-auto-import@npm:^2.7.2":
- version: 2.10.0
- resolution: "ember-auto-import@npm:2.10.0"
+"ember-auto-import@npm:^2.2.4, ember-auto-import@npm:^2.4.1, ember-auto-import@npm:^2.5.0, ember-auto-import@npm:^2.6.0, ember-auto-import@npm:^2.6.1, ember-auto-import@npm:^2.6.3, ember-auto-import@npm:^2.7.0, ember-auto-import@npm:^2.7.2":
+ version: 2.7.4
+ resolution: "ember-auto-import@npm:2.7.4"
dependencies:
"@babel/core": ^7.16.7
"@babel/plugin-proposal-class-properties": ^7.16.7
@@ -7401,43 +7534,41 @@ __metadata:
fs-extra: ^10.0.0
fs-tree-diff: ^2.0.0
handlebars: ^4.3.1
- is-subdir: ^1.2.0
js-string-escape: ^1.0.1
lodash: ^4.17.19
mini-css-extract-plugin: ^2.5.2
minimatch: ^3.0.0
parse5: ^6.0.1
- pkg-entry-points: ^1.1.0
resolve: ^1.20.0
resolve-package-path: ^4.0.3
semver: ^7.3.4
style-loader: ^2.0.0
typescript-memoize: ^1.0.0-alpha.3
walk-sync: ^3.0.0
- checksum: f122f7e42bd9c4cfd39e951d79531d23a5b03278bba762d7638951d017d9fe0ce32d2852284ea5761233cd4a66555b9e3dcadaa8c6370fc00581795f28c20f79
+ checksum: fe214cf2c28328cc573c70ea278a92798b785e5f26aeabe1b9451e782676632dda566d03cdd467796ceb26c66626ac5401deded4819ea4397399c6d07ebdea10
languageName: node
linkType: hard
"ember-basic-dropdown@npm:^8.0.4":
- version: 8.3.0
- resolution: "ember-basic-dropdown@npm:8.3.0"
+ version: 8.1.0
+ resolution: "ember-basic-dropdown@npm:8.1.0"
dependencies:
- "@babel/core": ^7.25.2
- "@embroider/addon-shim": ^1.8.9
- "@embroider/macros": ^1.16.5
- "@embroider/util": ^1.13.2
- decorator-transforms: ^2.0.0
+ "@babel/core": ^7.24.5
+ "@embroider/addon-shim": ^1.8.7
+ "@embroider/macros": ^1.16.1
+ "@embroider/util": ^1.13.1
+ decorator-transforms: ^1.1.0
ember-element-helper: ^0.8.6
ember-lifeline: ^7.0.0
- ember-modifier: ^4.2.0
- ember-style-modifier: ^4.4.0
+ ember-modifier: ^4.1.0
+ ember-style-modifier: ^4.3.1
ember-truth-helpers: ^4.0.3
peerDependencies:
- "@ember/test-helpers": ^2.9.4 || ^3.2.1 || ^4.0.2
+ "@ember/test-helpers": ^2.9.4 || ^3.2.1
"@glimmer/component": ^1.1.2
"@glimmer/tracking": ^1.1.2
ember-source: ^3.28.0 || ^4.0.0 || >=5.0.0
- checksum: 5a90789d16f3c5a7d590a4b8c13df7bc65b5b978bd04ea784ecebf2ab283ba9ee684892c36333f40289542adaa6ce4cb3482866402910818d01984b61f551043
+ checksum: e3c84f18634af3a166ee0a05dfc27913489b2dad4fdea0c077a7b97ce4e9931af8b750994cce8e223f6a4db0580800ee5592b5dc338f42ffa013d924751f8d00
languageName: node
linkType: hard
@@ -7476,7 +7607,7 @@ __metadata:
languageName: node
linkType: hard
-"ember-cli-babel@npm:^7.1.2, ember-cli-babel@npm:^7.1.3, ember-cli-babel@npm:^7.10.0, ember-cli-babel@npm:^7.13.0, ember-cli-babel@npm:^7.18.0, ember-cli-babel@npm:^7.22.1, ember-cli-babel@npm:^7.23.0, ember-cli-babel@npm:^7.26.11, ember-cli-babel@npm:^7.26.3, ember-cli-babel@npm:^7.26.4, ember-cli-babel@npm:^7.26.5, ember-cli-babel@npm:^7.26.6, ember-cli-babel@npm:^7.26.8, ember-cli-babel@npm:^7.5.0, ember-cli-babel@npm:^7.7.3":
+"ember-cli-babel@npm:^7.1.2, ember-cli-babel@npm:^7.1.3, ember-cli-babel@npm:^7.10.0, ember-cli-babel@npm:^7.13.0, ember-cli-babel@npm:^7.18.0, ember-cli-babel@npm:^7.20.0, ember-cli-babel@npm:^7.22.1, ember-cli-babel@npm:^7.23.0, ember-cli-babel@npm:^7.26.11, ember-cli-babel@npm:^7.26.3, ember-cli-babel@npm:^7.26.4, ember-cli-babel@npm:^7.26.5, ember-cli-babel@npm:^7.26.6, ember-cli-babel@npm:^7.26.8, ember-cli-babel@npm:^7.5.0, ember-cli-babel@npm:^7.7.3":
version: 7.26.11
resolution: "ember-cli-babel@npm:7.26.11"
dependencies:
@@ -7694,8 +7825,8 @@ __metadata:
linkType: hard
"ember-cli-mirage@npm:^3.0.3":
- version: 3.0.4
- resolution: "ember-cli-mirage@npm:3.0.4"
+ version: 3.0.3
+ resolution: "ember-cli-mirage@npm:3.0.3"
dependencies:
"@babel/core": ^7.22.20
"@embroider/macros": ^1.13.2
@@ -7705,7 +7836,7 @@ __metadata:
ember-auto-import: ^2.6.3
ember-cli-babel: ^8.0.0
ember-get-config: 0.2.4 - 0.5.0 || ^1.0.0 || ^2.1.1
- ember-inflector: ^2.0.0 || ^3.0.0 || ^4.0.2 || ^5.0.0
+ ember-inflector: ^2.0.0 || ^3.0.0 || ^4.0.2
peerDependencies:
"@ember-data/model": "*"
"@ember/test-helpers": "*"
@@ -7722,7 +7853,7 @@ __metadata:
optional: true
ember-qunit:
optional: true
- checksum: 09abad623b3cc9f6104d7ef22948080ab3f75ae58f2b0d6d5f16d0e9d2963f5a11f2186a3d7497b69a0b8d729427006dfcec98a2ac2d3010b19467609ed306f0
+ checksum: 2d198315def0dd3634a9a11f0ba2bbe4a17a702384e8dd3fca2783670d9bfa524c5cb4e779b00c36102b0f2b5b28ff4be3ce7f3ee4956d6dd5c3bf021c009eac
languageName: node
linkType: hard
@@ -7736,8 +7867,8 @@ __metadata:
linkType: hard
"ember-cli-page-object@npm:^2.3.0":
- version: 2.3.1
- resolution: "ember-cli-page-object@npm:2.3.1"
+ version: 2.3.0
+ resolution: "ember-cli-page-object@npm:2.3.0"
dependencies:
"@embroider/addon-shim": ^1.8.0
"@ro0gr/ceibo": ^2.2.0
@@ -7749,7 +7880,7 @@ __metadata:
peerDependenciesMeta:
"@ember/jquery":
optional: true
- checksum: d688fb4dd8924345f91e0a34d9adc00adfe7932fdfd0e870eac782f8b5e183024f0795b2133d96433f93c49b752a32d41573d79ff5436fced60b496f88d714e8
+ checksum: 665fe48e5398fd30b003c808fd764fd69753cdebc1ea21810e43633c8143e624f0a73888b39630949d14d303a6ddd75212a16302210cf712048fd0318130894a
languageName: node
linkType: hard
@@ -7976,9 +8107,9 @@ __metadata:
languageName: node
linkType: hard
-"ember-cli@npm:~5.8.0":
- version: 5.8.1
- resolution: "ember-cli@npm:5.8.1"
+"ember-cli@npm:~5.4.2":
+ version: 5.4.2
+ resolution: "ember-cli@npm:5.4.2"
dependencies:
"@pnpm/find-workspace-dir": ^6.0.2
broccoli: ^3.5.2
@@ -8002,7 +8133,6 @@ __metadata:
compression: ^1.7.4
configstore: ^5.0.1
console-ui: ^3.1.2
- content-tag: ^1.2.2
core-object: ^3.1.5
dag-map: ^2.0.2
diff: ^5.1.0
@@ -8066,7 +8196,7 @@ __metadata:
yam: ^1.0.0
bin:
ember: bin/ember
- checksum: cba88bd2ae0498500ccec2ac6245118388ef8e9245e2e5082265d4b5ef0e16f77f26aa1601cd58674e9efc460d1093bfe7b6da45b19bf445e0f4986a54e65793
+ checksum: 69456799fec719de441cd67832c904fbbeae055ec1cc017cba970ef1554f9cf4a4a9e83903182affdf5958daec1542d1b1e4aef59f313c02d2743ffa7b6d0cce
languageName: node
linkType: hard
@@ -8114,37 +8244,31 @@ __metadata:
languageName: node
linkType: hard
-"ember-data@npm:~5.3.2":
- version: 5.3.9
- resolution: "ember-data@npm:5.3.9"
+"ember-data@npm:~4.12.4":
+ version: 4.12.8
+ resolution: "ember-data@npm:4.12.8"
dependencies:
- "@ember-data/adapter": 5.3.9
- "@ember-data/debug": 5.3.9
- "@ember-data/graph": 5.3.9
- "@ember-data/json-api": 5.3.9
- "@ember-data/legacy-compat": 5.3.9
- "@ember-data/model": 5.3.9
- "@ember-data/request": 5.3.9
- "@ember-data/request-utils": 5.3.9
- "@ember-data/serializer": 5.3.9
- "@ember-data/store": 5.3.9
- "@ember-data/tracking": 5.3.9
+ "@ember-data/adapter": 4.12.8
+ "@ember-data/debug": 4.12.8
+ "@ember-data/graph": 4.12.8
+ "@ember-data/json-api": 4.12.8
+ "@ember-data/legacy-compat": 4.12.8
+ "@ember-data/model": 4.12.8
+ "@ember-data/private-build-infra": 4.12.8
+ "@ember-data/request": 4.12.8
+ "@ember-data/serializer": 4.12.8
+ "@ember-data/store": 4.12.8
+ "@ember-data/tracking": 4.12.8
"@ember/edition-utils": ^1.2.0
- "@embroider/macros": ^1.16.6
- "@warp-drive/build-config": 0.0.0-beta.7
- "@warp-drive/core-types": 0.0.0-beta.12
+ "@embroider/macros": ^1.10.0
+ "@glimmer/env": ^0.1.7
+ broccoli-merge-trees: ^4.2.0
+ ember-auto-import: ^2.6.1
+ ember-cli-babel: ^7.26.11
+ ember-inflector: ^4.0.2
peerDependencies:
- "@ember/test-helpers": ^3.3.0 || ^4.0.4
- "@ember/test-waiters": ^3.1.0
- qunit: ^2.18.0
- peerDependenciesMeta:
- "@ember/test-helpers":
- optional: true
- "@ember/test-waiters":
- optional: true
- qunit:
- optional: true
- checksum: 420c3d928c2bf7cedf54d0fcd7a892a5c16689d2dfaf282f1aa11abbe74b0126409d377213d71f879905210107aed5c4620da7bfcb7f80f7512371e62f90f9b9
+ "@ember/string": ^3.0.1
+ checksum: 19af592deb9a10c131a9ddbd2514fe665854f8fe1cabe9f70a73220c800de1fc660c672fdb8b349c4e6dc21165d9450fc31a8ab1615bfceb517e4b8d832b4fd2
languageName: node
linkType: hard
@@ -8159,7 +8283,7 @@ __metadata:
languageName: node
linkType: hard
-"ember-destroyable-polyfill@npm:^2.0.1":
+"ember-destroyable-polyfill@npm:^2.0.1, ember-destroyable-polyfill@npm:^2.0.3":
version: 2.0.3
resolution: "ember-destroyable-polyfill@npm:2.0.3"
dependencies:
@@ -8290,13 +8414,31 @@ __metadata:
languageName: node
linkType: hard
-"ember-inflector@npm:^2.0.0 || ^3.0.0 || ^4.0.2 || ^5.0.0":
- version: 5.0.2
- resolution: "ember-inflector@npm:5.0.2"
+"ember-inflector@npm:^2.0.0 || ^3.0.0 || ^4.0.2, ember-inflector@npm:^4.0.2":
+ version: 4.0.3
+ resolution: "ember-inflector@npm:4.0.3"
dependencies:
- "@embroider/addon-shim": ^1.8.7
- decorator-transforms: ^2.0.0
- checksum: 7bdc8a30559a514df0d423aa9155bee9a6be308d40a2644202a66d2aef916fc40ada329e3cbd2357e0aff719e87a650a0731dc6db5275852707faa625a0e0d90
+ ember-cli-babel: ^7.26.11
+ peerDependencies:
+ ember-source: ^3.16.0 || ^4.0.0 || ^5.0.0
+ checksum: b4dbd31e6f1141082cee70236aa5575dd32f9d1562599911fa920d34de458781df4f7a3aee108fd86062ecaa1337064345aee1e55271a3c1e1975947057ce037
+ languageName: node
+ linkType: hard
+
+"ember-keyboard@npm:^8.2.1":
+ version: 8.2.1
+ resolution: "ember-keyboard@npm:8.2.1"
+ dependencies:
+ "@embroider/addon-shim": ^1.8.4
+ ember-destroyable-polyfill: ^2.0.3
+ ember-modifier: ^2.1.2 || ^3.1.0 || ^4.0.0
+ ember-modifier-manager-polyfill: ^1.2.0
+ peerDependencies:
+ "@ember/test-helpers": ^2.6.0 || ^3.0.0
+ peerDependenciesMeta:
+ "@ember/test-helpers":
+ optional: true
+ checksum: cfb4120aaf3b1ff3aba8ba1619b0597c6738abde31d1e0719d8bbf69512fb9967fc76bc85557351ec42856bb9b6c47354634f22f0accab0e15743d9f0e3f2be4
languageName: node
linkType: hard
@@ -8355,20 +8497,7 @@ __metadata:
languageName: node
linkType: hard
-"ember-modifier@npm:^3.2.0, ember-modifier@npm:^3.2.7":
- version: 3.2.7
- resolution: "ember-modifier@npm:3.2.7"
- dependencies:
- ember-cli-babel: ^7.26.6
- ember-cli-normalize-entity-name: ^1.0.0
- ember-cli-string-utils: ^1.1.0
- ember-cli-typescript: ^5.0.0
- ember-compatibility-helpers: ^1.2.5
- checksum: 2e96d9ec3939de178a9ce704d5a9360fd73f0276ffa4a9cce9f100a4087fdc8fae4a8b28bf1be22b05a4d1c61e1bb1ab93ca60576810c6556bc4d9323864c66a
- languageName: node
- linkType: hard
-
-"ember-modifier@npm:^3.2.7 || ^4.0.0, ember-modifier@npm:^4.1.0, ember-modifier@npm:^4.2.0":
+"ember-modifier@npm:^2.1.2 || ^3.1.0 || ^4.0.0, ember-modifier@npm:^3.2.7 || ^4.0.0, ember-modifier@npm:^4.1.0":
version: 4.2.0
resolution: "ember-modifier@npm:4.2.0"
dependencies:
@@ -8385,6 +8514,19 @@ __metadata:
languageName: node
linkType: hard
+"ember-modifier@npm:^3.2.0, ember-modifier@npm:^3.2.7":
+ version: 3.2.7
+ resolution: "ember-modifier@npm:3.2.7"
+ dependencies:
+ ember-cli-babel: ^7.26.6
+ ember-cli-normalize-entity-name: ^1.0.0
+ ember-cli-string-utils: ^1.1.0
+ ember-cli-typescript: ^5.0.0
+ ember-compatibility-helpers: ^1.2.5
+ checksum: 2e96d9ec3939de178a9ce704d5a9360fd73f0276ffa4a9cce9f100a4087fdc8fae4a8b28bf1be22b05a4d1c61e1bb1ab93ca60576810c6556bc4d9323864c66a
+ languageName: node
+ linkType: hard
+
"ember-page-title@npm:^8.0.0":
version: 8.2.3
resolution: "ember-page-title@npm:8.2.3"
@@ -8398,24 +8540,24 @@ __metadata:
linkType: hard
"ember-power-select@npm:^8.1.0, ember-power-select@npm:^8.2.0":
- version: 8.3.1
- resolution: "ember-power-select@npm:8.3.1"
+ version: 8.2.0
+ resolution: "ember-power-select@npm:8.2.0"
dependencies:
- "@embroider/addon-shim": ^1.8.9
- "@embroider/util": ^1.13.2
- decorator-transforms: ^2.0.0
+ "@embroider/addon-shim": ^1.8.7
+ "@embroider/util": ^1.13.1
+ decorator-transforms: ^1.1.0
ember-assign-helper: ^0.5.0
ember-lifeline: ^7.0.0
- ember-modifier: ^4.2.0
+ ember-modifier: ^4.1.0
ember-truth-helpers: ^4.0.3
peerDependencies:
- "@ember/test-helpers": ^2.9.4 || ^3.2.1 || ^4.0.2
+ "@ember/test-helpers": ^2.9.4 || ^3.2.1
"@glimmer/component": ^1.1.2
"@glimmer/tracking": ^1.1.2
- ember-basic-dropdown: ^8.2.0
+ ember-basic-dropdown: ^8.1.0
ember-concurrency: ^4.0.2
ember-source: ^3.28.0 || ^4.0.0 || >=5.0.0
- checksum: 2b21a97f03443235bcf8e45e11d81c3718cc5c1905cdf323af1806603454a18b6c27896e4518f22251a04cec7d60cf4f1c31114521f8becaba6e1208067b9ee5
+ checksum: 9251606e71a0c789ceb106a661e820c10a2ead2a7936c111d93b5474f4ffaffaeb842e3a61c78f51cf7b36bfccc2336e4301c802dc24671aa3981726d153d25a
languageName: node
linkType: hard
@@ -8429,8 +8571,8 @@ __metadata:
linkType: hard
"ember-qunit@npm:^8.0.1":
- version: 8.1.1
- resolution: "ember-qunit@npm:8.1.1"
+ version: 8.1.0
+ resolution: "ember-qunit@npm:8.1.0"
dependencies:
"@embroider/addon-shim": ^1.8.6
"@embroider/macros": ^1.13.1
@@ -8440,7 +8582,7 @@ __metadata:
"@ember/test-helpers": ">=3.0.3"
ember-source: ">=4.0.0"
qunit: ^2.13.0
- checksum: 9cacb25e1d26d04b469b2d5eddacb2970ecd038e74c517f0a0d02f6b6b4f386624eb3a44b27c414ac62107866ebae7bcd549de53e7b0bc91148fe80cf3bc2aa2
+ checksum: ec388ff38f4b299066093b14271ed7c1ff7236140b59a73d99cfc88a0a8638b69b413e38260a98a529ab51ac14469c628d43d70a4168ea7bc37f333abb4965fc
languageName: node
linkType: hard
@@ -8521,6 +8663,15 @@ __metadata:
languageName: node
linkType: hard
+"ember-router-helpers@npm:^0.4.0":
+ version: 0.4.0
+ resolution: "ember-router-helpers@npm:0.4.0"
+ dependencies:
+ ember-cli-babel: ^7.20.0
+ checksum: e847ceb1061f87416d6bb5d72ef539fda738a24086051bc94d740117c6353b3406c65247ac8190b8572df008a70d9f801e224d38489170129c5ca6c4ec7f206e
+ languageName: node
+ linkType: hard
+
"ember-service-worker@meirish/ember-service-worker#configurable-scope":
version: 9.0.1
resolution: "ember-service-worker@https://github.com/meirish/ember-service-worker.git#commit=dda14187aace0d73ecdb6a55beac2194a3aec01b"
@@ -8555,33 +8706,32 @@ __metadata:
languageName: node
linkType: hard
-"ember-source@npm:~5.8.0":
- version: 5.8.0
- resolution: "ember-source@npm:5.8.0"
+"ember-source@npm:~5.4.0":
+ version: 5.4.1
+ resolution: "ember-source@npm:5.4.1"
dependencies:
"@babel/helper-module-imports": ^7.16.7
+ "@babel/plugin-transform-block-scoping": ^7.22.5
"@ember/edition-utils": ^1.2.0
- "@glimmer/compiler": 0.87.1
+ "@glimmer/compiler": 0.84.3
"@glimmer/component": ^1.1.2
- "@glimmer/destroyable": 0.87.1
+ "@glimmer/destroyable": 0.84.3
"@glimmer/env": ^0.1.7
- "@glimmer/global-context": 0.87.1
- "@glimmer/interfaces": 0.87.1
- "@glimmer/manager": 0.87.1
- "@glimmer/node": 0.87.1
- "@glimmer/opcode-compiler": 0.87.1
- "@glimmer/owner": 0.87.1
- "@glimmer/program": 0.87.1
- "@glimmer/reference": 0.87.1
- "@glimmer/runtime": 0.87.1
- "@glimmer/syntax": 0.87.1
- "@glimmer/util": 0.87.1
- "@glimmer/validator": 0.87.1
- "@glimmer/vm": 0.87.1
- "@glimmer/vm-babel-plugins": 0.87.1
+ "@glimmer/global-context": 0.84.3
+ "@glimmer/interfaces": 0.84.3
+ "@glimmer/manager": 0.84.3
+ "@glimmer/node": 0.84.3
+ "@glimmer/opcode-compiler": 0.84.3
+ "@glimmer/owner": 0.84.3
+ "@glimmer/program": 0.84.3
+ "@glimmer/reference": 0.84.3
+ "@glimmer/runtime": 0.84.3
+ "@glimmer/syntax": 0.84.3
+ "@glimmer/util": 0.84.3
+ "@glimmer/validator": 0.84.3
+ "@glimmer/vm-babel-plugins": 0.84.3
"@simple-dom/interface": ^1.4.0
babel-plugin-debug-macros: ^0.3.4
- babel-plugin-ember-template-compilation: ^2.1.1
babel-plugin-filter-imports: ^4.0.0
backburner.js: ^2.8.0
broccoli-concat: ^4.2.5
@@ -8601,14 +8751,14 @@ __metadata:
ember-cli-version-checker: ^5.1.2
ember-router-generator: ^2.0.0
inflection: ^2.0.1
+ resolve: ^1.22.2
route-recognizer: ^0.3.4
router_js: ^8.0.3
semver: ^7.5.2
silent-error: ^1.1.1
- simple-html-tokenizer: ^0.5.11
peerDependencies:
"@glimmer/component": ^1.1.2
- checksum: c7108d512543cedae8929dfba06ee8f8eaeffeaa4553971174f52a2e9e7c8d50c1c5bd27ed3639987f6e6f96fcea045c02d5ca82ba29c7762c9aa293f71e525f
+ checksum: 2be39c4006aac1f81c056a081e8cf40db13dc79b0d23a2f1de4a247277dfd43a8cc287b7d4dac9f58870a677cef92b765f1a917a607a31a8c6ca228795f207ef
languageName: node
linkType: hard
@@ -8625,7 +8775,20 @@ __metadata:
languageName: node
linkType: hard
-"ember-style-modifier@npm:^4.1.0, ember-style-modifier@npm:^4.4.0":
+"ember-style-modifier@npm:^3.0.1":
+ version: 3.1.1
+ resolution: "ember-style-modifier@npm:3.1.1"
+ dependencies:
+ ember-auto-import: ^2.5.0
+ ember-cli-babel: ^7.26.11
+ ember-modifier: ^3.2.7 || ^4.0.0
+ peerDependencies:
+ "@ember/string": ^3.0.1
+ checksum: 53984539a55b34b47f041f48979da096d922406f70e6940adfb6548e0e5479d5e90690948c28c2b5bf0fee2e2e76f8a0b01c624bbb749ea656f48703cfeeec47
+ languageName: node
+ linkType: hard
+
+"ember-style-modifier@npm:^4.1.0, ember-style-modifier@npm:^4.3.1":
version: 4.4.0
resolution: "ember-style-modifier@npm:4.4.0"
dependencies:
@@ -8640,9 +8803,9 @@ __metadata:
languageName: node
linkType: hard
-"ember-svg-jar@npm:2.6.0":
- version: 2.6.0
- resolution: "ember-svg-jar@npm:2.6.0"
+"ember-svg-jar@npm:2.4.4":
+ version: 2.4.4
+ resolution: "ember-svg-jar@npm:2.4.4"
dependencies:
"@embroider/macros": ^1.12.2
broccoli-caching-writer: ^3.0.3
@@ -8653,13 +8816,13 @@ __metadata:
broccoli-plugin: ^4.0.7
broccoli-string-replace: ^0.1.2
broccoli-svg-optimizer: ^2.1.0
- cheerio: ^1.0.0
+ cheerio: ^1.0.0-rc.12
console-ui: ^3.1.1
ember-cli-babel: ^7.26.6
ember-cli-htmlbars: ^5.7.1
lodash: ^4.17.15
safe-stable-stringify: ^2.2.0
- checksum: 2848b112898d37f72593ef4d1319d2a4f883f7a6fc9c4115d13669fb2ff5b42d9b954339ab666cb6bf8f71de71ab5f59df1789ebbb53046e0a5705df744ad3fb
+ checksum: e50efb0d503cebd5c85b620fd73866725e1a6804a119318835c07a24b60f9d7684fc86e853c693beb72b1386ea9f1124f2e49c4ae5de9e7505c2d06180c4e980
languageName: node
linkType: hard
@@ -8722,8 +8885,8 @@ __metadata:
linkType: hard
"ember-template-recast@npm:^6.1.4":
- version: 6.1.5
- resolution: "ember-template-recast@npm:6.1.5"
+ version: 6.1.4
+ resolution: "ember-template-recast@npm:6.1.4"
dependencies:
"@glimmer/reference": ^0.84.3
"@glimmer/syntax": ^0.84.3
@@ -8738,7 +8901,7 @@ __metadata:
workerpool: ^6.4.0
bin:
ember-template-recast: lib/bin.js
- checksum: 4473f1edf2b849706478d8807f414937eb1d04f5f37f8584ea93e6225211bc8c9ddf8df6ce1d58cd3d56e5b5ae8754ccc6804893dd5df310f8a033e1a67d43fd
+ checksum: a492e19c99080e0808fb7b4e3e3e9af47906a4a0b628c1c317414725e82b0c984fe327e1b7265718dc06e3f57b759bf432ef5b7a857cd68b571a7ed1d73d0225
languageName: node
linkType: hard
@@ -8851,23 +9014,6 @@ __metadata:
languageName: node
linkType: hard
-"encodeurl@npm:~2.0.0":
- version: 2.0.0
- resolution: "encodeurl@npm:2.0.0"
- checksum: abf5cd51b78082cf8af7be6785813c33b6df2068ce5191a40ca8b1afe6a86f9230af9a9ce694a5ce4665955e5c1120871826df9c128a642e09c58d592e2807fe
- languageName: node
- linkType: hard
-
-"encoding-sniffer@npm:^0.2.0":
- version: 0.2.0
- resolution: "encoding-sniffer@npm:0.2.0"
- dependencies:
- iconv-lite: ^0.6.3
- whatwg-encoding: ^3.1.1
- checksum: 05ad76b674066e62abc80427eb9e89ecf5ed50f4d20c392f7465992d309215687e3ae1ae8b5d5694fb258f4517c759694c3b413d6c724e1024e1cf98750390eb
- languageName: node
- linkType: hard
-
"encoding@npm:^0.1.13":
version: 0.1.13
resolution: "encoding@npm:0.1.13"
@@ -8893,31 +9039,31 @@ __metadata:
languageName: node
linkType: hard
-"engine.io@npm:~6.6.0":
- version: 6.6.2
- resolution: "engine.io@npm:6.6.2"
+"engine.io@npm:~6.5.2":
+ version: 6.5.5
+ resolution: "engine.io@npm:6.5.5"
dependencies:
"@types/cookie": ^0.4.1
"@types/cors": ^2.8.12
"@types/node": ">=10.0.0"
accepts: ~1.3.4
base64id: 2.0.0
- cookie: ~0.7.2
+ cookie: ~0.4.1
cors: ~2.8.5
debug: ~4.3.1
engine.io-parser: ~5.2.1
ws: ~8.17.1
- checksum: c474feff30fe8c816cccf1642b2f4980cacbff51afcda53c522cbeec4d0ed4047dfbcbeaff694bd88a5de51b3df832fbfb58293bbbf8ddba85459cb45be5f9da
+ checksum: 358d337dd007b81cd6d7f39d0161ec8ec3a86097f0fbb0e10240eace51f836741f93c3e6bd69322b9ce0ad0fd89253a41e09335b6eb412d13e5357a054a90c4a
languageName: node
linkType: hard
-"enhanced-resolve@npm:^5.17.1":
- version: 5.17.1
- resolution: "enhanced-resolve@npm:5.17.1"
+"enhanced-resolve@npm:^5.15.0":
+ version: 5.17.0
+ resolution: "enhanced-resolve@npm:5.17.0"
dependencies:
graceful-fs: ^4.2.4
tapable: ^2.2.0
- checksum: 4bc38cf1cea96456f97503db7280394177d1bc46f8f87c267297d04f795ac5efa81e48115a2f5b6273c781027b5b6bfc5f62b54df629e4d25fa7001a86624f59
+ checksum: 1066000454da6a7aeabdbe1f433d912d1e39e6892142a78a37b6577aab27e0436091fa1399d857ad87085b1c3b73a0f811c8874da3dbdc40fbd5ebe89a5568e6
languageName: node
linkType: hard
@@ -8952,7 +9098,7 @@ __metadata:
languageName: node
linkType: hard
-"entities@npm:^4.2.0, entities@npm:^4.4.0, entities@npm:^4.5.0":
+"entities@npm:^4.2.0, entities@npm:^4.4.0":
version: 4.5.0
resolution: "entities@npm:4.5.0"
checksum: 853f8ebd5b425d350bffa97dd6958143179a5938352ccae092c62d1267c4e392a039be1bae7d51b6e4ffad25f51f9617531fedf5237f15df302ccfb452cbf2d7
@@ -9139,10 +9285,10 @@ __metadata:
languageName: node
linkType: hard
-"escalade@npm:^3.1.1, escalade@npm:^3.2.0":
- version: 3.2.0
- resolution: "escalade@npm:3.2.0"
- checksum: 47b029c83de01b0d17ad99ed766347b974b0d628e848de404018f3abee728e987da0d2d370ad4574aa3d5b5bfc368754fd085d69a30f8e75903486ec4b5b709e
+"escalade@npm:^3.1.1, escalade@npm:^3.1.2":
+ version: 3.1.2
+ resolution: "escalade@npm:3.1.2"
+ checksum: 1ec0977aa2772075493002bdbd549d595ff6e9393b1cb0d7d6fcaf78c750da0c158f180938365486f75cb69fba20294351caddfce1b46552a7b6c3cde52eaa02
languageName: node
linkType: hard
@@ -9298,12 +9444,12 @@ __metadata:
linkType: hard
"eslint-plugin-qunit@npm:^8.0.1":
- version: 8.1.2
- resolution: "eslint-plugin-qunit@npm:8.1.2"
+ version: 8.1.1
+ resolution: "eslint-plugin-qunit@npm:8.1.1"
dependencies:
eslint-utils: ^3.0.0
requireindex: ^1.2.0
- checksum: a908a585e56c1de5e4de10e3d8386574a22102a550d63bbb97ddbad4599fdacf02a00038cb303f81687db83a7632b50204f50320bb7a0fb8e2bf178b00a1e92f
+ checksum: ba71971ebdcc15c1ce7a164d265fe79ec006b9598368006575e6bf14cd1b5d2630d8c4f86cc1dce2e14cfa3faae19c54e96df51fd3086d0bdd1088020ffc7d87
languageName: node
linkType: hard
@@ -9358,14 +9504,14 @@ __metadata:
linkType: hard
"eslint@npm:^8.21.0, eslint@npm:^8.52.0, eslint@npm:^8.7.0":
- version: 8.57.1
- resolution: "eslint@npm:8.57.1"
+ version: 8.57.0
+ resolution: "eslint@npm:8.57.0"
dependencies:
"@eslint-community/eslint-utils": ^4.2.0
"@eslint-community/regexpp": ^4.6.1
"@eslint/eslintrc": ^2.1.4
- "@eslint/js": 8.57.1
- "@humanwhocodes/config-array": ^0.13.0
+ "@eslint/js": 8.57.0
+ "@humanwhocodes/config-array": ^0.11.14
"@humanwhocodes/module-importer": ^1.0.1
"@nodelib/fs.walk": ^1.2.8
"@ungap/structured-clone": ^1.2.0
@@ -9401,7 +9547,7 @@ __metadata:
text-table: ^0.2.0
bin:
eslint: bin/eslint.js
- checksum: e2489bb7f86dd2011967759a09164e65744ef7688c310bc990612fc26953f34cc391872807486b15c06833bdff737726a23e9b4cdba5de144c311377dc41d91b
+ checksum: 3a48d7ff85ab420a8447e9810d8087aea5b1df9ef68c9151732b478de698389ee656fd895635b5f2871c89ee5a2652b3f343d11e9db6f8486880374ebc74a2d9
languageName: node
linkType: hard
@@ -9614,6 +9760,21 @@ __metadata:
languageName: node
linkType: hard
+"expand-brackets@npm:^2.1.4":
+ version: 2.1.4
+ resolution: "expand-brackets@npm:2.1.4"
+ dependencies:
+ debug: ^2.3.3
+ define-property: ^0.2.5
+ extend-shallow: ^2.0.1
+ posix-character-classes: ^0.1.0
+ regex-not: ^1.0.0
+ snapdragon: ^0.8.1
+ to-regex: ^3.0.1
+ checksum: 1781d422e7edfa20009e2abda673cadb040a6037f0bd30fcd7357304f4f0c284afd420d7622722ca4a016f39b6d091841ab57b401c1f7e2e5131ac65b9f14fa1
+ languageName: node
+ linkType: hard
+
"expand-tilde@npm:^2.0.0, expand-tilde@npm:^2.0.2":
version: 2.0.2
resolution: "expand-tilde@npm:2.0.2"
@@ -9631,41 +9792,60 @@ __metadata:
linkType: hard
"express@npm:^4.10.7, express@npm:^4.18.1":
- version: 4.21.1
- resolution: "express@npm:4.21.1"
+ version: 4.19.2
+ resolution: "express@npm:4.19.2"
dependencies:
accepts: ~1.3.8
array-flatten: 1.1.1
- body-parser: 1.20.3
+ body-parser: 1.20.2
content-disposition: 0.5.4
content-type: ~1.0.4
- cookie: 0.7.1
+ cookie: 0.6.0
cookie-signature: 1.0.6
debug: 2.6.9
depd: 2.0.0
- encodeurl: ~2.0.0
+ encodeurl: ~1.0.2
escape-html: ~1.0.3
etag: ~1.8.1
- finalhandler: 1.3.1
+ finalhandler: 1.2.0
fresh: 0.5.2
http-errors: 2.0.0
- merge-descriptors: 1.0.3
+ merge-descriptors: 1.0.1
methods: ~1.1.2
on-finished: 2.4.1
parseurl: ~1.3.3
- path-to-regexp: 0.1.10
+ path-to-regexp: 0.1.7
proxy-addr: ~2.0.7
- qs: 6.13.0
+ qs: 6.11.0
range-parser: ~1.2.1
safe-buffer: 5.2.1
- send: 0.19.0
- serve-static: 1.16.2
+ send: 0.18.0
+ serve-static: 1.15.0
setprototypeof: 1.2.0
statuses: 2.0.1
type-is: ~1.6.18
utils-merge: 1.0.1
vary: ~1.1.2
- checksum: 5ac2b26d8aeddda5564fc0907227d29c100f90c0ead2ead9d474dc5108e8fb306c2de2083c4e3ba326e0906466f2b73417dbac16961f4075ff9f03785fd940fe
+ checksum: 212dbd6c2c222a96a61bc927639c95970a53b06257080bb9e2838adb3bffdb966856551fdad1ab5dd654a217c35db94f987d0aa88d48fb04d306340f5f34dca5
+ languageName: node
+ linkType: hard
+
+"extend-shallow@npm:^2.0.1":
+ version: 2.0.1
+ resolution: "extend-shallow@npm:2.0.1"
+ dependencies:
+ is-extendable: ^0.1.0
+ checksum: 8fb58d9d7a511f4baf78d383e637bd7d2e80843bd9cd0853649108ea835208fb614da502a553acc30208e1325240bb7cc4a68473021612496bb89725483656d8
+ languageName: node
+ linkType: hard
+
+"extend-shallow@npm:^3.0.0, extend-shallow@npm:^3.0.2":
+ version: 3.0.2
+ resolution: "extend-shallow@npm:3.0.2"
+ dependencies:
+ assign-symbols: ^1.0.0
+ is-extendable: ^1.0.1
+ checksum: a920b0cd5838a9995ace31dfd11ab5e79bf6e295aa566910ce53dff19f4b1c0fda2ef21f26b28586c7a2450ca2b42d97bd8c0f5cec9351a819222bf861e02461
languageName: node
linkType: hard
@@ -9687,6 +9867,22 @@ __metadata:
languageName: node
linkType: hard
+"extglob@npm:^2.0.4":
+ version: 2.0.4
+ resolution: "extglob@npm:2.0.4"
+ dependencies:
+ array-unique: ^0.3.2
+ define-property: ^1.0.0
+ expand-brackets: ^2.1.4
+ extend-shallow: ^2.0.1
+ fragment-cache: ^0.2.1
+ regex-not: ^1.0.0
+ snapdragon: ^0.8.1
+ to-regex: ^3.0.1
+ checksum: a41531b8934735b684cef5e8c5a01d0f298d7d384500ceca38793a9ce098125aab04ee73e2d75d5b2901bc5dddd2b64e1b5e3bf19139ea48bac52af4a92f1d00
+ languageName: node
+ linkType: hard
+
"extract-stack@npm:^2.0.0":
version: 2.0.0
resolution: "extract-stack@npm:2.0.0"
@@ -9767,9 +9963,9 @@ __metadata:
linkType: hard
"fast-uri@npm:^3.0.1":
- version: 3.0.3
- resolution: "fast-uri@npm:3.0.3"
- checksum: c52e6c86465f5c240e84a4485fb001088cc743d261a4b54b0050ce4758b1648bdbe53da1328ef9620149dca1435e3de64184f226d7c0a3656cb5837b3491e149
+ version: 3.0.1
+ resolution: "fast-uri@npm:3.0.1"
+ checksum: 106143ff83705995225dcc559411288f3337e732bb2e264e79788f1914b6bd8f8bc3683102de60b15ba00e6ebb443633cabac77d4ebc5cb228c47cf955e199ff
languageName: node
linkType: hard
@@ -9852,20 +10048,10 @@ __metadata:
languageName: node
linkType: hard
-"file-set@npm:^4.0.2":
- version: 4.0.2
- resolution: "file-set@npm:4.0.2"
- dependencies:
- array-back: ^5.0.0
- glob: ^7.1.6
- checksum: 6eacb9df4a0a95fbfb09e8ccdf8c3ef7f30de8c0b5043ff0530a79c0c0003550a0725eba517a5c7fe1c452df57c3b4d506e91859ccc248aebb2f038790eb66e6
- languageName: node
- linkType: hard
-
"filesize@npm:^10.0.8":
- version: 10.1.6
- resolution: "filesize@npm:10.1.6"
- checksum: a797a9d41c8f27a9ae334d23f99fc5d903eac5d03c82190dc163901205435b56626fe1260c779ba3e87a2a34d426f19ff264c3f7d956e00f2d3ac69760b52e33
+ version: 10.1.4
+ resolution: "filesize@npm:10.1.4"
+ checksum: b54949fb1a2ecf2407afeb08f943f59a81da382a83ad2b8472ca2a64ba08345ecd489cb44914f44e48dd125c3658f19687d2d4920ae4505e6356f1054c139dcf
languageName: node
linkType: hard
@@ -9900,18 +10086,18 @@ __metadata:
languageName: node
linkType: hard
-"finalhandler@npm:1.3.1":
- version: 1.3.1
- resolution: "finalhandler@npm:1.3.1"
+"finalhandler@npm:1.2.0":
+ version: 1.2.0
+ resolution: "finalhandler@npm:1.2.0"
dependencies:
debug: 2.6.9
- encodeurl: ~2.0.0
+ encodeurl: ~1.0.2
escape-html: ~1.0.3
on-finished: 2.4.1
parseurl: ~1.3.3
statuses: 2.0.1
unpipe: ~1.0.0
- checksum: a8c58cd97c9cd47679a870f6833a7b417043f5a288cd6af6d0f49b476c874a506100303a128b6d3b654c3d74fa4ff2ffed68a48a27e8630cda5c918f2977dcf4
+ checksum: 92effbfd32e22a7dff2994acedbd9bcc3aa646a3e919ea6a53238090e87097f8ef07cced90aa2cc421abdf993aefbdd5b00104d55c7c5479a8d00ed105b45716
languageName: node
linkType: hard
@@ -9926,11 +10112,12 @@ __metadata:
linkType: hard
"find-babel-config@npm:^2.1.1":
- version: 2.1.2
- resolution: "find-babel-config@npm:2.1.2"
+ version: 2.1.1
+ resolution: "find-babel-config@npm:2.1.1"
dependencies:
json5: ^2.2.3
- checksum: 268f29cb38ee086b0f953c89f762dcea30b5b0e14abee2b39516410c00b49baa6821f598bd50346c93584e5625c5740f5c8b7e34993f568787a068f84dacc8c2
+ path-exists: ^4.0.0
+ checksum: 4be54397339520e0cd49870acb10366684ffc001fd0b7bffedd0fe9d3e1d82234692d3cb4e5ba95280a35887238ba6f82dc79569a13a3749ae3931c23e0b3a99
languageName: node
linkType: hard
@@ -9961,18 +10148,6 @@ __metadata:
languageName: node
linkType: hard
-"find-replace@npm:^5.0.1":
- version: 5.0.2
- resolution: "find-replace@npm:5.0.2"
- peerDependencies:
- "@75lb/nature": "*"
- peerDependenciesMeta:
- "@75lb/nature":
- optional: true
- checksum: 964fb76cf084638c4202628c65c03763fd8627b84b18c0948470b429371d18c5a0340167097961222d4decbcc4502880d776c57c1c3ef5f3d0081b8fde0e17ea
- languageName: node
- linkType: hard
-
"find-up@npm:^2.1.0":
version: 2.1.0
resolution: "find-up@npm:2.1.0"
@@ -10173,12 +10348,12 @@ __metadata:
linkType: hard
"follow-redirects@npm:^1.0.0":
- version: 1.15.9
- resolution: "follow-redirects@npm:1.15.9"
+ version: 1.15.6
+ resolution: "follow-redirects@npm:1.15.6"
peerDependenciesMeta:
debug:
optional: true
- checksum: 859e2bacc7a54506f2bf9aacb10d165df78c8c1b0ceb8023f966621b233717dab56e8d08baadc3ad3b9db58af290413d585c999694b7c146aaf2616340c3d2a6
+ checksum: a62c378dfc8c00f60b9c80cab158ba54e99ba0239a5dd7c81245e5a5b39d10f0c35e249c3379eae719ff0285fff88c365dd446fab19dee771f1d76252df1bbf5
languageName: node
linkType: hard
@@ -10191,13 +10366,20 @@ __metadata:
languageName: node
linkType: hard
+"for-in@npm:^1.0.2":
+ version: 1.0.2
+ resolution: "for-in@npm:1.0.2"
+ checksum: 09f4ae93ce785d253ac963d94c7f3432d89398bf25ac7a24ed034ca393bf74380bdeccc40e0f2d721a895e54211b07c8fad7132e8157827f6f7f059b70b4043d
+ languageName: node
+ linkType: hard
+
"foreground-child@npm:^3.1.0":
- version: 3.3.0
- resolution: "foreground-child@npm:3.3.0"
+ version: 3.2.1
+ resolution: "foreground-child@npm:3.2.1"
dependencies:
cross-spawn: ^7.0.0
signal-exit: ^4.0.1
- checksum: 1989698488f725b05b26bc9afc8a08f08ec41807cd7b92ad85d96004ddf8243fd3e79486b8348c64a3011ae5cc2c9f0936af989e1f28339805d8bc178a75b451
+ checksum: 3e2e844d6003c96d70affe8ae98d7eaaba269a868c14d997620c088340a8775cd5d2d9043e6ceebae1928d8d9a874911c4d664b9a267e8995945df20337aebc0
languageName: node
linkType: hard
@@ -10215,6 +10397,15 @@ __metadata:
languageName: node
linkType: hard
+"fragment-cache@npm:^0.2.1":
+ version: 0.2.1
+ resolution: "fragment-cache@npm:0.2.1"
+ dependencies:
+ map-cache: ^0.2.2
+ checksum: 1cbbd0b0116b67d5790175de0038a11df23c1cd2e8dcdbade58ebba5594c2d641dade6b4f126d82a7b4a6ffc2ea12e3d387dbb64ea2ae97cf02847d436f60fdc
+ languageName: node
+ linkType: hard
+
"fresh@npm:0.5.2":
version: 0.5.2
resolution: "fresh@npm:0.5.2"
@@ -10609,11 +10800,18 @@ __metadata:
linkType: hard
"get-tsconfig@npm:^4.7.0":
- version: 4.8.1
- resolution: "get-tsconfig@npm:4.8.1"
+ version: 4.7.6
+ resolution: "get-tsconfig@npm:4.7.6"
dependencies:
resolve-pkg-maps: ^1.0.0
- checksum: 12df01672e691d2ff6db8cf7fed1ddfef90ed94a5f3d822c63c147a26742026d582acd86afcd6f65db67d809625d17dd7f9d34f4d3f38f69bc2f48e19b2bdd5b
+ checksum: ebfd86f0b356cde98e2a7afe63b58d92e02b8e413ff95551933d277702bf725386ee82c5c0092fe45fb2ba60002340c94ee70777b3220bbfeca83ab45dda1544
+ languageName: node
+ linkType: hard
+
+"get-value@npm:^2.0.3, get-value@npm:^2.0.6":
+ version: 2.0.6
+ resolution: "get-value@npm:2.0.6"
+ checksum: 5c3b99cb5398ea8016bf46ff17afc5d1d286874d2ad38ca5edb6e87d75c0965b0094cb9a9dddef2c59c23d250702323539a7fbdd870620db38c7e7d7ec87c1eb
languageName: node
linkType: hard
@@ -10719,7 +10917,7 @@ __metadata:
languageName: node
linkType: hard
-"glob@npm:^9.3.3":
+"glob@npm:^9.3.3, glob@npm:^9.3.4":
version: 9.3.5
resolution: "glob@npm:9.3.5"
dependencies:
@@ -10875,7 +11073,7 @@ __metadata:
languageName: node
linkType: hard
-"graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.1.9, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.11, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6":
+"graceful-fs@npm:^4.1.2, graceful-fs@npm:^4.1.6, graceful-fs@npm:^4.1.9, graceful-fs@npm:^4.2.0, graceful-fs@npm:^4.2.4, graceful-fs@npm:^4.2.6, graceful-fs@npm:^4.2.9":
version: 4.2.11
resolution: "graceful-fs@npm:4.2.11"
checksum: ac85f94da92d8eb6b7f5a8b20ce65e43d66761c55ce85ac96df6865308390da45a8d3f0296dd3a663de65d30ba497bd46c696cc1e248c72b13d6d567138a4fc7
@@ -11031,6 +11229,45 @@ __metadata:
languageName: node
linkType: hard
+"has-value@npm:^0.3.1":
+ version: 0.3.1
+ resolution: "has-value@npm:0.3.1"
+ dependencies:
+ get-value: ^2.0.3
+ has-values: ^0.1.4
+ isobject: ^2.0.0
+ checksum: 29e2a1e6571dad83451b769c7ce032fce6009f65bccace07c2962d3ad4d5530b6743d8f3229e4ecf3ea8e905d23a752c5f7089100c1f3162039fa6dc3976558f
+ languageName: node
+ linkType: hard
+
+"has-value@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "has-value@npm:1.0.0"
+ dependencies:
+ get-value: ^2.0.6
+ has-values: ^1.0.0
+ isobject: ^3.0.0
+ checksum: b9421d354e44f03d3272ac39fd49f804f19bc1e4fa3ceef7745df43d6b402053f828445c03226b21d7d934a21ac9cf4bc569396dc312f496ddff873197bbd847
+ languageName: node
+ linkType: hard
+
+"has-values@npm:^0.1.4":
+ version: 0.1.4
+ resolution: "has-values@npm:0.1.4"
+ checksum: ab1c4bcaf811ccd1856c11cfe90e62fca9e2b026ebe474233a3d282d8d67e3b59ed85b622c7673bac3db198cb98bd1da2b39300a2f98e453729b115350af49bc
+ languageName: node
+ linkType: hard
+
+"has-values@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "has-values@npm:1.0.0"
+ dependencies:
+ is-number: ^3.0.0
+ kind-of: ^4.0.0
+ checksum: 77e6693f732b5e4cf6c38dfe85fdcefad0fab011af74995c3e83863fabf5e3a836f406d83565816baa0bc0a523c9410db8b990fe977074d61aeb6d8f4fcffa11
+ languageName: node
+ linkType: hard
+
"hash-for-dep@npm:^1.0.2, hash-for-dep@npm:^1.4.7, hash-for-dep@npm:^1.5.0, hash-for-dep@npm:^1.5.1":
version: 1.5.1
resolution: "hash-for-dep@npm:1.5.1"
@@ -11194,15 +11431,15 @@ __metadata:
languageName: node
linkType: hard
-"htmlparser2@npm:^9.1.0":
- version: 9.1.0
- resolution: "htmlparser2@npm:9.1.0"
+"htmlparser2@npm:^8.0.1":
+ version: 8.0.2
+ resolution: "htmlparser2@npm:8.0.2"
dependencies:
domelementtype: ^2.3.0
domhandler: ^5.0.3
- domutils: ^3.1.0
- entities: ^4.5.0
- checksum: e5f8d5193967e4a500226f37bdf2c0f858cecb39dde14d0439f24bf2c461a4342778740d988fbaba652b0e4cb6052f7f2e99e69fc1a329a86c629032bb76e7c8
+ domutils: ^3.0.1
+ entities: ^4.4.0
+ checksum: 29167a0f9282f181da8a6d0311b76820c8a59bc9e3c87009e21968264c2987d2723d6fde5a964d4b7b6cba663fca96ffb373c06d8223a85f52a6089ced942700
languageName: node
linkType: hard
@@ -11313,7 +11550,7 @@ __metadata:
languageName: node
linkType: hard
-"iconv-lite@npm:0.6.3, iconv-lite@npm:^0.6.2, iconv-lite@npm:^0.6.3":
+"iconv-lite@npm:^0.6.2":
version: 0.6.3
resolution: "iconv-lite@npm:0.6.3"
dependencies:
@@ -11339,9 +11576,9 @@ __metadata:
linkType: hard
"ignore@npm:^5.1.1, ignore@npm:^5.2.0, ignore@npm:^5.2.4":
- version: 5.3.2
- resolution: "ignore@npm:5.3.2"
- checksum: 2acfd32a573260ea522ea0bfeff880af426d68f6831f973129e2ba7363f422923cf53aab62f8369cbf4667c7b25b6f8a3761b34ecdb284ea18e87a5262a865be
+ version: 5.3.1
+ resolution: "ignore@npm:5.3.1"
+ checksum: 71d7bb4c1dbe020f915fd881108cbe85a0db3d636a0ea3ba911393c53946711d13a9b1143c7e70db06d571a5822c0a324a6bcde5c9904e7ca5047f01f1bf8cd3
languageName: node
linkType: hard
@@ -11404,20 +11641,13 @@ __metadata:
languageName: node
linkType: hard
-"inflection@npm:^2.0.1":
+"inflection@npm:^2.0.1, inflection@npm:~2.0.1":
version: 2.0.1
resolution: "inflection@npm:2.0.1"
checksum: bb095b495e10a77afc043cc349ae0f7c8c53e4d1fbcd7781111c18d17bde87ce31ea08bd883774bcbb2ff50c301dd4835b5448c80eb50b5e4e080165b6030f3b
languageName: node
linkType: hard
-"inflection@npm:~3.0.0":
- version: 3.0.0
- resolution: "inflection@npm:3.0.0"
- checksum: 6a3b21cb011a9bca1c045ef92d199c927cfd8c4b97809152fabc9c325d36789a7499397b50d46157d080efd19843807ac73e38a2fe79d8f2cf291323998fb5b5
- languageName: node
- linkType: hard
-
"inflight@npm:^1.0.4":
version: 1.0.6
resolution: "inflight@npm:1.0.6"
@@ -11507,8 +11737,8 @@ __metadata:
linkType: hard
"inquirer@npm:^9.1.5":
- version: 9.3.7
- resolution: "inquirer@npm:9.3.7"
+ version: 9.3.6
+ resolution: "inquirer@npm:9.3.6"
dependencies:
"@inquirer/figures": ^1.0.3
ansi-escapes: ^4.3.2
@@ -11522,7 +11752,7 @@ __metadata:
strip-ansi: ^6.0.1
wrap-ansi: ^6.2.0
yoctocolors-cjs: ^2.1.2
- checksum: 4d6e2f51b80051a6b9cc583ed5143e0a2c5e51938ffc0e91bbf8038216090566990f36ccb7856038390891fa69ea8d43ec389c70dcd097b67d351dc365dfc345
+ checksum: f1fd086585e301ec17ce016355e9eb6eb87329c6de578cde35b10d5e4b57443b9f8f1f304d3ab570e5dad2cbc55851c476480296e15793f76836c0c33cf2e713
languageName: node
linkType: hard
@@ -11568,6 +11798,15 @@ __metadata:
languageName: node
linkType: hard
+"is-accessor-descriptor@npm:^1.0.1":
+ version: 1.0.1
+ resolution: "is-accessor-descriptor@npm:1.0.1"
+ dependencies:
+ hasown: ^2.0.0
+ checksum: 8db44c02230a5e9b9dec390a343178791f073d5d5556a400527d2fd67a72d93b226abab2bd4123305c268f5dc22831bfdbd38430441fda82ea9e0b95ddc6b267
+ languageName: node
+ linkType: hard
+
"is-alphabetical@npm:^1.0.0":
version: 1.0.4
resolution: "is-alphabetical@npm:1.0.4"
@@ -11654,11 +11893,20 @@ __metadata:
linkType: hard
"is-core-module@npm:^2.12.1, is-core-module@npm:^2.13.0, is-core-module@npm:^2.5.0":
- version: 2.15.1
- resolution: "is-core-module@npm:2.15.1"
+ version: 2.15.0
+ resolution: "is-core-module@npm:2.15.0"
dependencies:
hasown: ^2.0.2
- checksum: df134c168115690724b62018c37b2f5bba0d5745fa16960b329c5a00883a8bea6a5632fdb1e3efcce237c201826ba09f93197b7cd95577ea56b0df335be23633
+ checksum: a9f7a52707c9b59d7164094d183bda892514fc3ba3139f245219c7abe7f6e8d3e2cdcf861f52a891a467f785f1dfa5d549f73b0ee715f4ba56e8882d335ea585
+ languageName: node
+ linkType: hard
+
+"is-data-descriptor@npm:^1.0.1":
+ version: 1.0.1
+ resolution: "is-data-descriptor@npm:1.0.1"
+ dependencies:
+ hasown: ^2.0.0
+ checksum: fc6da5be5177149d554c5612cc382e9549418ed72f2d3ed5a3e6511b03dd119ae1b2258320ca94931df50b7e9ee012894eccd4ca45bbcadf0d5b27da6faeb15a
languageName: node
linkType: hard
@@ -11687,6 +11935,26 @@ __metadata:
languageName: node
linkType: hard
+"is-descriptor@npm:^0.1.0":
+ version: 0.1.7
+ resolution: "is-descriptor@npm:0.1.7"
+ dependencies:
+ is-accessor-descriptor: ^1.0.1
+ is-data-descriptor: ^1.0.1
+ checksum: 45743109f0bb03f9fa989c34d31ece87cc15792649f147b896a7c4db2906a02fca685867619f4d312e024d7bbd53b945a47c6830d01f5e73efcc6388ac211963
+ languageName: node
+ linkType: hard
+
+"is-descriptor@npm:^1.0.0, is-descriptor@npm:^1.0.2":
+ version: 1.0.3
+ resolution: "is-descriptor@npm:1.0.3"
+ dependencies:
+ is-accessor-descriptor: ^1.0.1
+ is-data-descriptor: ^1.0.1
+ checksum: 316153b2fd86ac23b0a2f28b77744ae0a4e3c7a54fe52fa70b125d0971eb0a3bcfb562fa8e74537af0dad5bc405cc606726eb501fc748a241c10910deea89cfb
+ languageName: node
+ linkType: hard
+
"is-docker@npm:^2.0.0":
version: 2.2.1
resolution: "is-docker@npm:2.2.1"
@@ -11696,6 +11964,22 @@ __metadata:
languageName: node
linkType: hard
+"is-extendable@npm:^0.1.0, is-extendable@npm:^0.1.1":
+ version: 0.1.1
+ resolution: "is-extendable@npm:0.1.1"
+ checksum: 3875571d20a7563772ecc7a5f36cb03167e9be31ad259041b4a8f73f33f885441f778cee1f1fe0085eb4bc71679b9d8c923690003a36a6a5fdf8023e6e3f0672
+ languageName: node
+ linkType: hard
+
+"is-extendable@npm:^1.0.1":
+ version: 1.0.1
+ resolution: "is-extendable@npm:1.0.1"
+ dependencies:
+ is-plain-object: ^2.0.4
+ checksum: db07bc1e9de6170de70eff7001943691f05b9d1547730b11be01c0ebfe67362912ba743cf4be6fd20a5e03b4180c685dad80b7c509fe717037e3eee30ad8e84f
+ languageName: node
+ linkType: hard
+
"is-extglob@npm:^2.1.0, is-extglob@npm:^2.1.1":
version: 2.1.1
resolution: "is-extglob@npm:2.1.1"
@@ -11788,6 +12072,15 @@ __metadata:
languageName: node
linkType: hard
+"is-number@npm:^3.0.0":
+ version: 3.0.0
+ resolution: "is-number@npm:3.0.0"
+ dependencies:
+ kind-of: ^3.0.2
+ checksum: 0c62bf8e9d72c4dd203a74d8cfc751c746e75513380fef420cda8237e619a988ee43e678ddb23c87ac24d91ac0fe9f22e4ffb1301a50310c697e9d73ca3994e9
+ languageName: node
+ linkType: hard
+
"is-number@npm:^7.0.0":
version: 7.0.0
resolution: "is-number@npm:7.0.0"
@@ -11830,6 +12123,15 @@ __metadata:
languageName: node
linkType: hard
+"is-plain-object@npm:^2.0.3, is-plain-object@npm:^2.0.4":
+ version: 2.0.4
+ resolution: "is-plain-object@npm:2.0.4"
+ dependencies:
+ isobject: ^3.0.1
+ checksum: 2a401140cfd86cabe25214956ae2cfee6fbd8186809555cd0e84574f88de7b17abacb2e477a6a658fa54c6083ecbda1e6ae404c7720244cd198903848fca70ca
+ languageName: node
+ linkType: hard
+
"is-plain-object@npm:^5.0.0":
version: 5.0.0
resolution: "is-plain-object@npm:5.0.0"
@@ -11893,15 +12195,6 @@ __metadata:
languageName: node
linkType: hard
-"is-subdir@npm:^1.2.0":
- version: 1.2.0
- resolution: "is-subdir@npm:1.2.0"
- dependencies:
- better-path-resolve: 1.0.0
- checksum: 31029a383972bff4cc4f1bd1463fd04dde017e0a04ae3a6f6e08124a90c6c4656312d593101b0f38805fa3f3c8f6bc4583524bbf72c50784fa5ca0d3e5a76279
- languageName: node
- linkType: hard
-
"is-symbol@npm:^1.0.2, is-symbol@npm:^1.0.3":
version: 1.0.4
resolution: "is-symbol@npm:1.0.4"
@@ -11952,7 +12245,7 @@ __metadata:
languageName: node
linkType: hard
-"is-windows@npm:^1.0.0, is-windows@npm:^1.0.1":
+"is-windows@npm:^1.0.1, is-windows@npm:^1.0.2":
version: 1.0.2
resolution: "is-windows@npm:1.0.2"
checksum: 438b7e52656fe3b9b293b180defb4e448088e7023a523ec21a91a80b9ff8cdb3377ddb5b6e60f7c7de4fa8b63ab56e121b6705fe081b3cf1b828b0a380009ad7
@@ -11990,9 +12283,9 @@ __metadata:
linkType: hard
"isbinaryfile@npm:^5.0.0":
- version: 5.0.4
- resolution: "isbinaryfile@npm:5.0.4"
- checksum: d88982a889369d83a5937b4b4d2288ed3b3dbbcee8fc74db40058f3c089a2c7beb9e5305b7177e82d87ff38fb62be8d60960f7a2d669ca08240ef31c1435b884
+ version: 5.0.2
+ resolution: "isbinaryfile@npm:5.0.2"
+ checksum: 5e3e9d31b016eefb7e93bd0ab7d088489882eeb9018bf71303f2ce5d9ad02dbb127663d065ce2519913c3c9135a99002e989d6b1786a0fcc0b3c3d2defb1f7d0
languageName: node
linkType: hard
@@ -12019,6 +12312,13 @@ __metadata:
languageName: node
linkType: hard
+"isobject@npm:^3.0.0, isobject@npm:^3.0.1":
+ version: 3.0.1
+ resolution: "isobject@npm:3.0.1"
+ checksum: db85c4c970ce30693676487cca0e61da2ca34e8d4967c2e1309143ff910c207133a969f9e4ddb2dc6aba670aabce4e0e307146c310350b298e74a31f7d464703
+ languageName: node
+ linkType: hard
+
"istextorbinary@npm:2.1.0":
version: 2.1.0
resolution: "istextorbinary@npm:2.1.0"
@@ -12125,20 +12425,19 @@ __metadata:
languageName: node
linkType: hard
-"jsdoc-api@npm:^8.1.1":
- version: 8.1.1
- resolution: "jsdoc-api@npm:8.1.1"
+"jsdoc-api@npm:^8.1.0":
+ version: 8.1.0
+ resolution: "jsdoc-api@npm:8.1.0"
dependencies:
array-back: ^6.2.2
cache-point: ^2.0.0
collect-all: ^1.0.4
- file-set: ^4.0.2
fs-then-native: ^2.0.0
jsdoc: ^4.0.3
object-to-spawn-args: ^2.0.1
temp-path: ^1.0.0
walk-back: ^5.1.0
- checksum: 862aff98c438a2ead5693425eabb15262b5af5ddb0b162600491739746193b41e941b48a5dc7577652667fa977354e47471f20c466f5e86e729fcb0fb15d376d
+ checksum: 1c87990b12899e9f491cc66a0a02579b0d9864e17b63e94b1d2590658e4f58a09c7a7017c461cc9692b97a1345e4c700b3fade6b8ca7aab2084b94412e164b57
languageName: node
linkType: hard
@@ -12155,14 +12454,15 @@ __metadata:
linkType: hard
"jsdoc-parse@npm:^6.2.1":
- version: 6.2.4
- resolution: "jsdoc-parse@npm:6.2.4"
+ version: 6.2.1
+ resolution: "jsdoc-parse@npm:6.2.1"
dependencies:
array-back: ^6.2.2
- find-replace: ^5.0.1
lodash.omit: ^4.5.0
- sort-array: ^5.0.0
- checksum: ddc45c25468eff1ffe47a8295d8195f55007c254bc6a4a74de33aad21ec6e72bcd5a4c6f40f8f9d991005b6f55a82179d008694a70b4edceb31473cc5247b867
+ reduce-extract: ^1.0.0
+ sort-array: ^4.1.5
+ test-value: ^3.0.0
+ checksum: 44470e5b84f4a7601a99128ac01b0c3c596db91770592a7385d92b422f97b609fd0e2e9b025e8bb22ebca297cf8f217d9a6d6bf9ed8394e299a41ba0e507afa6
languageName: node
linkType: hard
@@ -12174,25 +12474,25 @@ __metadata:
linkType: hard
"jsdoc-to-markdown@npm:^8.0.1":
- version: 8.0.3
- resolution: "jsdoc-to-markdown@npm:8.0.3"
+ version: 8.0.2
+ resolution: "jsdoc-to-markdown@npm:8.0.2"
dependencies:
array-back: ^6.2.2
command-line-tool: ^0.8.0
config-master: ^3.1.0
- dmd: ^6.2.3
- jsdoc-api: ^8.1.1
+ dmd: ^6.2.1
+ jsdoc-api: ^8.1.0
jsdoc-parse: ^6.2.1
walk-back: ^5.1.0
bin:
jsdoc2md: bin/cli.js
- checksum: df3e130b69a53878f514df0a25f0a6f1871817d8024e2f3721aac0ff551b8fb5d08621d78dafcaf2e303a539758cced33222597fe32d688e5f0361ce903c640f
+ checksum: 92e110fcfb48807182cd2ce7d19c7523549b332970bdcff13feb10d2d72d45f062f3e178e02c22c2628dbde79783ed7e41a77671eeaa84cd5942d4f6abf8f3bb
languageName: node
linkType: hard
"jsdoc@npm:^4.0.3":
- version: 4.0.4
- resolution: "jsdoc@npm:4.0.4"
+ version: 4.0.3
+ resolution: "jsdoc@npm:4.0.3"
dependencies:
"@babel/parser": ^7.20.15
"@jsdoc/salty": ^0.2.1
@@ -12211,16 +12511,25 @@ __metadata:
underscore: ~1.13.2
bin:
jsdoc: ./jsdoc.js
- checksum: f4372a15a262ffd5abfe71315bbf9ad0fd3dd633ca04298702c0b0d3bacd615a35e9f11877bd7aa4e1bb04adb731a55fb15c3e14e69a8e740e86c45548ad39b6
+ checksum: 92b3c1e1c79759d5deb89d4f1d47e11e54f85e0b8c5d972b2415471659c01f50909cbf31cd0666f436e3522a20888bfeb81ba906ed1c2fd6c29d6026702c8018
languageName: node
linkType: hard
-"jsesc@npm:^3.0.2, jsesc@npm:~3.0.2":
- version: 3.0.2
- resolution: "jsesc@npm:3.0.2"
+"jsesc@npm:^2.5.1":
+ version: 2.5.2
+ resolution: "jsesc@npm:2.5.2"
+ bin:
+ jsesc: bin/jsesc
+ checksum: 4dc190771129e12023f729ce20e1e0bfceac84d73a85bc3119f7f938843fe25a4aeccb54b6494dce26fcf263d815f5f31acdefac7cc9329efb8422a4f4d9fa9d
+ languageName: node
+ linkType: hard
+
+"jsesc@npm:~0.5.0":
+ version: 0.5.0
+ resolution: "jsesc@npm:0.5.0"
bin:
jsesc: bin/jsesc
- checksum: a36d3ca40574a974d9c2063bf68c2b6141c20da8f2a36bd3279fc802563f35f0527a6c828801295bdfb2803952cf2cf387786c2c90ed564f88d5782475abfe3c
+ checksum: b8b44cbfc92f198ad972fba706ee6a1dfa7485321ee8c0b25f5cedd538dcb20cde3197de16a7265430fce8277a12db066219369e3d51055038946039f6e20e17
languageName: node
linkType: hard
@@ -12543,7 +12852,7 @@ __metadata:
languageName: node
linkType: hard
-"loader-utils@npm:^2.0.0, loader-utils@npm:^2.0.4":
+"loader-utils@npm:^2.0.0":
version: 2.0.4
resolution: "loader-utils@npm:2.0.4"
dependencies:
@@ -12632,6 +12941,13 @@ __metadata:
languageName: node
linkType: hard
+"lodash.assignin@npm:^4.1.0":
+ version: 4.2.0
+ resolution: "lodash.assignin@npm:4.2.0"
+ checksum: 4b55bc1d65ccd7648fdba8a4316d10546929bf0beb5950830d86c559948cf170f0e65b77c95e66b45b511b85a31161714de8b2008d2537627ef3c7759afe36a6
+ languageName: node
+ linkType: hard
+
"lodash.camelcase@npm:^4.1.1, lodash.camelcase@npm:^4.3.0":
version: 4.3.0
resolution: "lodash.camelcase@npm:4.3.0"
@@ -12639,6 +12955,20 @@ __metadata:
languageName: node
linkType: hard
+"lodash.castarray@npm:^4.4.0":
+ version: 4.4.0
+ resolution: "lodash.castarray@npm:4.4.0"
+ checksum: fca8c7047e0ae2738b0b2503fb00157ae0ff6d8a1b716f87ed715b22560e09de438c75b65e01a7e44ceb41c5b31dce2eb576e46db04beb9c699c498e03cbd00f
+ languageName: node
+ linkType: hard
+
+"lodash.clonedeep@npm:^4.4.1":
+ version: 4.5.0
+ resolution: "lodash.clonedeep@npm:4.5.0"
+ checksum: 92c46f094b064e876a23c97f57f81fbffd5d760bf2d8a1c61d85db6d1e488c66b0384c943abee4f6af7debf5ad4e4282e74ff83177c9e63d8ff081a4837c3489
+ languageName: node
+ linkType: hard
+
"lodash.debounce@npm:^3.1.1":
version: 3.1.1
resolution: "lodash.debounce@npm:3.1.1"
@@ -12662,6 +12992,13 @@ __metadata:
languageName: node
linkType: hard
+"lodash.find@npm:^4.5.1":
+ version: 4.6.0
+ resolution: "lodash.find@npm:4.6.0"
+ checksum: b737f849a4fe36f5c3664ea636780dda2fde18335021faf80cdfdcb300ed75441da6f55cfd6de119092d8bb2ddbc4433f4a8de4b99c0b9c8640465b0901c717c
+ languageName: node
+ linkType: hard
+
"lodash.flatten@npm:^3.0.2":
version: 3.0.2
resolution: "lodash.flatten@npm:3.0.2"
@@ -12749,6 +13086,13 @@ __metadata:
languageName: node
linkType: hard
+"lodash.uniqby@npm:^4.7.0":
+ version: 4.7.0
+ resolution: "lodash.uniqby@npm:4.7.0"
+ checksum: 659264545a95726d1493123345aad8cbf56e17810fa9a0b029852c6d42bc80517696af09d99b23bef1845d10d95e01b8b4a1da578f22aeba7a30d3e0022a4938
+ languageName: node
+ linkType: hard
+
"lodash@npm:^4.0.0, lodash@npm:^4.17.10, lodash@npm:^4.17.11, lodash@npm:^4.17.12, lodash@npm:^4.17.14, lodash@npm:^4.17.15, lodash@npm:^4.17.19, lodash@npm:^4.17.21":
version: 4.17.21
resolution: "lodash@npm:4.17.21"
@@ -12798,9 +13142,9 @@ __metadata:
linkType: hard
"loglevel@npm:^1.4.1":
- version: 1.9.2
- resolution: "loglevel@npm:1.9.2"
- checksum: 896c67b90a507bfcfc1e9a4daa7bf789a441dd70d95cd13b998d6dd46233a3bfadfb8fadb07250432bbfb53bf61e95f2520f9b11f9d3175cc460e5c251eca0af
+ version: 1.9.1
+ resolution: "loglevel@npm:1.9.1"
+ checksum: e1c8586108c4d566122e91f8a79c8df728920e3a714875affa5120566761a24077ec8ec9e5fc388b022e39fc411ec6e090cde1b5775871241b045139771eeb06
languageName: node
linkType: hard
@@ -12862,11 +13206,11 @@ __metadata:
linkType: hard
"magic-string@npm:^0.30.0":
- version: 0.30.12
- resolution: "magic-string@npm:0.30.12"
+ version: 0.30.10
+ resolution: "magic-string@npm:0.30.10"
dependencies:
- "@jridgewell/sourcemap-codec": ^1.5.0
- checksum: 3f0d23b74371765f0e6cad4284eebba0ac029c7a55e39292de5aa92281afb827138cb2323d24d2924f6b31f138c3783596c5ccaa98653fe9cf122e1f81325b59
+ "@jridgewell/sourcemap-codec": ^1.4.15
+ checksum: 456fd47c39b296c47dff967e1965121ace35417eab7f45a99e681e725b8661b48e1573c366ee67a27715025b3740773c46b088f115421c7365ea4ea6fa10d399
languageName: node
linkType: hard
@@ -12934,6 +13278,13 @@ __metadata:
languageName: node
linkType: hard
+"map-cache@npm:^0.2.2":
+ version: 0.2.2
+ resolution: "map-cache@npm:0.2.2"
+ checksum: 3067cea54285c43848bb4539f978a15dedc63c03022abeec6ef05c8cb6829f920f13b94bcaf04142fc6a088318e564c4785704072910d120d55dbc2e0c421969
+ languageName: node
+ linkType: hard
+
"map-obj@npm:^1.0.0":
version: 1.0.1
resolution: "map-obj@npm:1.0.1"
@@ -12948,6 +13299,15 @@ __metadata:
languageName: node
linkType: hard
+"map-visit@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "map-visit@npm:1.0.0"
+ dependencies:
+ object-visit: ^1.0.0
+ checksum: c27045a5021c344fc19b9132eb30313e441863b2951029f8f8b66f79d3d8c1e7e5091578075a996f74e417479506fe9ede28c44ca7bc351a61c9d8073daec36a
+ languageName: node
+ linkType: hard
+
"markdown-it-anchor@npm:^8.6.7":
version: 8.6.7
resolution: "markdown-it-anchor@npm:8.6.7"
@@ -13301,10 +13661,10 @@ __metadata:
languageName: node
linkType: hard
-"merge-descriptors@npm:1.0.3":
- version: 1.0.3
- resolution: "merge-descriptors@npm:1.0.3"
- checksum: 52117adbe0313d5defa771c9993fe081e2d2df9b840597e966aadafde04ae8d0e3da46bac7ca4efc37d4d2b839436582659cd49c6a43eacb3fe3050896a105d1
+"merge-descriptors@npm:1.0.1":
+ version: 1.0.1
+ resolution: "merge-descriptors@npm:1.0.1"
+ checksum: 5abc259d2ae25bb06d19ce2b94a21632583c74e2a9109ee1ba7fd147aa7362b380d971e0251069f8b3eb7d48c21ac839e21fa177b335e82c76ec172e30c31a26
languageName: node
linkType: hard
@@ -13424,13 +13784,34 @@ __metadata:
languageName: node
linkType: hard
-"micromatch@npm:~4.0.8":
- version: 4.0.8
- resolution: "micromatch@npm:4.0.8"
+"micromatch@npm:^3.0.4, micromatch@npm:^3.1.4":
+ version: 3.1.10
+ resolution: "micromatch@npm:3.1.10"
+ dependencies:
+ arr-diff: ^4.0.0
+ array-unique: ^0.3.2
+ braces: ^2.3.1
+ define-property: ^2.0.2
+ extend-shallow: ^3.0.2
+ extglob: ^2.0.4
+ fragment-cache: ^0.2.1
+ kind-of: ^6.0.2
+ nanomatch: ^1.2.9
+ object.pick: ^1.3.0
+ regex-not: ^1.0.0
+ snapdragon: ^0.8.1
+ to-regex: ^3.0.2
+ checksum: ad226cba4daa95b4eaf47b2ca331c8d2e038d7b41ae7ed0697cde27f3f1d6142881ab03d4da51b65d9d315eceb5e4cdddb3fbb55f5f72cfa19cf3ea469d054dc
+ languageName: node
+ linkType: hard
+
+"micromatch@npm:^4.0.2, micromatch@npm:^4.0.4, micromatch@npm:^4.0.5":
+ version: 4.0.7
+ resolution: "micromatch@npm:4.0.7"
dependencies:
braces: ^3.0.3
picomatch: ^2.3.1
- checksum: 79920eb634e6f400b464a954fcfa589c4e7c7143209488e44baf627f9affc8b1e306f41f4f0deedde97e69cb725920879462d3e750ab3bd3c1aed675bb3a8966
+ checksum: 3cde047d70ad80cf60c787b77198d680db3b8c25b23feb01de5e2652205d9c19f43bd81882f69a0fd1f0cde6a7a122d774998aad3271ddb1b8accf8a0f480cf7
languageName: node
linkType: hard
@@ -13495,14 +13876,14 @@ __metadata:
linkType: hard
"mini-css-extract-plugin@npm:^2.5.2":
- version: 2.9.2
- resolution: "mini-css-extract-plugin@npm:2.9.2"
+ version: 2.9.0
+ resolution: "mini-css-extract-plugin@npm:2.9.0"
dependencies:
schema-utils: ^4.0.0
tapable: ^2.2.1
peerDependencies:
webpack: ^5.0.0
- checksum: 67a1f75359371a7776108999d472ae0942ccd904401e364e3a2c710d4b6fec61c4f53288594fcac35891f009e6df8825a00dfd3bfe4bcec0f862081d1f7cad50
+ checksum: ae192c67ba85ac8bffeab66774635bf90181f00d5dd6cf95412426192599ddf5506fb4b1550acbd7a5476476e39db53c770dd40f8378f7baf5de96e3fec4e6e9
languageName: node
linkType: hard
@@ -13682,6 +14063,16 @@ __metadata:
languageName: node
linkType: hard
+"mixin-deep@npm:^1.2.0":
+ version: 1.3.2
+ resolution: "mixin-deep@npm:1.3.2"
+ dependencies:
+ for-in: ^1.0.2
+ is-extendable: ^1.0.1
+ checksum: 820d5a51fcb7479f2926b97f2c3bb223546bc915e6b3a3eb5d906dda871bba569863595424a76682f2b15718252954644f3891437cb7e3f220949bed54b1750d
+ languageName: node
+ linkType: hard
+
"mkdirp2@npm:^1.0.4":
version: 1.0.5
resolution: "mkdirp2@npm:1.0.5"
@@ -13763,7 +14154,14 @@ __metadata:
languageName: node
linkType: hard
-"ms@npm:2.1.3, ms@npm:^2.1.1, ms@npm:^2.1.3":
+"ms@npm:2.1.2":
+ version: 2.1.2
+ resolution: "ms@npm:2.1.2"
+ checksum: 673cdb2c3133eb050c745908d8ce632ed2c02d85640e2edb3ace856a2266a813b30c613569bf3354fdf4ea7d1a1494add3bfa95e2713baa27d0c2c71fc44f58f
+ languageName: node
+ linkType: hard
+
+"ms@npm:2.1.3, ms@npm:^2.1.1":
version: 2.1.3
resolution: "ms@npm:2.1.3"
checksum: aa92de608021b242401676e35cfa5aa42dd70cbdc082b916da7fb925c542173e36bce97ea3e804923fe92c0ad991434e4a38327e15a1b5b5f945d66df615ae6d
@@ -13809,6 +14207,25 @@ __metadata:
languageName: node
linkType: hard
+"nanomatch@npm:^1.2.9":
+ version: 1.2.13
+ resolution: "nanomatch@npm:1.2.13"
+ dependencies:
+ arr-diff: ^4.0.0
+ array-unique: ^0.3.2
+ define-property: ^2.0.2
+ extend-shallow: ^3.0.2
+ fragment-cache: ^0.2.1
+ is-windows: ^1.0.2
+ kind-of: ^6.0.2
+ object.pick: ^1.3.0
+ regex-not: ^1.0.0
+ snapdragon: ^0.8.1
+ to-regex: ^3.0.1
+ checksum: 54d4166d6ef08db41252eb4e96d4109ebcb8029f0374f9db873bd91a1f896c32ec780d2a2ea65c0b2d7caf1f28d5e1ea33746a470f32146ac8bba821d80d38d8
+ languageName: node
+ linkType: hard
+
"natural-compare-lite@npm:^1.4.0":
version: 1.4.0
resolution: "natural-compare-lite@npm:1.4.0"
@@ -13823,20 +14240,13 @@ __metadata:
languageName: node
linkType: hard
-"negotiator@npm:0.6.3":
+"negotiator@npm:0.6.3, negotiator@npm:^0.6.3":
version: 0.6.3
resolution: "negotiator@npm:0.6.3"
checksum: b8ffeb1e262eff7968fc90a2b6767b04cfd9842582a9d0ece0af7049537266e7b2506dfb1d107a32f06dd849ab2aea834d5830f7f4d0e5cb7d36e1ae55d021d9
languageName: node
linkType: hard
-"negotiator@npm:^0.6.3, negotiator@npm:~0.6.4":
- version: 0.6.4
- resolution: "negotiator@npm:0.6.4"
- checksum: 7ded10aa02a0707d1d12a9973fdb5954f98547ca7beb60e31cb3a403cc6e8f11138db7a3b0128425cf836fc85d145ec4ce983b2bdf83dca436af879c2d683510
- languageName: node
- linkType: hard
-
"neo-async@npm:^2.6.0, neo-async@npm:^2.6.2":
version: 2.6.2
resolution: "neo-async@npm:2.6.2"
@@ -13874,15 +14284,6 @@ __metadata:
languageName: node
linkType: hard
-"node-addon-api@npm:^7.0.0":
- version: 7.1.1
- resolution: "node-addon-api@npm:7.1.1"
- dependencies:
- node-gyp: latest
- checksum: 46051999e3289f205799dfaf6bcb017055d7569090f0004811110312e2db94cb4f8654602c7eb77a60a1a05142cc2b96e1b5c56ca4622c41a5c6370787faaf30
- languageName: node
- linkType: hard
-
"node-gyp@npm:latest":
version: 10.2.0
resolution: "node-gyp@npm:10.2.0"
@@ -13931,7 +14332,7 @@ __metadata:
languageName: node
linkType: hard
-"node-releases@npm:^2.0.18":
+"node-releases@npm:^2.0.14":
version: 2.0.18
resolution: "node-releases@npm:2.0.18"
checksum: ef55a3d853e1269a6d6279b7692cd6ff3e40bc74947945101138745bfdc9a5edabfe72cb19a31a8e45752e1910c4c65c77d931866af6357f242b172b7283f5b3
@@ -14012,6 +14413,13 @@ __metadata:
languageName: node
linkType: hard
+"npm-git-info@npm:^1.0.3":
+ version: 1.0.3
+ resolution: "npm-git-info@npm:1.0.3"
+ checksum: 1f2218004e109db649580f023ae2b6eb690efae0e47ec26216a01751325b45788430428b12478d91583e18bd8f4bc933d02a1f42984b8d4977f1c35ddef1bad2
+ languageName: node
+ linkType: hard
+
"npm-package-arg@npm:^10.1.0":
version: 10.1.0
resolution: "npm-package-arg@npm:10.1.0"
@@ -14100,6 +14508,17 @@ __metadata:
languageName: node
linkType: hard
+"object-copy@npm:^0.1.0":
+ version: 0.1.0
+ resolution: "object-copy@npm:0.1.0"
+ dependencies:
+ copy-descriptor: ^0.1.0
+ define-property: ^0.2.5
+ kind-of: ^3.0.3
+ checksum: a9e35f07e3a2c882a7e979090360d1a20ab51d1fa19dfdac3aa8873b328a7c4c7683946ee97c824ae40079d848d6740a3788fa14f2185155dab7ed970a72c783
+ languageName: node
+ linkType: hard
+
"object-get@npm:^2.1.1":
version: 2.1.1
resolution: "object-get@npm:2.1.1"
@@ -14135,6 +14554,15 @@ __metadata:
languageName: node
linkType: hard
+"object-visit@npm:^1.0.0":
+ version: 1.0.1
+ resolution: "object-visit@npm:1.0.1"
+ dependencies:
+ isobject: ^3.0.0
+ checksum: b0ee07f5bf3bb881b881ff53b467ebbde2b37ebb38649d6944a6cd7681b32eedd99da9bd1e01c55facf81f54ed06b13af61aba6ad87f0052982995e09333f790
+ languageName: node
+ linkType: hard
+
"object.assign@npm:^4.1.5":
version: 4.1.5
resolution: "object.assign@npm:4.1.5"
@@ -14162,6 +14590,15 @@ __metadata:
languageName: node
linkType: hard
+"object.pick@npm:^1.3.0":
+ version: 1.3.0
+ resolution: "object.pick@npm:1.3.0"
+ dependencies:
+ isobject: ^3.0.1
+ checksum: 77fb6eed57c67adf75e9901187e37af39f052ef601cb4480386436561357eb9e459e820762f01fd02c5c1b42ece839ad393717a6d1850d848ee11fbabb3e580a
+ languageName: node
+ linkType: hard
+
"object.values@npm:^1.1.0":
version: 1.2.0
resolution: "object.values@npm:1.2.0"
@@ -14437,9 +14874,9 @@ __metadata:
linkType: hard
"package-json-from-dist@npm:^1.0.0":
- version: 1.0.1
- resolution: "package-json-from-dist@npm:1.0.1"
- checksum: 58ee9538f2f762988433da00e26acc788036914d57c71c246bf0be1b60cdbd77dd60b6a3e1a30465f0b248aeb80079e0b34cb6050b1dfa18c06953bb1cbc7602
+ version: 1.0.0
+ resolution: "package-json-from-dist@npm:1.0.0"
+ checksum: ac706ec856a5a03f5261e4e48fa974f24feb044d51f84f8332e2af0af04fbdbdd5bbbfb9cbbe354190409bc8307c83a9e38c6672c3c8855f709afb0006a009ea
languageName: node
linkType: hard
@@ -14493,21 +14930,12 @@ __metadata:
linkType: hard
"parse5-htmlparser2-tree-adapter@npm:^7.0.0":
- version: 7.1.0
- resolution: "parse5-htmlparser2-tree-adapter@npm:7.1.0"
- dependencies:
- domhandler: ^5.0.3
- parse5: ^7.0.0
- checksum: 98326fc5443e2149e10695adbfd0b0b3383c54398799f858b4ac2914adb199af8fcc90c2143aa5f7fd5f9482338f26ef253b468722f34d50bb215ec075d89fe9
- languageName: node
- linkType: hard
-
-"parse5-parser-stream@npm:^7.1.2":
- version: 7.1.2
- resolution: "parse5-parser-stream@npm:7.1.2"
+ version: 7.0.0
+ resolution: "parse5-htmlparser2-tree-adapter@npm:7.0.0"
dependencies:
+ domhandler: ^5.0.2
parse5: ^7.0.0
- checksum: 75b232d460bce6bd0e35012750a78ef034f40ccf550b7c6cec3122395af6b4553202ad3663ad468cf537ead5a2e13b6727670395fd0ff548faccad1dc2dc93cf
+ checksum: fc5d01e07733142a1baf81de5c2a9c41426c04b7ab29dd218acb80cd34a63177c90aff4a4aee66cf9f1d0aeecff1389adb7452ad6f8af0a5888e3e9ad6ef733d
languageName: node
linkType: hard
@@ -14518,12 +14946,12 @@ __metadata:
languageName: node
linkType: hard
-"parse5@npm:^7.0.0, parse5@npm:^7.1.2":
- version: 7.2.1
- resolution: "parse5@npm:7.2.1"
+"parse5@npm:^7.0.0":
+ version: 7.1.2
+ resolution: "parse5@npm:7.1.2"
dependencies:
- entities: ^4.5.0
- checksum: 11253cf8aa2e7fc41c004c64cba6f2c255f809663365db65bd7ad0e8cf7b89e436a563c20059346371cc543a6c1b567032088883ca6a2cbc88276c666b68236d
+ entities: ^4.4.0
+ checksum: 59465dd05eb4c5ec87b76173d1c596e152a10e290b7abcda1aecf0f33be49646ea74840c69af975d7887543ea45564801736356c568d6b5e71792fd0f4055713
languageName: node
linkType: hard
@@ -14534,6 +14962,13 @@ __metadata:
languageName: node
linkType: hard
+"pascalcase@npm:^0.1.1":
+ version: 0.1.1
+ resolution: "pascalcase@npm:0.1.1"
+ checksum: f83681c3c8ff75fa473a2bb2b113289952f802ff895d435edd717e7cb898b0408cbdb247117a938edcbc5d141020909846cc2b92c47213d764e2a94d2ad2b925
+ languageName: node
+ linkType: hard
+
"path-exists@npm:^3.0.0":
version: 3.0.0
resolution: "path-exists@npm:3.0.0"
@@ -14623,17 +15058,17 @@ __metadata:
languageName: node
linkType: hard
-"path-to-regexp@npm:0.1.10":
- version: 0.1.10
- resolution: "path-to-regexp@npm:0.1.10"
- checksum: ab7a3b7a0b914476d44030340b0a65d69851af2a0f33427df1476100ccb87d409c39e2182837a96b98fb38c4ef2ba6b87bdad62bb70a2c153876b8061760583c
+"path-to-regexp@npm:0.1.7":
+ version: 0.1.7
+ resolution: "path-to-regexp@npm:0.1.7"
+ checksum: 69a14ea24db543e8b0f4353305c5eac6907917031340e5a8b37df688e52accd09e3cebfe1660b70d76b6bd89152f52183f28c74813dbf454ba1a01c82a38abce
languageName: node
linkType: hard
"path-to-regexp@npm:^6.2.1":
- version: 6.3.0
- resolution: "path-to-regexp@npm:6.3.0"
- checksum: eca78602e6434a1b6799d511d375ec044e8d7e28f5a48aa5c28d57d8152fb52f3fc62fb1cfc5dfa2198e1f041c2a82ed14043d75740a2fe60e91b5089a153250
+ version: 6.2.2
+ resolution: "path-to-regexp@npm:6.2.2"
+ checksum: b7b0005c36f5099f9ed1fb20a820d2e4ed1297ffe683ea1d678f5e976eb9544f01debb281369dabdc26da82e6453901bf71acf2c7ed14b9243536c2a45286c33
languageName: node
linkType: hard
@@ -14651,10 +15086,10 @@ __metadata:
languageName: node
linkType: hard
-"picocolors@npm:^1.0.0, picocolors@npm:^1.1.0":
- version: 1.1.1
- resolution: "picocolors@npm:1.1.1"
- checksum: e1cf46bf84886c79055fdfa9dcb3e4711ad259949e3565154b004b260cd356c5d54b31a1437ce9782624bf766272fe6b0154f5f0c744fb7af5d454d2b60db045
+"picocolors@npm:^1.0.0, picocolors@npm:^1.0.1":
+ version: 1.0.1
+ resolution: "picocolors@npm:1.0.1"
+ checksum: fa68166d1f56009fc02a34cdfd112b0dd3cf1ef57667ac57281f714065558c01828cdf4f18600ad6851cbe0093952ed0660b1e0156bddf2184b6aaf5817553a5
languageName: node
linkType: hard
@@ -14697,13 +15132,6 @@ __metadata:
languageName: node
linkType: hard
-"pkg-entry-points@npm:^1.1.0":
- version: 1.1.1
- resolution: "pkg-entry-points@npm:1.1.1"
- checksum: cf122530c2d05b8e14c202fcbd4c17f6f10ba3d16421bf9cf43fa7361f2205af4521278d70c3fef3a99cf04122a88daf21bec5f6eca3a067b762c2aa67dafdcd
- languageName: node
- linkType: hard
-
"pkg-up@npm:^2.0.0":
version: 2.0.0
resolution: "pkg-up@npm:2.0.0"
@@ -14753,6 +15181,13 @@ __metadata:
languageName: node
linkType: hard
+"posix-character-classes@npm:^0.1.0":
+ version: 0.1.1
+ resolution: "posix-character-classes@npm:0.1.1"
+ checksum: dedb99913c60625a16050cfed2fb5c017648fc075be41ac18474e1c6c3549ef4ada201c8bd9bd006d36827e289c571b6092e1ef6e756cdbab2fd7046b25c6442
+ languageName: node
+ linkType: hard
+
"possible-typed-array-names@npm:^1.0.0":
version: 1.0.0
resolution: "possible-typed-array-names@npm:1.0.0"
@@ -14805,9 +15240,9 @@ __metadata:
linkType: hard
"postcss-resolve-nested-selector@npm:^0.1.1":
- version: 0.1.6
- resolution: "postcss-resolve-nested-selector@npm:0.1.6"
- checksum: 85453901afe2a4db497b4e0d2c9cf2a097a08fa5d45bc646547025176217050334e423475519a1e6c74a1f31ade819d16bb37a39914e5321e250695ee3feea14
+ version: 0.1.1
+ resolution: "postcss-resolve-nested-selector@npm:0.1.1"
+ checksum: b08fb76ab092a09ee01328bad620a01dcb445ac5eb02dd0ed9ed75217c2f779ecb3bf99a361c46e695689309c08c09f1a1ad7354c8d58c2c2c40d364657fcb08
languageName: node
linkType: hard
@@ -14821,12 +15256,12 @@ __metadata:
linkType: hard
"postcss-selector-parser@npm:^6.0.13, postcss-selector-parser@npm:^6.0.2, postcss-selector-parser@npm:^6.0.4":
- version: 6.1.2
- resolution: "postcss-selector-parser@npm:6.1.2"
+ version: 6.1.1
+ resolution: "postcss-selector-parser@npm:6.1.1"
dependencies:
cssesc: ^3.0.0
util-deprecate: ^1.0.2
- checksum: ce9440fc42a5419d103f4c7c1847cb75488f3ac9cbe81093b408ee9701193a509f664b4d10a2b4d82c694ee7495e022f8f482d254f92b7ffd9ed9dea696c6f84
+ checksum: 1c6a5adfc3c19c6e1e7d94f8addb89a5166fcca72c41f11713043d381ecbe82ce66360c5524e904e17b54f7fc9e6a077994ff31238a456bc7320c3e02e88d92e
languageName: node
linkType: hard
@@ -14838,13 +15273,13 @@ __metadata:
linkType: hard
"postcss@npm:^8.2.15, postcss@npm:^8.4.28":
- version: 8.4.47
- resolution: "postcss@npm:8.4.47"
+ version: 8.4.39
+ resolution: "postcss@npm:8.4.39"
dependencies:
nanoid: ^3.3.7
- picocolors: ^1.1.0
- source-map-js: ^1.2.1
- checksum: f78440a9d8f97431dd2ab1ab8e1de64f12f3eff38a3d8d4a33919b96c381046a314658d2de213a5fa5eb296b656de76a3ec269fdea27f16d5ab465b916a0f52c
+ picocolors: ^1.0.1
+ source-map-js: ^1.2.0
+ checksum: 14b130c90f165961772bdaf99c67f907f3d16494adf0868e57ef68baa67e0d1f6762db9d41ab0f4d09bab6fb7888588dba3596afd1a235fd5c2d43fba7006ac6
languageName: node
linkType: hard
@@ -15056,12 +15491,12 @@ __metadata:
linkType: hard
"pump@npm:^3.0.0":
- version: 3.0.2
- resolution: "pump@npm:3.0.2"
+ version: 3.0.0
+ resolution: "pump@npm:3.0.0"
dependencies:
end-of-stream: ^1.1.0
once: ^1.3.1
- checksum: e0c4216874b96bd25ddf31a0b61a5613e26cc7afa32379217cf39d3915b0509def3565f5f6968fafdad2894c8bbdbd67d340e84f3634b2a29b950cffb6442d9f
+ checksum: e42e9229fba14732593a718b04cb5e1cfef8254544870997e0ecd9732b189a48e1256e4e5478148ecb47c8511dca2b09eae56b4d0aad8009e6fac8072923cfc9
languageName: node
linkType: hard
@@ -15102,7 +15537,16 @@ __metadata:
languageName: node
linkType: hard
-"qs@npm:6.13.0, qs@npm:^6.4.0":
+"qs@npm:6.11.0":
+ version: 6.11.0
+ resolution: "qs@npm:6.11.0"
+ dependencies:
+ side-channel: ^1.0.4
+ checksum: 6e1f29dd5385f7488ec74ac7b6c92f4d09a90408882d0c208414a34dd33badc1a621019d4c799a3df15ab9b1d0292f97c1dd71dc7c045e69f81a8064e5af7297
+ languageName: node
+ linkType: hard
+
+"qs@npm:6.13.0":
version: 6.13.0
resolution: "qs@npm:6.13.0"
dependencies:
@@ -15111,6 +15555,15 @@ __metadata:
languageName: node
linkType: hard
+"qs@npm:^6.4.0":
+ version: 6.12.3
+ resolution: "qs@npm:6.12.3"
+ dependencies:
+ side-channel: ^1.0.6
+ checksum: 9a9228a623bc36d41648237667d7342fb8d64d1cfeb29e474b0c44591ba06ac507e2d726f60eca5af8dc420e5dd23370af408ef8c28e0405675c7187b736a693
+ languageName: node
+ linkType: hard
+
"queue-microtask@npm:^1.2.2":
version: 1.2.3
resolution: "queue-microtask@npm:1.2.3"
@@ -15156,15 +15609,15 @@ __metadata:
linkType: hard
"qunit@npm:^2.20.0":
- version: 2.22.0
- resolution: "qunit@npm:2.22.0"
+ version: 2.21.1
+ resolution: "qunit@npm:2.21.1"
dependencies:
commander: 7.2.0
node-watch: 0.7.3
tiny-glob: 0.2.9
bin:
qunit: bin/qunit.js
- checksum: 6f659ccdc208418e52cd15ab775e203987fc53e5d2b55ce8071d73ae809f6ebec5fdeb174dadfb4800b7f7f64a27727de3e5783c5188aad5056b793b7323f1d9
+ checksum: 51d7c323ef858847cb4fb8b3466e1a26635cbd1b5dbce69e910bf5c1e6a75710d62e4021bb6dbcc787d955522020e4c54c1a8853bc8dc9e9ef8a4c8cf4b76a07
languageName: node
linkType: hard
@@ -15252,13 +15705,6 @@ __metadata:
languageName: node
linkType: hard
-"readdirp@npm:^4.0.1":
- version: 4.0.2
- resolution: "readdirp@npm:4.0.2"
- checksum: 309376e717f94fb7eb61bec21e2603243a9e2420cd2e9bf94ddf026aefea0d7377ed1a62f016d33265682e44908049a55c3cfc2307450a1421654ea008489b39
- languageName: node
- linkType: hard
-
"readdirp@npm:~3.6.0":
version: 3.6.0
resolution: "readdirp@npm:3.6.0"
@@ -15299,6 +15745,15 @@ __metadata:
languageName: node
linkType: hard
+"reduce-extract@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "reduce-extract@npm:1.0.0"
+ dependencies:
+ test-value: ^1.0.1
+ checksum: 1f36cd01617d2ca2d1b077e75ba6521b5303a35d0b6b9a05ad610321cb6df32f20b740d4686d2de7e12e0564dd650bf4b5784c5dec45f8bead04b58c7a14ae31
+ languageName: node
+ linkType: hard
+
"reduce-flatten@npm:^1.0.1":
version: 1.0.1
resolution: "reduce-flatten@npm:1.0.1"
@@ -15329,12 +15784,12 @@ __metadata:
languageName: node
linkType: hard
-"regenerate-unicode-properties@npm:^10.2.0":
- version: 10.2.0
- resolution: "regenerate-unicode-properties@npm:10.2.0"
+"regenerate-unicode-properties@npm:^10.1.0":
+ version: 10.1.1
+ resolution: "regenerate-unicode-properties@npm:10.1.1"
dependencies:
regenerate: ^1.4.2
- checksum: d5c5fc13f8b8d7e16e791637a4bfef741f8d70e267d51845ee7d5404a32fa14c75b181c4efba33e4bff8b0000a2f13e9773593713dfe5b66597df4259275ce63
+ checksum: b80958ef40f125275824c2c47d5081dfaefebd80bff26c76761e9236767c748a4a95a69c053fe29d2df881177f2ca85df4a71fe70a82360388b31159ef19adcf
languageName: node
linkType: hard
@@ -15368,47 +15823,50 @@ __metadata:
languageName: node
linkType: hard
+"regex-not@npm:^1.0.0, regex-not@npm:^1.0.2":
+ version: 1.0.2
+ resolution: "regex-not@npm:1.0.2"
+ dependencies:
+ extend-shallow: ^3.0.2
+ safe-regex: ^1.1.0
+ checksum: 3081403de79559387a35ef9d033740e41818a559512668cef3d12da4e8a29ef34ee13c8ed1256b07e27ae392790172e8a15c8a06b72962fd4550476cde3d8f77
+ languageName: node
+ linkType: hard
+
"regexp.prototype.flags@npm:^1.5.2":
- version: 1.5.3
- resolution: "regexp.prototype.flags@npm:1.5.3"
+ version: 1.5.2
+ resolution: "regexp.prototype.flags@npm:1.5.2"
dependencies:
- call-bind: ^1.0.7
+ call-bind: ^1.0.6
define-properties: ^1.2.1
es-errors: ^1.3.0
- set-function-name: ^2.0.2
- checksum: 83ff0705b837f7cb6d664010a11642250f36d3f642263dd0f3bdfe8f150261aa7b26b50ee97f21c1da30ef82a580bb5afedbef5f45639d69edaafbeac9bbb0ed
+ set-function-name: ^2.0.1
+ checksum: d7f333667d5c564e2d7a97c56c3075d64c722c9bb51b2b4df6822b2e8096d623a5e63088fb4c83df919b6951ef8113841de8b47de7224872fa6838bc5d8a7d64
languageName: node
linkType: hard
-"regexpu-core@npm:^6.1.1":
- version: 6.1.1
- resolution: "regexpu-core@npm:6.1.1"
+"regexpu-core@npm:^5.3.1":
+ version: 5.3.2
+ resolution: "regexpu-core@npm:5.3.2"
dependencies:
+ "@babel/regjsgen": ^0.8.0
regenerate: ^1.4.2
- regenerate-unicode-properties: ^10.2.0
- regjsgen: ^0.8.0
- regjsparser: ^0.11.0
+ regenerate-unicode-properties: ^10.1.0
+ regjsparser: ^0.9.1
unicode-match-property-ecmascript: ^2.0.0
unicode-match-property-value-ecmascript: ^2.1.0
- checksum: ed8e3784e81b816b237313688f28b4695d30d4e0f823dfdf130fd4313c629ac6ec67650563867a6ca9a2435f33e79f3a5001c651aee52791e346213a948de0ff
- languageName: node
- linkType: hard
-
-"regjsgen@npm:^0.8.0":
- version: 0.8.0
- resolution: "regjsgen@npm:0.8.0"
- checksum: a1d925ff14a4b2be774e45775ee6b33b256f89c42d480e6d85152d2133f18bd3d6af662161b226fa57466f7efec367eaf7ccd2a58c0ec2a1306667ba2ad07b0d
+ checksum: 95bb97088419f5396e07769b7de96f995f58137ad75fac5811fb5fe53737766dfff35d66a0ee66babb1eb55386ef981feaef392f9df6d671f3c124812ba24da2
languageName: node
linkType: hard
-"regjsparser@npm:^0.11.0":
- version: 0.11.2
- resolution: "regjsparser@npm:0.11.2"
+"regjsparser@npm:^0.9.1":
+ version: 0.9.1
+ resolution: "regjsparser@npm:0.9.1"
dependencies:
- jsesc: ~3.0.2
+ jsesc: ~0.5.0
bin:
regjsparser: bin/parser
- checksum: 500ab99d6174aef18b43518f4b1f217192459621b0505ad6e8cbbec8135a83e64491077843b4ad06249a207ffecd6566f3db1895a7c5df98f786b4b0edcc9820
+ checksum: 5e1b76afe8f1d03c3beaf9e0d935dd467589c3625f6d65fb8ffa14f224d783a0fed4bf49c2c1b8211043ef92b6117313419edf055a098ed8342e340586741afc
languageName: node
linkType: hard
@@ -15677,6 +16135,13 @@ __metadata:
languageName: node
linkType: hard
+"resolve-url@npm:^0.2.1":
+ version: 0.2.1
+ resolution: "resolve-url@npm:0.2.1"
+ checksum: 7b7035b9ed6e7bc7d289e90aef1eab5a43834539695dac6416ca6e91f1a94132ae4796bbd173cdacfdc2ade90b5f38a3fb6186bebc1b221cd157777a23b9ad14
+ languageName: node
+ linkType: hard
+
"resolve@npm:^1.10.0, resolve@npm:^1.11.1, resolve@npm:^1.13.1, resolve@npm:^1.14.2, resolve@npm:^1.17.0, resolve@npm:^1.20.0, resolve@npm:^1.22.0, resolve@npm:^1.22.1, resolve@npm:^1.22.2, resolve@npm:^1.22.3, resolve@npm:^1.22.8, resolve@npm:^1.3.3, resolve@npm:^1.4.0, resolve@npm:^1.5.0":
version: 1.22.8
resolution: "resolve@npm:1.22.8"
@@ -15723,6 +16188,13 @@ __metadata:
languageName: node
linkType: hard
+"ret@npm:~0.1.10":
+ version: 0.1.15
+ resolution: "ret@npm:0.1.15"
+ checksum: d76a9159eb8c946586567bd934358dfc08a36367b3257f7a3d7255fdd7b56597235af23c6afa0d7f0254159e8051f93c918809962ebd6df24ca2a83dbe4d4151
+ languageName: node
+ linkType: hard
+
"retry@npm:^0.12.0":
version: 0.12.0
resolution: "retry@npm:0.12.0"
@@ -15767,13 +16239,13 @@ __metadata:
linkType: hard
"rimraf@npm:^5.0.0":
- version: 5.0.10
- resolution: "rimraf@npm:5.0.10"
+ version: 5.0.9
+ resolution: "rimraf@npm:5.0.9"
dependencies:
glob: ^10.3.7
bin:
rimraf: dist/esm/bin.mjs
- checksum: 50e27388dd2b3fa6677385fc1e2966e9157c89c86853b96d02e6915663a96b7ff4d590e14f6f70e90f9b554093aa5dbc05ac3012876be558c06a65437337bc05
+ checksum: e6dd5007e34181e1fa732437499d798035b2f3313887435cb855c5c9055bf9646795fc1c63ef843de830df8577cd9862df2dabf913fe08dcc1758c96de4a4fdb
languageName: node
linkType: hard
@@ -15788,7 +16260,7 @@ __metadata:
languageName: node
linkType: hard
-"rollup-pluginutils@npm:^2.6.0":
+"rollup-pluginutils@npm:^2.6.0, rollup-pluginutils@npm:^2.8.1":
version: 2.8.2
resolution: "rollup-pluginutils@npm:2.8.2"
dependencies:
@@ -15819,14 +16291,14 @@ __metadata:
linkType: hard
"router_js@npm:^8.0.3":
- version: 8.0.6
- resolution: "router_js@npm:8.0.6"
+ version: 8.0.5
+ resolution: "router_js@npm:8.0.5"
dependencies:
"@glimmer/env": ^0.1.7
peerDependencies:
route-recognizer: ^0.3.4
rsvp: ^4.8.5
- checksum: b2c32cf7a1606c27438b851c5597b2a87f0f64ff98a3217109e12c36136879fe7f7b137d293c128fb4f3fd02a46ef71fadef411eae883e1af477319904b6ac17
+ checksum: f232e86768ce28b4638a2006f6134099c393ead0f3faf60d5fad19917166866a4903d8cb31670154fd7a623f73b050f28a54b817c558f806cc411f0044ef6b99
languageName: node
linkType: hard
@@ -15943,10 +16415,19 @@ __metadata:
languageName: node
linkType: hard
+"safe-regex@npm:^1.1.0":
+ version: 1.1.0
+ resolution: "safe-regex@npm:1.1.0"
+ dependencies:
+ ret: ~0.1.10
+ checksum: 9a8bba57c87a841f7997b3b951e8e403b1128c1a4fd1182f40cc1a20e2d490593d7c2a21030fadfea320c8e859219019e136f678c6689ed5960b391b822f01d5
+ languageName: node
+ linkType: hard
+
"safe-stable-stringify@npm:^2.2.0, safe-stable-stringify@npm:^2.4.3":
- version: 2.5.0
- resolution: "safe-stable-stringify@npm:2.5.0"
- checksum: d3ce103ed43c6c2f523e39607208bfb1c73aa48179fc5be53c3aa97c118390bffd4d55e012f5393b982b65eb3e0ee954dd57b547930d3f242b0053dcdb923d17
+ version: 2.4.3
+ resolution: "safe-stable-stringify@npm:2.4.3"
+ checksum: 3aeb64449706ee1f5ad2459fc99648b131d48e7a1fbb608d7c628020177512dc9d94108a5cb61bbc953985d313d0afea6566d243237743e02870490afef04b43
languageName: node
linkType: hard
@@ -15996,19 +16477,15 @@ __metadata:
linkType: hard
"sass@npm:^1.66.3, sass@npm:^1.69.5":
- version: 1.80.6
- resolution: "sass@npm:1.80.6"
+ version: 1.77.8
+ resolution: "sass@npm:1.77.8"
dependencies:
- "@parcel/watcher": ^2.4.1
- chokidar: ^4.0.0
+ chokidar: ">=3.0.0 <4.0.0"
immutable: ^4.0.0
source-map-js: ">=0.6.2 <2.0.0"
- dependenciesMeta:
- "@parcel/watcher":
- optional: true
bin:
sass: sass.js
- checksum: 1a81e0fe093ff9109228b5dc1ea2ad64d3fb0e266d968e664a52aca059dd448fbe9a90d9a5a7518e5f31fd1087149ea349bf6fe1271ab15075b7a145c1bea0c9
+ checksum: 6b5dce17faa1bd1e349b4825bf7f76559a32f3f95d789cd2847623c88ee9635e1485d3458532a05fa5b9134cfbce79a4bad3f13dc63c2433632347674db0abae
languageName: node
linkType: hard
@@ -16078,7 +16555,7 @@ __metadata:
languageName: node
linkType: hard
-"semver@npm:^7.0.0, semver@npm:^7.3.2, semver@npm:^7.3.4, semver@npm:^7.3.5, semver@npm:^7.3.7, semver@npm:^7.3.8, semver@npm:^7.5.2, semver@npm:^7.5.3, semver@npm:^7.5.4, semver@npm:^7.6.3":
+"semver@npm:^7.0.0, semver@npm:^7.3.2, semver@npm:^7.3.4, semver@npm:^7.3.5, semver@npm:^7.3.7, semver@npm:^7.3.8, semver@npm:^7.5.2, semver@npm:^7.5.3, semver@npm:^7.5.4":
version: 7.6.3
resolution: "semver@npm:7.6.3"
bin:
@@ -16087,9 +16564,9 @@ __metadata:
languageName: node
linkType: hard
-"send@npm:0.19.0":
- version: 0.19.0
- resolution: "send@npm:0.19.0"
+"send@npm:0.18.0":
+ version: 0.18.0
+ resolution: "send@npm:0.18.0"
dependencies:
debug: 2.6.9
depd: 2.0.0
@@ -16104,7 +16581,7 @@ __metadata:
on-finished: 2.4.1
range-parser: ~1.2.1
statuses: 2.0.1
- checksum: 5ae11bd900c1c2575525e2aa622e856804e2f96a09281ec1e39610d089f53aa69e13fd8db84b52f001d0318cf4bb0b3b904ad532fc4c0014eb90d32db0cff55f
+ checksum: 74fc07ebb58566b87b078ec63e5a3e41ecd987e4272ba67b7467e86c6ad51bc6b0b0154133b6d8b08a2ddda360464f71382f7ef864700f34844a76c8027817a8
languageName: node
linkType: hard
@@ -16117,15 +16594,15 @@ __metadata:
languageName: node
linkType: hard
-"serve-static@npm:1.16.2":
- version: 1.16.2
- resolution: "serve-static@npm:1.16.2"
+"serve-static@npm:1.15.0":
+ version: 1.15.0
+ resolution: "serve-static@npm:1.15.0"
dependencies:
- encodeurl: ~2.0.0
+ encodeurl: ~1.0.2
escape-html: ~1.0.3
parseurl: ~1.3.3
- send: 0.19.0
- checksum: dffc52feb4cc5c68e66d0c7f3c1824d4e989f71050aefc9bd5f822a42c54c9b814f595fc5f2b717f4c7cc05396145f3e90422af31186a93f76cf15f707019759
+ send: 0.18.0
+ checksum: af57fc13be40d90a12562e98c0b7855cf6e8bd4c107fe9a45c212bf023058d54a1871b1c89511c3958f70626fff47faeb795f5d83f8cf88514dbaeb2b724464d
languageName: node
linkType: hard
@@ -16162,6 +16639,18 @@ __metadata:
languageName: node
linkType: hard
+"set-value@npm:^2.0.0, set-value@npm:^2.0.1":
+ version: 2.0.1
+ resolution: "set-value@npm:2.0.1"
+ dependencies:
+ extend-shallow: ^2.0.1
+ is-extendable: ^0.1.1
+ is-plain-object: ^2.0.3
+ split-string: ^3.0.1
+ checksum: 09a4bc72c94641aeae950eb60dc2755943b863780fcc32e441eda964b64df5e3f50603d5ebdd33394ede722528bd55ed43aae26e9df469b4d32e2292b427b601
+ languageName: node
+ linkType: hard
+
"setprototypeof@npm:1.1.0":
version: 1.1.0
resolution: "setprototypeof@npm:1.1.0"
@@ -16338,6 +16827,22 @@ __metadata:
languageName: node
linkType: hard
+"snapdragon@npm:^0.8.1":
+ version: 0.8.2
+ resolution: "snapdragon@npm:0.8.2"
+ dependencies:
+ base: ^0.11.1
+ debug: ^2.2.0
+ define-property: ^0.2.5
+ extend-shallow: ^2.0.1
+ map-cache: ^0.2.2
+ source-map: ^0.5.6
+ source-map-resolve: ^0.5.0
+ use: ^3.1.0
+ checksum: a197f242a8f48b11036563065b2487e9b7068f50a20dd81d9161eca6af422174fc158b8beeadbe59ce5ef172aa5718143312b3aebaae551c124b7824387c8312
+ languageName: node
+ linkType: hard
+
"socket.io-adapter@npm:~2.5.2":
version: 2.5.5
resolution: "socket.io-adapter@npm:2.5.5"
@@ -16359,17 +16864,17 @@ __metadata:
linkType: hard
"socket.io@npm:^4.6.2":
- version: 4.8.1
- resolution: "socket.io@npm:4.8.1"
+ version: 4.7.5
+ resolution: "socket.io@npm:4.7.5"
dependencies:
accepts: ~1.3.4
base64id: ~2.0.0
cors: ~2.8.5
debug: ~4.3.2
- engine.io: ~6.6.0
+ engine.io: ~6.5.2
socket.io-adapter: ~2.5.2
socket.io-parser: ~4.2.4
- checksum: d5e4d7eabba7a04c0d130a7b34c57050a1b4694e5b9eb9bd0a40dd07c1d635f3d5cacc15442f6135be8b2ecdad55dad08ee576b5c74864508890ff67329722fa
+ checksum: b8b57216152cf230bdcb77b5450e124ebe1fee7482eeb50a6ef760b69f2f5a064e9b8640ce9c1efc5c9e081f5d797d3f6ff3f81606e19ddaf5d4114aad9ec7d3
languageName: node
linkType: hard
@@ -16394,18 +16899,13 @@ __metadata:
languageName: node
linkType: hard
-"sort-array@npm:^5.0.0":
- version: 5.0.0
- resolution: "sort-array@npm:5.0.0"
+"sort-array@npm:^4.1.5":
+ version: 4.1.5
+ resolution: "sort-array@npm:4.1.5"
dependencies:
- array-back: ^6.2.2
- typical: ^7.1.1
- peerDependencies:
- "@75lb/nature": ^0.1.1
- peerDependenciesMeta:
- "@75lb/nature":
- optional: true
- checksum: b14748cfa091143432e9a84e2bb1478235a63ba38fa3b5d8da31441b0d7183588f2d61454428a4349fb5f912c189c712d5eecebf3b7101da03ecd6383dc2c5c7
+ array-back: ^5.0.0
+ typical: ^6.0.1
+ checksum: ffaf7c255988af15ec08d78347743d7414ad495b94cd32d201be887b63258b453c9bda240d1c388bd71b46922db307ff7dbc988e717d3ded7eb20513698ea178
languageName: node
linkType: hard
@@ -16432,10 +16932,23 @@ __metadata:
languageName: node
linkType: hard
-"source-map-js@npm:>=0.6.2 <2.0.0, source-map-js@npm:^1.0.1, source-map-js@npm:^1.2.1":
- version: 1.2.1
- resolution: "source-map-js@npm:1.2.1"
- checksum: 4eb0cd997cdf228bc253bcaff9340afeb706176e64868ecd20efbe6efea931465f43955612346d6b7318789e5265bdc419bc7669c1cebe3db0eb255f57efa76b
+"source-map-js@npm:>=0.6.2 <2.0.0, source-map-js@npm:^1.0.1, source-map-js@npm:^1.2.0":
+ version: 1.2.0
+ resolution: "source-map-js@npm:1.2.0"
+ checksum: 791a43306d9223792e84293b00458bf102a8946e7188f3db0e4e22d8d530b5f80a4ce468eb5ec0bf585443ad55ebbd630bf379c98db0b1f317fd902500217f97
+ languageName: node
+ linkType: hard
+
+"source-map-resolve@npm:^0.5.0":
+ version: 0.5.3
+ resolution: "source-map-resolve@npm:0.5.3"
+ dependencies:
+ atob: ^2.1.2
+ decode-uri-component: ^0.2.0
+ resolve-url: ^0.2.1
+ source-map-url: ^0.4.0
+ urix: ^0.1.0
+ checksum: c73fa44ac00783f025f6ad9e038ab1a2e007cd6a6b86f47fe717c3d0765b4a08d264f6966f3bd7cd9dbcd69e4832783d5472e43247775b2a550d6f2155d24bae
languageName: node
linkType: hard
@@ -16472,7 +16985,7 @@ __metadata:
languageName: node
linkType: hard
-"source-map@npm:^0.5.3":
+"source-map@npm:^0.5.3, source-map@npm:^0.5.6":
version: 0.5.7
resolution: "source-map@npm:0.5.7"
checksum: 5dc2043b93d2f194142c7f38f74a24670cd7a0063acdaf4bf01d2964b402257ae843c2a8fa822ad5b71013b5fcafa55af7421383da919752f22ff488bc553f4d
@@ -16542,9 +17055,18 @@ __metadata:
linkType: hard
"spdx-license-ids@npm:^3.0.0":
- version: 3.0.20
- resolution: "spdx-license-ids@npm:3.0.20"
- checksum: 0c57750bedbcff48f3d0e266fbbdaf0aab54217e182f669542ffe0b5a902dce69e8cdfa126a131e1ddd39a9bef4662e357b2b41315d7240b4a28c0a7e782bb40
+ version: 3.0.18
+ resolution: "spdx-license-ids@npm:3.0.18"
+ checksum: 457825df5dd1fc0135b0bb848c896143f70945cc2da148afc71c73ed0837d1d651f809006e406d82109c9dd71a8cb39785a3604815fe46bc0548e9d3976f6b69
+ languageName: node
+ linkType: hard
+
+"split-string@npm:^3.0.1":
+ version: 3.1.0
+ resolution: "split-string@npm:3.1.0"
+ dependencies:
+ extend-shallow: ^3.0.0
+ checksum: ae5af5c91bdc3633628821bde92fdf9492fa0e8a63cf6a0376ed6afde93c701422a1610916f59be61972717070119e848d10dfbbd5024b7729d6a71972d2a84c
languageName: node
linkType: hard
@@ -16594,6 +17116,16 @@ __metadata:
languageName: node
linkType: hard
+"static-extend@npm:^0.1.1":
+ version: 0.1.2
+ resolution: "static-extend@npm:0.1.2"
+ dependencies:
+ define-property: ^0.2.5
+ object-copy: ^0.1.0
+ checksum: 8657485b831f79e388a437260baf22784540417a9b29e11572c87735df24c22b84eda42107403a64b30861b2faf13df9f7fc5525d51f9d1d2303aba5cbf4e12c
+ languageName: node
+ linkType: hard
+
"statuses@npm:2.0.1":
version: 2.0.1
resolution: "statuses@npm:2.0.1"
@@ -17037,12 +17569,12 @@ __metadata:
linkType: hard
"supports-hyperlinks@npm:^3.0.0":
- version: 3.1.0
- resolution: "supports-hyperlinks@npm:3.1.0"
+ version: 3.0.0
+ resolution: "supports-hyperlinks@npm:3.0.0"
dependencies:
has-flag: ^4.0.0
supports-color: ^7.0.0
- checksum: 051ffc31ae0d3334502decb6a17170ff89d870094d6835d93dfb2cda03e2a4504bf861a0954942af5e65fdd038b81cef5998696d0f4f4ff5f5bd3e40c7981874
+ checksum: 41021305de5255b10d821bf93c7a781f783e1693d0faec293d7fc7ccf17011b90bde84b0295fa92ba75c6c390351fe84fdd18848cad4bf656e464a958243c3e7
languageName: node
linkType: hard
@@ -17084,11 +17616,9 @@ __metadata:
linkType: hard
"swagger-ui-dist@npm:^5.9.0":
- version: 5.18.2
- resolution: "swagger-ui-dist@npm:5.18.2"
- dependencies:
- "@scarf/scarf": =1.4.0
- checksum: 4e8f3e4669276f421f91ce6214f9df09a5b0f9aa1b636e9044076af4f3a02b093d1eb43ed1b3962aba08be803692e71dfc1ce0c28c2057b9eb0c35b166d7ef42
+ version: 5.17.14
+ resolution: "swagger-ui-dist@npm:5.17.14"
+ checksum: e10f8068e370fb17cf6882c8d8b925044862ea74b67296c6d97eef42a904eed3e9ed21867cc0458cbf0de7bd2a49e79282a1a3b7e6a1ccdbba1b650d86b528bb
languageName: node
linkType: hard
@@ -17126,12 +17656,12 @@ __metadata:
linkType: hard
"synckit@npm:^0.9.1":
- version: 0.9.2
- resolution: "synckit@npm:0.9.2"
+ version: 0.9.1
+ resolution: "synckit@npm:0.9.1"
dependencies:
"@pkgr/core": ^0.1.0
tslib: ^2.6.2
- checksum: 3a30e828efbdcf3b50fccab4da6e90ea7ca24d8c5c2ad3ffe98e07d7c492df121e0f75227c6e510f96f976aae76f1fa4710cb7b1d69db881caf66ef9de89360e
+ checksum: 4042941a4d939675f1d7b01124b8405b6ac616f3e3f396d00e46c67f38d0d5b7f9a1de05bc7ceea4ce80d967b450cfa2460e5f6aca81f7cea8f1a28be9392985
languageName: node
linkType: hard
@@ -17219,7 +17749,7 @@ __metadata:
languageName: node
linkType: hard
-"terser-webpack-plugin@npm:^5.3.10":
+"terser-webpack-plugin@npm:^5.3.7":
version: 5.3.10
resolution: "terser-webpack-plugin@npm:5.3.10"
dependencies:
@@ -17255,8 +17785,8 @@ __metadata:
linkType: hard
"terser@npm:^5.26.0, terser@npm:^5.7.0":
- version: 5.36.0
- resolution: "terser@npm:5.36.0"
+ version: 5.31.3
+ resolution: "terser@npm:5.31.3"
dependencies:
"@jridgewell/source-map": ^0.3.3
acorn: ^8.8.2
@@ -17264,7 +17794,17 @@ __metadata:
source-map-support: ~0.5.20
bin:
terser: bin/terser
- checksum: 489afd31901a2b170f7766948a3aa0e25da0acb41e9e35bd9f9b4751dfa2fc846e485f6fb9d34f0839a96af77f675b5fbf0a20c9aa54e0b8d7c219cf0b55e508
+ checksum: cb4ccd5cb42c719272959dcae63d41e4696fb304123392943282caa6dfcdc49f94e7c48353af8bcd4fbc34457b240b7f843db7fec21bb2bdc18e01d4f45b035e
+ languageName: node
+ linkType: hard
+
+"test-value@npm:^1.0.1":
+ version: 1.1.0
+ resolution: "test-value@npm:1.1.0"
+ dependencies:
+ array-back: ^1.0.2
+ typical: ^2.4.2
+ checksum: 7c02d5228057e8a62fdca9e67f370b4d57aebfb01bbb92cc6a2a108b9a97528d15f8c76faf9c8c7ce4e152e2c30832ebb1067ebeeadd89fb2f65b384dbf1a881
languageName: node
linkType: hard
@@ -17289,8 +17829,8 @@ __metadata:
linkType: hard
"testem@npm:^3.10.1":
- version: 3.15.2
- resolution: "testem@npm:3.15.2"
+ version: 3.15.0
+ resolution: "testem@npm:3.15.0"
dependencies:
"@xmldom/xmldom": ^0.8.0
backbone: ^1.1.2
@@ -17305,7 +17845,11 @@ __metadata:
glob: ^7.0.4
http-proxy: ^1.13.1
js-yaml: ^3.2.5
- lodash: ^4.17.21
+ lodash.assignin: ^4.1.0
+ lodash.castarray: ^4.4.0
+ lodash.clonedeep: ^4.4.1
+ lodash.find: ^4.5.1
+ lodash.uniqby: ^4.7.0
mkdirp: ^3.0.1
mustache: ^4.2.0
node-notifier: ^10.0.0
@@ -17319,7 +17863,7 @@ __metadata:
tmp: 0.0.33
bin:
testem: testem.js
- checksum: 7fec8b3df50907a5d600cd12f23803147e62dbb3370560fe73114e0398bb0ff41c6b863b01da868d2a28c1700d5f7c3fef9ff66d04dd4aed1b30b0ec19c1e096
+ checksum: 46f348a71f07b27c61ef910c4800472c0c1fd2caafc49948b851bea18fef88a1a919916d7352972b7618fdfbe2f829f4d9225f665c80085c996595f8f30c1c47
languageName: node
linkType: hard
@@ -17442,6 +17986,22 @@ __metadata:
languageName: node
linkType: hard
+"to-fast-properties@npm:^2.0.0":
+ version: 2.0.0
+ resolution: "to-fast-properties@npm:2.0.0"
+ checksum: be2de62fe58ead94e3e592680052683b1ec986c72d589e7b21e5697f8744cdbf48c266fa72f6c15932894c10187b5f54573a3bcf7da0bfd964d5caf23d436168
+ languageName: node
+ linkType: hard
+
+"to-object-path@npm:^0.3.0":
+ version: 0.3.0
+ resolution: "to-object-path@npm:0.3.0"
+ dependencies:
+ kind-of: ^3.0.2
+ checksum: 9425effee5b43e61d720940fa2b889623f77473d459c2ce3d4a580a4405df4403eec7be6b857455908070566352f9e2417304641ed158dda6f6a365fe3e66d70
+ languageName: node
+ linkType: hard
+
"to-regex-range@npm:^5.0.1":
version: 5.0.1
resolution: "to-regex-range@npm:5.0.1"
@@ -17451,6 +18011,18 @@ __metadata:
languageName: node
linkType: hard
+"to-regex@npm:^3.0.1, to-regex@npm:^3.0.2":
+ version: 3.0.2
+ resolution: "to-regex@npm:3.0.2"
+ dependencies:
+ define-property: ^2.0.2
+ extend-shallow: ^3.0.2
+ regex-not: ^1.0.2
+ safe-regex: ^1.1.0
+ checksum: 4ed4a619059b64e204aad84e4e5f3ea82d97410988bcece7cf6cbfdbf193d11bff48cf53842d88b8bb00b1bfc0d048f61f20f0709e6f393fd8fe0122662d9db4
+ languageName: node
+ linkType: hard
+
"to-vfile@npm:^6.1.0":
version: 6.1.0
resolution: "to-vfile@npm:6.1.0"
@@ -17491,13 +18063,13 @@ __metadata:
linkType: hard
"traverse@npm:^0.6.7":
- version: 0.6.10
- resolution: "traverse@npm:0.6.10"
+ version: 0.6.9
+ resolution: "traverse@npm:0.6.9"
dependencies:
gopd: ^1.0.1
typedarray.prototype.slice: ^1.0.3
which-typed-array: ^1.1.15
- checksum: ff25d30726db4867c01ff1f1bd8a5e3356b920c4d674ddf6c3764179bb54766cf1ad0158bbd65667e1f5fbde2d4efbd814d7b24d44149cc31255f0cfe2ab2095
+ checksum: e2f4b46caf849b6ea9006230995edc7376c1361f33c2110f425339a814b71b968f5c84a130ae21b4300d1849fff42cec6117c2aebde8a68d33c6871e9621a80f
languageName: node
linkType: hard
@@ -17558,9 +18130,9 @@ __metadata:
linkType: hard
"tslib@npm:^2.0.3, tslib@npm:^2.1.0, tslib@npm:^2.4.0, tslib@npm:^2.4.1, tslib@npm:^2.6.1, tslib@npm:^2.6.2":
- version: 2.8.1
- resolution: "tslib@npm:2.8.1"
- checksum: e4aba30e632b8c8902b47587fd13345e2827fa639e7c3121074d5ee0880723282411a8838f830b55100cbe4517672f84a2472667d355b81e8af165a55dc6203a
+ version: 2.6.3
+ resolution: "tslib@npm:2.6.3"
+ checksum: 74fce0e100f1ebd95b8995fbbd0e6c91bdd8f4c35c00d4da62e285a3363aaa534de40a80db30ecfd388ed7c313c42d930ee0eaf108e8114214b180eec3dbe6f5
languageName: node
linkType: hard
@@ -17584,20 +18156,13 @@ __metadata:
languageName: node
linkType: hard
-"type-detect@npm:4.0.8":
+"type-detect@npm:4.0.8, type-detect@npm:^4.0.8":
version: 4.0.8
resolution: "type-detect@npm:4.0.8"
checksum: 62b5628bff67c0eb0b66afa371bd73e230399a8d2ad30d852716efcc4656a7516904570cd8631a49a3ce57c10225adf5d0cbdcb47f6b0255fe6557c453925a15
languageName: node
linkType: hard
-"type-detect@npm:^4.1.0":
- version: 4.1.0
- resolution: "type-detect@npm:4.1.0"
- checksum: 3b32f873cd02bc7001b00a61502b7ddc4b49278aabe68d652f732e1b5d768c072de0bc734b427abf59d0520a5f19a2e07309ab921ef02018fa1cb4af155cdb37
- languageName: node
- linkType: hard
-
"type-fest@npm:^0.11.0":
version: 0.11.0
resolution: "type-fest@npm:0.11.0"
@@ -17729,12 +18294,12 @@ __metadata:
linkType: hard
"typescript@npm:^5.4.5":
- version: 5.6.3
- resolution: "typescript@npm:5.6.3"
+ version: 5.5.3
+ resolution: "typescript@npm:5.5.3"
bin:
tsc: bin/tsc
tsserver: bin/tsserver
- checksum: ba302f8822777ebefb28b554105f3e074466b671e7444ec6b75dadc008a62f46f373d9e57ceced1c433756d06c8b7dc569a7eefdf3a9573122a49205ff99021a
+ checksum: 4b4f14313484d5c86064d04ba892544801fa551f5cf72719b540b498056fec7fc192d0bbdb2ba1448e759b1548769956da9e43e7c16781e8d8856787b0575004
languageName: node
linkType: hard
@@ -17749,16 +18314,16 @@ __metadata:
linkType: hard
"typescript@patch:typescript@^5.4.5#~builtin":
- version: 5.6.3
- resolution: "typescript@patch:typescript@npm%3A5.6.3#~builtin::version=5.6.3&hash=85af82"
+ version: 5.5.3
+ resolution: "typescript@patch:typescript@npm%3A5.5.3#~builtin::version=5.5.3&hash=85af82"
bin:
tsc: bin/tsc
tsserver: bin/tsserver
- checksum: ade87bce2363ee963eed0e4ca8a312ea02c81873ebd53609bc3f6dc0a57f6e61ad7e3fb8cbb7f7ab8b5081cbee801b023f7c4823ee70b1c447eae050e6c7622b
+ checksum: 6853be4607706cc1ad2f16047cf1cd72d39f79acd5f9716e1d23bc0e462c7f59be7458fe58a21665e7657a05433d7ab8419d093a5a4bd5f3a33f879b35d2769b
languageName: node
linkType: hard
-"typical@npm:^2.6.0, typical@npm:^2.6.1":
+"typical@npm:^2.4.2, typical@npm:^2.6.0, typical@npm:^2.6.1":
version: 2.6.1
resolution: "typical@npm:2.6.1"
checksum: 6af04fefe50d90d3471f058b2cdc0f49b7436bdd605cd00acea7965926ff388a5a7d692ef144f45fccee6f8e896c065702ecc44b69057e2ce88c09e897c7d3a4
@@ -17772,10 +18337,10 @@ __metadata:
languageName: node
linkType: hard
-"typical@npm:^7.1.1":
- version: 7.2.0
- resolution: "typical@npm:7.2.0"
- checksum: 8d268941404831632666f6c76f64a86bec9fa957abecc903de659b0a8eb2cdca5c4dbb8e0e93bb404f46bf61e18811b73cf50f92af1da760db7ed7e5697df645
+"typical@npm:^6.0.1":
+ version: 6.0.1
+ resolution: "typical@npm:6.0.1"
+ checksum: 4eae0d3a964150a09f709b8ceed2e2800f10525f66e58212555aadf0339b16c524e6d0c4b259541ac10e8a21f5135b5a2e99a2a39be755122b19a4ecf9fa8f8c
languageName: node
linkType: hard
@@ -17794,11 +18359,11 @@ __metadata:
linkType: hard
"uglify-js@npm:^3.1.4":
- version: 3.19.3
- resolution: "uglify-js@npm:3.19.3"
+ version: 3.19.0
+ resolution: "uglify-js@npm:3.19.0"
bin:
uglifyjs: bin/uglifyjs
- checksum: 7ed6272fba562eb6a3149cfd13cda662f115847865c03099e3995a0e7a910eba37b82d4fccf9e88271bb2bcbe505bb374967450f433c17fa27aa36d94a8d0553
+ checksum: 23dc4778a9c5b5252888f3871e34b4a5e69ccc92e0febd9598c82cb559a7d550244ebc3f10eb0af0586c7cc34afe8be99d1581d9fcd36e3bed219d28d0fd3452
languageName: node
linkType: hard
@@ -17825,30 +18390,23 @@ __metadata:
linkType: hard
"underscore@npm:^1.12.1":
- version: 1.13.7
- resolution: "underscore@npm:1.13.7"
- checksum: 174b011af29e4fbe2c70eb2baa8bfab0d0336cf2f5654f364484967bc6264a86224d0134b9176e4235c8cceae00d11839f0fd4824268de04b11c78aca1241684
- languageName: node
- linkType: hard
-
-"undici-types@npm:~6.19.8":
- version: 6.19.8
- resolution: "undici-types@npm:6.19.8"
- checksum: de51f1b447d22571cf155dfe14ff6d12c5bdaec237c765085b439c38ca8518fc360e88c70f99469162bf2e14188a7b0bcb06e1ed2dc031042b984b0bb9544017
+ version: 1.13.6
+ resolution: "underscore@npm:1.13.6"
+ checksum: d5cedd14a9d0d91dd38c1ce6169e4455bb931f0aaf354108e47bd46d3f2da7464d49b2171a5cf786d61963204a42d01ea1332a903b7342ad428deaafaf70ec36
languageName: node
linkType: hard
-"undici@npm:^6.19.5":
- version: 6.20.1
- resolution: "undici@npm:6.20.1"
- checksum: 3bb1405b406fa0e913ff4ec6fd310c9b4d950b7064ba5949b2f616c1f13070d26f5558aefb4b56b2eafb555925443ce44cb801e143d2417ecf12ddf8d5c05cf6
+"undici-types@npm:~5.26.4":
+ version: 5.26.5
+ resolution: "undici-types@npm:5.26.5"
+ checksum: 3192ef6f3fd5df652f2dc1cd782b49d6ff14dc98e5dced492aa8a8c65425227da5da6aafe22523c67f035a272c599bb89cfe803c1db6311e44bed3042fc25487
languageName: node
linkType: hard
"unicode-canonical-property-names-ecmascript@npm:^2.0.0":
- version: 2.0.1
- resolution: "unicode-canonical-property-names-ecmascript@npm:2.0.1"
- checksum: 3c3dabdb1d22aef4904399f9e810d0b71c0b12b3815169d96fac97e56d5642840c6071cf709adcace2252bc6bb80242396c2ec74b37224eb015c5f7aca40bad7
+ version: 2.0.0
+ resolution: "unicode-canonical-property-names-ecmascript@npm:2.0.0"
+ checksum: 39be078afd014c14dcd957a7a46a60061bc37c4508ba146517f85f60361acf4c7539552645ece25de840e17e293baa5556268d091ca6762747fdd0c705001a45
languageName: node
linkType: hard
@@ -17863,9 +18421,9 @@ __metadata:
linkType: hard
"unicode-match-property-value-ecmascript@npm:^2.1.0":
- version: 2.2.0
- resolution: "unicode-match-property-value-ecmascript@npm:2.2.0"
- checksum: 9e3151e1d0bc6be35c4cef105e317c04090364173e8462005b5cde08a1e7c858b6586486cfebac39dc2c6c8c9ee24afb245de6d527604866edfa454fe2a35fae
+ version: 2.1.0
+ resolution: "unicode-match-property-value-ecmascript@npm:2.1.0"
+ checksum: 8d6f5f586b9ce1ed0e84a37df6b42fdba1317a05b5df0c249962bd5da89528771e2d149837cad11aa26bcb84c35355cb9f58a10c3d41fa3b899181ece6c85220
languageName: node
linkType: hard
@@ -17897,6 +18455,18 @@ __metadata:
languageName: node
linkType: hard
+"union-value@npm:^1.0.0":
+ version: 1.0.1
+ resolution: "union-value@npm:1.0.1"
+ dependencies:
+ arr-union: ^3.1.0
+ get-value: ^2.0.6
+ is-extendable: ^0.1.1
+ set-value: ^2.0.1
+ checksum: a3464097d3f27f6aa90cf103ed9387541bccfc006517559381a10e0dffa62f465a9d9a09c9b9c3d26d0f4cbe61d4d010e2fbd710fd4bf1267a768ba8a774b0ba
+ languageName: node
+ linkType: hard
+
"unique-filename@npm:^3.0.0":
version: 3.0.0
resolution: "unique-filename@npm:3.0.0"
@@ -18020,6 +18590,16 @@ __metadata:
languageName: node
linkType: hard
+"unset-value@npm:^1.0.0":
+ version: 1.0.0
+ resolution: "unset-value@npm:1.0.0"
+ dependencies:
+ has-value: ^0.3.1
+ isobject: ^3.0.0
+ checksum: 5990ecf660672be2781fc9fb322543c4aa592b68ed9a3312fa4df0e9ba709d42e823af090fc8f95775b4cd2c9a5169f7388f0cec39238b6d0d55a69fc2ab6b29
+ languageName: node
+ linkType: hard
+
"upath@npm:^2.0.1":
version: 2.0.1
resolution: "upath@npm:2.0.1"
@@ -18027,17 +18607,17 @@ __metadata:
languageName: node
linkType: hard
-"update-browserslist-db@npm:^1.1.1":
- version: 1.1.1
- resolution: "update-browserslist-db@npm:1.1.1"
+"update-browserslist-db@npm:^1.1.0":
+ version: 1.1.0
+ resolution: "update-browserslist-db@npm:1.1.0"
dependencies:
- escalade: ^3.2.0
- picocolors: ^1.1.0
+ escalade: ^3.1.2
+ picocolors: ^1.0.1
peerDependencies:
browserslist: ">= 4.21.0"
bin:
update-browserslist-db: cli.js
- checksum: 2ea11bd2562122162c3e438d83a1f9125238c0844b6d16d366e3276d0c0acac6036822dc7df65fc5a89c699cdf9f174acf439c39bedf3f9a2f3983976e4b4c3e
+ checksum: 7b74694d96f0c360f01b702e72353dc5a49df4fe6663d3ee4e5c628f061576cddf56af35a3a886238c01dd3d8f231b7a86a8ceaa31e7a9220ae31c1c1238e562
languageName: node
linkType: hard
@@ -18057,6 +18637,20 @@ __metadata:
languageName: node
linkType: hard
+"urix@npm:^0.1.0":
+ version: 0.1.0
+ resolution: "urix@npm:0.1.0"
+ checksum: 4c076ecfbf3411e888547fe844e52378ab5ada2d2f27625139011eada79925e77f7fbf0e4016d45e6a9e9adb6b7e64981bd49b22700c7c401c5fc15f423303b3
+ languageName: node
+ linkType: hard
+
+"use@npm:^3.1.0":
+ version: 3.1.1
+ resolution: "use@npm:3.1.1"
+ checksum: 08a130289f5238fcbf8f59a18951286a6e660d17acccc9d58d9b69dfa0ee19aa038e8f95721b00b432c36d1629a9e32a464bf2e7e0ae6a244c42ddb30bdd8b33
+ languageName: node
+ linkType: hard
+
"username-sync@npm:^1.0.2":
version: 1.0.3
resolution: "username-sync@npm:1.0.3"
@@ -18173,6 +18767,7 @@ __metadata:
"@babel/preset-env": ^7.24.6
"@babel/preset-typescript": ^7.24.6
"@docfy/ember": ^0.8.5
+ "@ember-data/legacy-compat": ~4.12.4
"@ember/legacy-built-in-components": ^0.4.1
"@ember/optional-features": ^2.0.0
"@ember/render-modifiers": ^1.0.2
@@ -18181,7 +18776,7 @@ __metadata:
"@ember/test-waiters": ^3.1.0
"@glimmer/component": ^1.1.2
"@glimmer/tracking": ^1.1.2
- "@hashicorp/design-system-components": ~4.13.0
+ "@hashicorp/design-system-components": ~4.7.0
"@hashicorp/ember-flight-icons": ^5.1.3
"@icholy/duration": ^5.1.0
"@lineal-viz/lineal": ^0.5.1
@@ -18215,7 +18810,7 @@ __metadata:
ember-a11y-testing: ^7.0.1
ember-auto-import: ^2.7.2
ember-basic-dropdown: ^8.0.4
- ember-cli: ~5.8.0
+ ember-cli: ~5.4.2
ember-cli-babel: ^8.2.0
ember-cli-clean-css: ^3.0.0
ember-cli-content-security-policy: 2.0.3
@@ -18232,7 +18827,7 @@ __metadata:
ember-cli-terser: ^4.0.2
ember-composable-helpers: 5.0.0
ember-concurrency: ^4.0.2
- ember-data: ~5.3.2
+ ember-data: ~4.12.4
ember-engines: 0.8.23
ember-exam: ^9.0.0
ember-inflector: 4.0.2
@@ -18245,11 +18840,12 @@ __metadata:
ember-qunit: ^8.0.1
ember-resolver: ^11.0.1
ember-responsive: 5.0.0
+ ember-router-helpers: ^0.4.0
ember-service-worker: "meirish/ember-service-worker#configurable-scope"
ember-sinon-qunit: ^7.4.0
- ember-source: ~5.8.0
+ ember-source: ~5.4.0
ember-style-modifier: ^4.1.0
- ember-svg-jar: 2.6.0
+ ember-svg-jar: 2.4.4
ember-template-lint: ^6.0.0
ember-template-lint-plugin-prettier: ^5.0.0
ember-test-selectors: 6.0.0
@@ -18296,7 +18892,7 @@ __metadata:
typescript: ^5.4.5
uuid: ^9.0.0
walk-sync: ^2.0.2
- webpack: 5.94.0
+ webpack: 5.89.0
languageName: unknown
linkType: soft
@@ -18347,9 +18943,9 @@ __metadata:
linkType: hard
"walk-back@npm:^5.1.0":
- version: 5.1.1
- resolution: "walk-back@npm:5.1.1"
- checksum: c976482ff419cd9b2ae256d6eec4a9d9c131656c6cbf316acbe3372feee79bd7c8c82122434af02f301071fe08865804e0d929a80eb44c42f9ad26edbe9a85a1
+ version: 5.1.0
+ resolution: "walk-back@npm:5.1.0"
+ checksum: b0e9acdac22dcd281d24fb857af52f39ca4e8f3b18921106b676b0baa96fb7f97c55630d9df0814207a194e6a75f175d8c8282648914458fd949eae901b276f5
languageName: node
linkType: hard
@@ -18441,13 +19037,13 @@ __metadata:
languageName: node
linkType: hard
-"watchpack@npm:^2.4.1":
- version: 2.4.2
- resolution: "watchpack@npm:2.4.2"
+"watchpack@npm:^2.4.0":
+ version: 2.4.1
+ resolution: "watchpack@npm:2.4.1"
dependencies:
glob-to-regexp: ^0.4.1
graceful-fs: ^4.1.2
- checksum: 92d9d52ce3d16fd83ed6994d1dd66a4d146998882f4c362d37adfea9ab77748a5b4d1e0c65fa104797928b2d40f635efa8f9b925a6265428a69f1e1852ca3441
+ checksum: 5b0179348655dcdf19cac7cb4ff923fdc024d630650c0bf6bec8899cf47c60e19d4f810a88dba692ed0e7f684cf0fcffea86efdbf6c35d81f031e328043b7fab
languageName: node
linkType: hard
@@ -18467,39 +19063,40 @@ __metadata:
languageName: node
linkType: hard
-"webpack@npm:5.94.0":
- version: 5.94.0
- resolution: "webpack@npm:5.94.0"
+"webpack@npm:5.89.0":
+ version: 5.89.0
+ resolution: "webpack@npm:5.89.0"
dependencies:
- "@types/estree": ^1.0.5
- "@webassemblyjs/ast": ^1.12.1
- "@webassemblyjs/wasm-edit": ^1.12.1
- "@webassemblyjs/wasm-parser": ^1.12.1
+ "@types/eslint-scope": ^3.7.3
+ "@types/estree": ^1.0.0
+ "@webassemblyjs/ast": ^1.11.5
+ "@webassemblyjs/wasm-edit": ^1.11.5
+ "@webassemblyjs/wasm-parser": ^1.11.5
acorn: ^8.7.1
- acorn-import-attributes: ^1.9.5
- browserslist: ^4.21.10
+ acorn-import-assertions: ^1.9.0
+ browserslist: ^4.14.5
chrome-trace-event: ^1.0.2
- enhanced-resolve: ^5.17.1
+ enhanced-resolve: ^5.15.0
es-module-lexer: ^1.2.1
eslint-scope: 5.1.1
events: ^3.2.0
glob-to-regexp: ^0.4.1
- graceful-fs: ^4.2.11
+ graceful-fs: ^4.2.9
json-parse-even-better-errors: ^2.3.1
loader-runner: ^4.2.0
mime-types: ^2.1.27
neo-async: ^2.6.2
schema-utils: ^3.2.0
tapable: ^2.1.1
- terser-webpack-plugin: ^5.3.10
- watchpack: ^2.4.1
+ terser-webpack-plugin: ^5.3.7
+ watchpack: ^2.4.0
webpack-sources: ^3.2.3
peerDependenciesMeta:
webpack-cli:
optional: true
bin:
webpack: bin/webpack.js
- checksum: 6a3d667be304a69cd6dcb8d676bc29f47642c0d389af514cfcd646eaaa809961bc6989fc4b2621a717dfc461130f29c6e20006d62a32e012dafaa9517813a4e6
+ checksum: 43fe0dbc30e168a685ef5a86759d5016a705f6563b39a240aa00826a80637d4a3deeb8062e709d6a4b05c63e796278244c84b04174704dc4a37bedb0f565c5ed
languageName: node
linkType: hard
@@ -18521,22 +19118,6 @@ __metadata:
languageName: node
linkType: hard
-"whatwg-encoding@npm:^3.1.1":
- version: 3.1.1
- resolution: "whatwg-encoding@npm:3.1.1"
- dependencies:
- iconv-lite: 0.6.3
- checksum: f75a61422421d991e4aec775645705beaf99a16a88294d68404866f65e92441698a4f5b9fa11dd609017b132d7b286c3c1534e2de5b3e800333856325b549e3c
- languageName: node
- linkType: hard
-
-"whatwg-mimetype@npm:^4.0.0":
- version: 4.0.0
- resolution: "whatwg-mimetype@npm:4.0.0"
- checksum: f97edd4b4ee7e46a379f3fb0e745de29fe8b839307cc774300fd49059fcdd560d38cb8fe21eae5575b8f39b022f23477cc66e40b0355c2851ce84760339cef30
- languageName: node
- linkType: hard
-
"which-boxed-primitive@npm:^1.0.2":
version: 1.0.2
resolution: "which-boxed-primitive@npm:1.0.2"
@@ -18816,11 +19397,11 @@ __metadata:
linkType: hard
"yaml@npm:^2.2.2":
- version: 2.6.0
- resolution: "yaml@npm:2.6.0"
+ version: 2.4.5
+ resolution: "yaml@npm:2.4.5"
bin:
yaml: bin.mjs
- checksum: e5e74fd75e01bde2c09333d529af9fbb5928c5f7f01bfdefdcb2bf753d4ef489a45cab4deac01c9448f55ca27e691612b81fe3c3a59bb8cb5b0069da0f92cf0b
+ checksum: f8efd407c07e095f00f3031108c9960b2b12971d10162b1ec19007200f6c987d2e28f73283f4731119aa610f177a3ea03d4a8fcf640600a25de1b74d00c69b3d
languageName: node
linkType: hard
diff --git a/vault/activity/activity_log.pb.go b/vault/activity/activity_log.pb.go
index aa7ce5d5919c..0f12bf229f4a 100644
--- a/vault/activity/activity_log.pb.go
+++ b/vault/activity/activity_log.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: vault/activity/activity_log.proto
@@ -49,9 +49,11 @@ type EntityRecord struct {
func (x *EntityRecord) Reset() {
*x = EntityRecord{}
- mi := &file_vault_activity_activity_log_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_activity_activity_log_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EntityRecord) String() string {
@@ -62,7 +64,7 @@ func (*EntityRecord) ProtoMessage() {}
func (x *EntityRecord) ProtoReflect() protoreflect.Message {
mi := &file_vault_activity_activity_log_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -132,16 +134,15 @@ type LogFragment struct {
// token counts not yet in a log segment,
// indexed by namespace ID
NonEntityTokens map[string]uint64 `protobuf:"bytes,3,rep,name=non_entity_tokens,json=nonEntityTokens,proto3" json:"non_entity_tokens,omitempty" protobuf_key:"bytes,1,opt,name=key,proto3" protobuf_val:"varint,2,opt,name=value,proto3"`
- // the cluster id that this fragment originated from
- // this is used when a fragment is sent from a secondary
- OriginatingCluster string `protobuf:"bytes,4,opt,name=originating_cluster,json=originatingCluster,proto3" json:"originating_cluster,omitempty"`
}
func (x *LogFragment) Reset() {
*x = LogFragment{}
- mi := &file_vault_activity_activity_log_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_activity_activity_log_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LogFragment) String() string {
@@ -152,7 +153,7 @@ func (*LogFragment) ProtoMessage() {}
func (x *LogFragment) ProtoReflect() protoreflect.Message {
mi := &file_vault_activity_activity_log_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -188,13 +189,6 @@ func (x *LogFragment) GetNonEntityTokens() map[string]uint64 {
return nil
}
-func (x *LogFragment) GetOriginatingCluster() string {
- if x != nil {
- return x.OriginatingCluster
- }
- return ""
-}
-
// This activity log stores records for both clients with entities
// and clients without entities
type EntityActivityLog struct {
@@ -207,9 +201,11 @@ type EntityActivityLog struct {
func (x *EntityActivityLog) Reset() {
*x = EntityActivityLog{}
- mi := &file_vault_activity_activity_log_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_activity_activity_log_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EntityActivityLog) String() string {
@@ -220,7 +216,7 @@ func (*EntityActivityLog) ProtoMessage() {}
func (x *EntityActivityLog) ProtoReflect() protoreflect.Message {
mi := &file_vault_activity_activity_log_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -252,9 +248,11 @@ type TokenCount struct {
func (x *TokenCount) Reset() {
*x = TokenCount{}
- mi := &file_vault_activity_activity_log_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_activity_activity_log_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *TokenCount) String() string {
@@ -265,7 +263,7 @@ func (*TokenCount) ProtoMessage() {}
func (x *TokenCount) ProtoReflect() protoreflect.Message {
mi := &file_vault_activity_activity_log_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -295,9 +293,11 @@ type LogFragmentResponse struct {
func (x *LogFragmentResponse) Reset() {
*x = LogFragmentResponse{}
- mi := &file_vault_activity_activity_log_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_activity_activity_log_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LogFragmentResponse) String() string {
@@ -308,7 +308,7 @@ func (*LogFragmentResponse) ProtoMessage() {}
func (x *LogFragmentResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_activity_activity_log_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -342,7 +342,7 @@ var file_vault_activity_activity_log_proto_rawDesc = []byte{
0x01, 0x28, 0x09, 0x52, 0x0d, 0x6d, 0x6f, 0x75, 0x6e, 0x74, 0x41, 0x63, 0x63, 0x65, 0x73, 0x73,
0x6f, 0x72, 0x12, 0x1f, 0x0a, 0x0b, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x5f, 0x74, 0x79, 0x70,
0x65, 0x18, 0x06, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0a, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x54,
- 0x79, 0x70, 0x65, 0x22, 0xb7, 0x02, 0x0a, 0x0b, 0x4c, 0x6f, 0x67, 0x46, 0x72, 0x61, 0x67, 0x6d,
+ 0x79, 0x70, 0x65, 0x22, 0x86, 0x02, 0x0a, 0x0b, 0x4c, 0x6f, 0x67, 0x46, 0x72, 0x61, 0x67, 0x6d,
0x65, 0x6e, 0x74, 0x12, 0x29, 0x0a, 0x10, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69,
0x6e, 0x67, 0x5f, 0x6e, 0x6f, 0x64, 0x65, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x0f, 0x6f,
0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x4e, 0x6f, 0x64, 0x65, 0x12, 0x30,
@@ -354,35 +354,32 @@ var file_vault_activity_activity_log_proto_rawDesc = []byte{
0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x2e, 0x4c, 0x6f, 0x67, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65,
0x6e, 0x74, 0x2e, 0x4e, 0x6f, 0x6e, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x54, 0x6f, 0x6b, 0x65,
0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52, 0x0f, 0x6e, 0x6f, 0x6e, 0x45, 0x6e, 0x74, 0x69,
- 0x74, 0x79, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x12, 0x2f, 0x0a, 0x13, 0x6f, 0x72, 0x69, 0x67,
- 0x69, 0x6e, 0x61, 0x74, 0x69, 0x6e, 0x67, 0x5f, 0x63, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x18,
- 0x04, 0x20, 0x01, 0x28, 0x09, 0x52, 0x12, 0x6f, 0x72, 0x69, 0x67, 0x69, 0x6e, 0x61, 0x74, 0x69,
- 0x6e, 0x67, 0x43, 0x6c, 0x75, 0x73, 0x74, 0x65, 0x72, 0x1a, 0x42, 0x0a, 0x14, 0x4e, 0x6f, 0x6e,
- 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72,
- 0x79, 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03,
- 0x6b, 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01,
- 0x28, 0x04, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x45, 0x0a,
- 0x11, 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x41, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x4c,
- 0x6f, 0x67, 0x12, 0x30, 0x0a, 0x07, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20,
- 0x03, 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x2e, 0x45,
- 0x6e, 0x74, 0x69, 0x74, 0x79, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x07, 0x63, 0x6c, 0x69,
- 0x65, 0x6e, 0x74, 0x73, 0x22, 0xb4, 0x01, 0x0a, 0x0a, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x43, 0x6f,
- 0x75, 0x6e, 0x74, 0x12, 0x5f, 0x0a, 0x15, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x62, 0x79, 0x5f,
- 0x6e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x03,
- 0x28, 0x0b, 0x32, 0x2c, 0x2e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x2e, 0x54, 0x6f,
- 0x6b, 0x65, 0x6e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x79,
- 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x49, 0x64, 0x45, 0x6e, 0x74, 0x72, 0x79,
- 0x52, 0x12, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61,
- 0x63, 0x65, 0x49, 0x64, 0x1a, 0x45, 0x0a, 0x17, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x79, 0x4e,
- 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x49, 0x64, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12,
- 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65,
- 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04,
- 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x15, 0x0a, 0x13, 0x4c,
- 0x6f, 0x67, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e,
- 0x73, 0x65, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d,
- 0x2f, 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x76, 0x61, 0x75, 0x6c, 0x74,
- 0x2f, 0x76, 0x61, 0x75, 0x6c, 0x74, 0x2f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x62,
- 0x06, 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
+ 0x74, 0x79, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x1a, 0x42, 0x0a, 0x14, 0x4e, 0x6f, 0x6e, 0x45,
+ 0x6e, 0x74, 0x69, 0x74, 0x79, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x73, 0x45, 0x6e, 0x74, 0x72, 0x79,
+ 0x12, 0x10, 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b,
+ 0x65, 0x79, 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28,
+ 0x04, 0x52, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x45, 0x0a, 0x11,
+ 0x45, 0x6e, 0x74, 0x69, 0x74, 0x79, 0x41, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x4c, 0x6f,
+ 0x67, 0x12, 0x30, 0x0a, 0x07, 0x63, 0x6c, 0x69, 0x65, 0x6e, 0x74, 0x73, 0x18, 0x01, 0x20, 0x03,
+ 0x28, 0x0b, 0x32, 0x16, 0x2e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x2e, 0x45, 0x6e,
+ 0x74, 0x69, 0x74, 0x79, 0x52, 0x65, 0x63, 0x6f, 0x72, 0x64, 0x52, 0x07, 0x63, 0x6c, 0x69, 0x65,
+ 0x6e, 0x74, 0x73, 0x22, 0xb4, 0x01, 0x0a, 0x0a, 0x54, 0x6f, 0x6b, 0x65, 0x6e, 0x43, 0x6f, 0x75,
+ 0x6e, 0x74, 0x12, 0x5f, 0x0a, 0x15, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x5f, 0x62, 0x79, 0x5f, 0x6e,
+ 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x5f, 0x69, 0x64, 0x18, 0x01, 0x20, 0x03, 0x28,
+ 0x0b, 0x32, 0x2c, 0x2e, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x2e, 0x54, 0x6f, 0x6b,
+ 0x65, 0x6e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x2e, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x79, 0x4e,
+ 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x49, 0x64, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x52,
+ 0x12, 0x63, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x79, 0x4e, 0x61, 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63,
+ 0x65, 0x49, 0x64, 0x1a, 0x45, 0x0a, 0x17, 0x43, 0x6f, 0x75, 0x6e, 0x74, 0x42, 0x79, 0x4e, 0x61,
+ 0x6d, 0x65, 0x73, 0x70, 0x61, 0x63, 0x65, 0x49, 0x64, 0x45, 0x6e, 0x74, 0x72, 0x79, 0x12, 0x10,
+ 0x0a, 0x03, 0x6b, 0x65, 0x79, 0x18, 0x01, 0x20, 0x01, 0x28, 0x09, 0x52, 0x03, 0x6b, 0x65, 0x79,
+ 0x12, 0x14, 0x0a, 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x18, 0x02, 0x20, 0x01, 0x28, 0x04, 0x52,
+ 0x05, 0x76, 0x61, 0x6c, 0x75, 0x65, 0x3a, 0x02, 0x38, 0x01, 0x22, 0x15, 0x0a, 0x13, 0x4c, 0x6f,
+ 0x67, 0x46, 0x72, 0x61, 0x67, 0x6d, 0x65, 0x6e, 0x74, 0x52, 0x65, 0x73, 0x70, 0x6f, 0x6e, 0x73,
+ 0x65, 0x42, 0x2b, 0x5a, 0x29, 0x67, 0x69, 0x74, 0x68, 0x75, 0x62, 0x2e, 0x63, 0x6f, 0x6d, 0x2f,
+ 0x68, 0x61, 0x73, 0x68, 0x69, 0x63, 0x6f, 0x72, 0x70, 0x2f, 0x76, 0x61, 0x75, 0x6c, 0x74, 0x2f,
+ 0x76, 0x61, 0x75, 0x6c, 0x74, 0x2f, 0x61, 0x63, 0x74, 0x69, 0x76, 0x69, 0x74, 0x79, 0x62, 0x06,
+ 0x70, 0x72, 0x6f, 0x74, 0x6f, 0x33,
}
var (
@@ -424,6 +421,68 @@ func file_vault_activity_activity_log_proto_init() {
if File_vault_activity_activity_log_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_vault_activity_activity_log_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*EntityRecord); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_activity_activity_log_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*LogFragment); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_activity_activity_log_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*EntityActivityLog); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_activity_activity_log_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*TokenCount); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_activity_activity_log_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*LogFragmentResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/vault/activity/activity_log.proto b/vault/activity/activity_log.proto
index ac68db1c4e2a..cb0a6b94e8d7 100644
--- a/vault/activity/activity_log.proto
+++ b/vault/activity/activity_log.proto
@@ -38,10 +38,6 @@ message LogFragment {
// token counts not yet in a log segment,
// indexed by namespace ID
map non_entity_tokens = 3;
-
- // the cluster id that this fragment originated from
- // this is used when a fragment is sent from a secondary
- string originating_cluster = 4;
}
// This activity log stores records for both clients with entities
diff --git a/vault/activity_log.go b/vault/activity_log.go
index 3ad43d31b479..71df6654a16a 100644
--- a/vault/activity_log.go
+++ b/vault/activity_log.go
@@ -36,29 +36,23 @@ import (
const (
// activitySubPath is the directory under the system view where
// the log will be stored.
- activitySubPath = "counters/activity/"
- activityEntityBasePath = "log/entity/"
- activityTokenBasePath = "log/directtokens/"
- activityTokenLocalBasePath = "local/" + activityTokenBasePath
- activityQueryBasePath = "queries/"
- activityConfigKey = "config"
- activityIntentLogKey = "endofmonth"
- activityGlobalPathPrefix = "global/"
- activityLocalPathPrefix = "local/"
+ activitySubPath = "counters/activity/"
+ activityEntityBasePath = "log/entity/"
+ activityTokenBasePath = "log/directtokens/"
+ activityQueryBasePath = "queries/"
+ activityConfigKey = "config"
+ activityIntentLogKey = "endofmonth"
activityACMERegenerationKey = "acme-regeneration"
// sketch for each month that stores hash of client ids
distinctClientsBasePath = "log/distinctclients/"
// for testing purposes (public as needed)
- ActivityLogPrefix = "sys/counters/activity/log/"
- ActivityGlobalLogPrefix = "sys/counters/activity/global/log/"
- ActivityLogLocalPrefix = "sys/counters/activity/local/log/"
- ActivityPrefix = "sys/counters/activity/"
+ ActivityLogPrefix = "sys/counters/activity/log/"
+ ActivityPrefix = "sys/counters/activity/"
- // Time to wait before a perf standby sends data to the active node, or
- // before the active node of a performance secondary sends global data to the primary.
- activityFragmentSendInterval = 10 * time.Minute
+ // Time to wait on perf standby before sending fragment
+ activityFragmentStandbyTime = 10 * time.Minute
// Time between writes of segment to storage
activitySegmentInterval = 10 * time.Minute
@@ -144,21 +138,13 @@ type ActivityLog struct {
// ActivityLog.l protects the configuration settings, except enable, and any modifications
// to the current segment.
- // Acquire "l" before fragmentLock, globalFragmentLock, and localFragmentLock if all must be held.
+ // Acquire "l" before fragmentLock if both must be held.
l sync.RWMutex
// fragmentLock protects enable, partialMonthClientTracker, fragment,
// standbyFragmentsReceived.
fragmentLock sync.RWMutex
- // localFragmentLock protects partialMonthLocalClientTracker, localFragment,
- // standbyLocalFragmentsReceived.
- localFragmentLock sync.RWMutex
-
- // globalFragmentLock protects enable secondaryGlobalClientFragments, standbyGlobalFragmentsReceived, currentGlobalFragment
- // and globalPartialMonthClientTracker
- globalFragmentLock sync.RWMutex
-
// enabled indicates if the activity log is enabled for this cluster.
// This is protected by fragmentLock so we can check with only
// a single synchronization call.
@@ -181,20 +167,13 @@ type ActivityLog struct {
nodeID string
// current log fragment (may be nil)
- fragment *activity.LogFragment
+ fragment *activity.LogFragment
+ fragmentCreation time.Time
// Channel to signal a new fragment has been created
// so it's appropriate to start the timer.
newFragmentCh chan struct{}
- // current local log fragment (may be nil)
- localFragment *activity.LogFragment
-
- // Channel to signal a new global fragment has been created
- // so it's appropriate to start the timer. Once the timer finishes
- // the secondary will send currentGlobalFragment to the primary
- newGlobalClientFragmentCh chan struct{}
-
// Channel for sending fragment immediately
sendCh chan struct{}
@@ -204,24 +183,9 @@ type ActivityLog struct {
// track metadata and contents of the most recent log segment
currentSegment segmentInfo
- // track metadata and contents of the most recent global log segment
- currentGlobalSegment segmentInfo
-
- // track metadata and contents of the most recent local log segment
- currentLocalSegment segmentInfo
-
// Fragments received from performance standbys
standbyFragmentsReceived []*activity.LogFragment
- // Local fragments received from performance standbys
- standbyLocalFragmentsReceived []*activity.LogFragment
-
- // Global fragments received from performance standbys
- standbyGlobalFragmentsReceived []*activity.LogFragment
-
- // Fragments of global clients received from performance secondaries
- secondaryGlobalClientFragments []*activity.LogFragment
-
// precomputed queries
queryStore *activity.PrecomputedQueryStore
defaultReportMonths int
@@ -241,12 +205,6 @@ type ActivityLog struct {
// partialMonthClientTracker tracks active clients this month. Protected by fragmentLock.
partialMonthClientTracker map[string]*activity.EntityRecord
- // partialMonthLocalClientTracker tracks active local clients this month. Protected by localFragmentLock.
- partialMonthLocalClientTracker map[string]*activity.EntityRecord
-
- // globalPartialMonthClientTracker tracks active clients this month. Protected by globalFragmentLock.
- globalPartialMonthClientTracker map[string]*activity.EntityRecord
-
inprocessExport *atomic.Bool
// clock is used to support manipulating time in unit and integration tests
@@ -254,9 +212,6 @@ type ActivityLog struct {
// precomputedQueryWritten receives an element whenever a precomputed query
// is written. It's used for unit testing
precomputedQueryWritten chan struct{}
-
- // currentGlobalFragment tracks the global clients of all the clients in memory
- currentGlobalFragment *activity.LogFragment
}
// These non-persistent configuration options allow us to disable
@@ -279,18 +234,6 @@ type ActivityLogCoreConfig struct {
Clock timeutil.Clock
DisableInvalidation bool
-
- // GlobalFragmentSendInterval sets the interval to send global data from the secondary to the primary
- // This is only for testing purposes
- GlobalFragmentSendInterval time.Duration
-
- // PerfStandbyFragmentSendInterval sets the interval to send fragment data from the perf standby to the active
- // This is only for testing purposes
- PerfStandbyFragmentSendInterval time.Duration
-
- // StorageWriteTestingInterval sets the interval flush data to the storage.
- // This is only for testing purposes
- StorageWriteTestingInterval time.Duration
}
// ActivityLogExportRecord is the output structure for activity export
@@ -361,20 +304,17 @@ func NewActivityLog(core *Core, logger log.Logger, view *BarrierView, metrics me
clock = timeutil.DefaultClock{}
}
a := &ActivityLog{
- core: core,
- configOverrides: &core.activityLogConfig,
- logger: logger,
- view: view,
- metrics: metrics,
- nodeID: hostname,
- newFragmentCh: make(chan struct{}, 1),
- sendCh: make(chan struct{}, 1), // buffered so it can be triggered by fragment size
- doneCh: make(chan struct{}, 1),
- partialMonthClientTracker: make(map[string]*activity.EntityRecord),
- partialMonthLocalClientTracker: make(map[string]*activity.EntityRecord),
- newGlobalClientFragmentCh: make(chan struct{}, 1),
- globalPartialMonthClientTracker: make(map[string]*activity.EntityRecord),
- clock: clock,
+ core: core,
+ configOverrides: &core.activityLogConfig,
+ logger: logger,
+ view: view,
+ metrics: metrics,
+ nodeID: hostname,
+ newFragmentCh: make(chan struct{}, 1),
+ sendCh: make(chan struct{}, 1), // buffered so it can be triggered by fragment size
+ doneCh: make(chan struct{}, 1),
+ partialMonthClientTracker: make(map[string]*activity.EntityRecord),
+ clock: clock,
currentSegment: segmentInfo{
startTimestamp: 0,
currentClients: &activity.EntityActivityLog{
@@ -388,38 +328,9 @@ func NewActivityLog(core *Core, logger log.Logger, view *BarrierView, metrics me
},
clientSequenceNumber: 0,
},
- currentGlobalSegment: segmentInfo{
- startTimestamp: 0,
- currentClients: &activity.EntityActivityLog{
- Clients: make([]*activity.EntityRecord, 0),
- },
- // tokenCount is deprecated, but must still exist for the current segment
- // so the fragment that was using TWEs before the 1.9 changes
- // can be flushed to the current segment.
- tokenCount: &activity.TokenCount{
- CountByNamespaceID: make(map[string]uint64),
- },
- clientSequenceNumber: 0,
- },
- currentLocalSegment: segmentInfo{
- startTimestamp: 0,
- currentClients: &activity.EntityActivityLog{
- Clients: make([]*activity.EntityRecord, 0),
- },
- // tokenCount is deprecated, but must still exist for the current segment
- // so the fragment that was using TWEs before the 1.9 changes
- // can be flushed to the current segment.
- tokenCount: &activity.TokenCount{
- CountByNamespaceID: make(map[string]uint64),
- },
- clientSequenceNumber: 0,
- },
- standbyFragmentsReceived: make([]*activity.LogFragment, 0),
- standbyLocalFragmentsReceived: make([]*activity.LogFragment, 0),
- standbyGlobalFragmentsReceived: make([]*activity.LogFragment, 0),
- secondaryGlobalClientFragments: make([]*activity.LogFragment, 0),
- inprocessExport: atomic.NewBool(false),
- precomputedQueryWritten: make(chan struct{}),
+ standbyFragmentsReceived: make([]*activity.LogFragment, 0),
+ inprocessExport: atomic.NewBool(false),
+ precomputedQueryWritten: make(chan struct{}),
}
config, err := a.loadConfigOrDefault(core.activeContext)
@@ -462,46 +373,14 @@ func (a *ActivityLog) saveCurrentSegmentToStorageLocked(ctx context.Context, for
defer a.metrics.MeasureSinceWithLabels([]string{"core", "activity", "segment_write"},
a.clock.Now(), []metricsutil.Label{})
- // Swap out the pending regular fragments
+ // Swap out the pending fragments
a.fragmentLock.Lock()
- currentFragment := a.fragment
+ localFragment := a.fragment
a.fragment = nil
standbys := a.standbyFragmentsReceived
a.standbyFragmentsReceived = make([]*activity.LogFragment, 0)
a.fragmentLock.Unlock()
- a.globalFragmentLock.Lock()
- secondaryGlobalClients := a.secondaryGlobalClientFragments
- a.secondaryGlobalClientFragments = make([]*activity.LogFragment, 0)
- standbyGlobalClients := a.standbyGlobalFragmentsReceived
- a.standbyGlobalFragmentsReceived = make([]*activity.LogFragment, 0)
- globalClients := a.currentGlobalFragment
- a.currentGlobalFragment = nil
- a.globalFragmentLock.Unlock()
-
- if !a.core.IsPerfSecondary() {
- if a.currentGlobalFragment != nil {
- a.metrics.IncrCounterWithLabels([]string{"core", "activity", "global_fragment_size"},
- float32(len(globalClients.Clients)),
- []metricsutil.Label{
- {"type", "client"},
- })
- }
- var globalReceivedFragmentTotal int
- for _, globalReceivedFragment := range secondaryGlobalClients {
- globalReceivedFragmentTotal += len(globalReceivedFragment.Clients)
- }
- for _, globalReceivedFragment := range standbyGlobalClients {
- globalReceivedFragmentTotal += len(globalReceivedFragment.Clients)
- }
- a.metrics.IncrCounterWithLabels([]string{"core", "activity", "global_received_fragment_size"},
- float32(globalReceivedFragmentTotal),
- []metricsutil.Label{
- {"type", "client"},
- })
-
- }
-
// If segment start time is zero, do not update or write
// (even if force is true). This can happen if activityLog is
// disabled after a save as been triggered.
@@ -509,54 +388,24 @@ func (a *ActivityLog) saveCurrentSegmentToStorageLocked(ctx context.Context, for
return nil
}
- if ret := a.createCurrentSegmentFromFragments(ctx, append(standbys, currentFragment), &a.currentSegment, force, ""); ret != nil {
- return ret
- }
-
- // If we are the primary, store global clients
- // Create fragments from global clients and store the segment
- if !a.core.IsPerfSecondary() {
- globalFragments := append(append(secondaryGlobalClients, globalClients), standbyGlobalClients...)
- if ret := a.createCurrentSegmentFromFragments(ctx, globalFragments, &a.currentGlobalSegment, force, activityGlobalPathPrefix); ret != nil {
- return ret
- }
- }
-
- // Swap out the pending local fragments
- a.localFragmentLock.Lock()
- localFragment := a.localFragment
- a.localFragment = nil
- standbyLocalFragments := a.standbyLocalFragmentsReceived
- a.standbyLocalFragmentsReceived = make([]*activity.LogFragment, 0)
- a.localFragmentLock.Unlock()
-
- // Measure the current local fragment
+ // Measure the current fragment
if localFragment != nil {
- a.metrics.IncrCounterWithLabels([]string{"core", "activity", "local_fragment_size"},
+ a.metrics.IncrCounterWithLabels([]string{"core", "activity", "fragment_size"},
float32(len(localFragment.Clients)),
[]metricsutil.Label{
{"type", "entity"},
})
- a.metrics.IncrCounterWithLabels([]string{"core", "activity", "local_fragment_size"},
+ a.metrics.IncrCounterWithLabels([]string{"core", "activity", "fragment_size"},
float32(len(localFragment.NonEntityTokens)),
[]metricsutil.Label{
{"type", "direct_token"},
})
}
- // store local fragments
- if ret := a.createCurrentSegmentFromFragments(ctx, append(standbyLocalFragments, localFragment), &a.currentLocalSegment, force, activityLocalPathPrefix); ret != nil {
- return ret
- }
-
- return nil
-}
-
-func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fragments []*activity.LogFragment, currentSegment *segmentInfo, force bool, storagePathPrefix string) error {
// Collect new entities and new tokens.
saveChanges := false
newEntities := make(map[string]*activity.EntityRecord)
- for _, f := range fragments {
+ for _, f := range append(standbys, localFragment) {
if f == nil {
continue
}
@@ -566,7 +415,7 @@ func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fra
for _, e := range f.Clients {
// We could sort by timestamp to see which is first.
// We'll ignore that; the order of the append above means
- // that we choose entries in currentFragment over those
+ // that we choose entries in localFragment over those
// from standby nodes.
newEntities[e.ClientID] = e
}
@@ -580,7 +429,7 @@ func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fra
// a.partialMonthClientTracker.nonEntityCountByNamespaceID. This preserves backward
// compatibility for the precomputedQueryWorkers and the segment storing
// logic.
- currentSegment.tokenCount.CountByNamespaceID[ns] += val
+ a.currentSegment.tokenCount.CountByNamespaceID[ns] += val
}
}
@@ -589,14 +438,14 @@ func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fra
}
// Will all new entities fit? If not, roll over to a new segment.
- available := ActivitySegmentClientCapacity - len(currentSegment.currentClients.Clients)
+ available := ActivitySegmentClientCapacity - len(a.currentSegment.currentClients.Clients)
remaining := available - len(newEntities)
excess := 0
if remaining < 0 {
excess = -remaining
}
- segmentClients := currentSegment.currentClients.Clients
+ segmentClients := a.currentSegment.currentClients.Clients
excessClients := make([]*activity.EntityRecord, 0, excess)
for _, record := range newEntities {
if available > 0 {
@@ -606,8 +455,9 @@ func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fra
excessClients = append(excessClients, record)
}
}
- currentSegment.currentClients.Clients = segmentClients
- err := a.saveCurrentSegmentInternal(ctx, force, *currentSegment, storagePathPrefix)
+ a.currentSegment.currentClients.Clients = segmentClients
+
+ err := a.saveCurrentSegmentInternal(ctx, force)
if err != nil {
// The current fragment(s) have already been placed into the in-memory
// segment, but we may lose any excess (in excessClients).
@@ -617,7 +467,7 @@ func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fra
}
if available <= 0 {
- if currentSegment.clientSequenceNumber >= activityLogMaxSegmentPerMonth {
+ if a.currentSegment.clientSequenceNumber >= activityLogMaxSegmentPerMonth {
// Cannot send as Warn because it will repeat too often,
// and disabling/renabling would be complicated.
a.logger.Trace("too many segments in current month", "dropped", len(excessClients))
@@ -625,13 +475,13 @@ func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fra
}
// Rotate to next segment
- currentSegment.clientSequenceNumber += 1
+ a.currentSegment.clientSequenceNumber += 1
if len(excessClients) > ActivitySegmentClientCapacity {
a.logger.Warn("too many new active clients, dropping tail", "clients", len(excessClients))
excessClients = excessClients[:ActivitySegmentClientCapacity]
}
- currentSegment.currentClients.Clients = excessClients
- err := a.saveCurrentSegmentInternal(ctx, force, *currentSegment, storagePathPrefix)
+ a.currentSegment.currentClients.Clients = excessClients
+ err := a.saveCurrentSegmentInternal(ctx, force)
if err != nil {
return err
}
@@ -640,12 +490,12 @@ func (a *ActivityLog) createCurrentSegmentFromFragments(ctx context.Context, fra
}
// :force: forces a save of tokens/entities even if the in-memory log is empty
-func (a *ActivityLog) saveCurrentSegmentInternal(ctx context.Context, force bool, currentSegment segmentInfo, storagePathPrefix string) error {
- _, err := a.saveSegmentEntitiesInternal(ctx, currentSegment, force, storagePathPrefix)
+func (a *ActivityLog) saveCurrentSegmentInternal(ctx context.Context, force bool) error {
+ _, err := a.saveSegmentEntitiesInternal(ctx, a.currentSegment, force)
if err != nil {
return err
}
- _, err = a.saveSegmentTokensInternal(ctx, currentSegment, force)
+ _, err = a.saveSegmentTokensInternal(ctx, a.currentSegment, force)
return err
}
@@ -654,7 +504,7 @@ func (a *ActivityLog) saveSegmentTokensInternal(ctx context.Context, currentSegm
return "", nil
}
// RFC (VLT-120) defines this as 1-indexed, but it should be 0-indexed
- tokenPath := fmt.Sprintf("%s%d/0", activityTokenLocalBasePath, currentSegment.startTimestamp)
+ tokenPath := fmt.Sprintf("%s%d/0", activityTokenBasePath, currentSegment.startTimestamp)
// We must still allow for the tokenCount of the current segment to
// be written to storage, since if we remove this code we will incur
// data loss for one segment's worth of TWEs.
@@ -664,15 +514,15 @@ func (a *ActivityLog) saveSegmentTokensInternal(ctx context.Context, currentSegm
switch {
case err != nil:
a.logger.Error(fmt.Sprintf("unable to retrieve oldest version timestamp: %s", err.Error()))
- case len(currentSegment.tokenCount.CountByNamespaceID) > 0 &&
+ case len(a.currentSegment.tokenCount.CountByNamespaceID) > 0 &&
(oldestUpgradeTime.Add(time.Duration(trackedTWESegmentPeriod * time.Hour)).Before(time.Now())):
a.logger.Error(fmt.Sprintf("storing nonzero token count over a month after vault was upgraded to %s", oldestVersion))
default:
- if len(currentSegment.tokenCount.CountByNamespaceID) > 0 {
+ if len(a.currentSegment.tokenCount.CountByNamespaceID) > 0 {
a.logger.Info("storing nonzero token count")
}
}
- tokenCount, err := proto.Marshal(currentSegment.tokenCount)
+ tokenCount, err := proto.Marshal(a.currentSegment.tokenCount)
if err != nil {
return "", err
}
@@ -689,10 +539,10 @@ func (a *ActivityLog) saveSegmentTokensInternal(ctx context.Context, currentSegm
return tokenPath, nil
}
-func (a *ActivityLog) saveSegmentEntitiesInternal(ctx context.Context, currentSegment segmentInfo, force bool, storagePathPrefix string) (string, error) {
- entityPath := fmt.Sprintf("%s%s%d/%d", storagePathPrefix, activityEntityBasePath, currentSegment.startTimestamp, currentSegment.clientSequenceNumber)
+func (a *ActivityLog) saveSegmentEntitiesInternal(ctx context.Context, currentSegment segmentInfo, force bool) (string, error) {
+ entityPath := fmt.Sprintf("%s%d/%d", activityEntityBasePath, currentSegment.startTimestamp, currentSegment.clientSequenceNumber)
- for _, client := range currentSegment.currentClients.Clients {
+ for _, client := range a.currentSegment.currentClients.Clients {
// Explicitly catch and throw clear error message if client ID creation and storage
// results in a []byte that doesn't assert into a valid string.
if !utf8.ValidString(client.ClientID) {
@@ -736,7 +586,7 @@ func parseSegmentNumberFromPath(path string) (int, bool) {
// sorted last to first
func (a *ActivityLog) availableLogs(ctx context.Context, upTo time.Time) ([]time.Time, error) {
paths := make([]string, 0)
- for _, basePath := range []string{activityEntityBasePath, activityLocalPathPrefix + activityEntityBasePath, activityGlobalPathPrefix + activityEntityBasePath, activityTokenLocalBasePath} {
+ for _, basePath := range []string{activityEntityBasePath, activityTokenBasePath} {
p, err := a.view.List(ctx, basePath)
if err != nil {
return nil, err
@@ -785,25 +635,8 @@ func (a *ActivityLog) getMostRecentActivityLogSegment(ctx context.Context, now t
}
// getLastEntitySegmentNumber returns the (non-negative) last segment number for the :startTime:, if it exists
-func (a *ActivityLog) getLastEntitySegmentNumber(ctx context.Context, startTime time.Time) (uint64, uint64, uint64, bool, error) {
- segmentHighestNum, segmentPresent, err := a.getLastSegmentNumberByEntityPath(ctx, activityEntityBasePath+fmt.Sprint(startTime.Unix())+"/")
- if err != nil {
- return 0, 0, 0, false, err
- }
- globalHighestNum, globalSegmentPresent, err := a.getLastSegmentNumberByEntityPath(ctx, activityGlobalPathPrefix+activityEntityBasePath+fmt.Sprint(startTime.Unix())+"/")
- if err != nil {
- return 0, 0, 0, false, err
- }
- localHighestNum, localSegmentPresent, err := a.getLastSegmentNumberByEntityPath(ctx, activityLocalPathPrefix+activityEntityBasePath+fmt.Sprint(startTime.Unix())+"/")
- if err != nil {
- return 0, 0, 0, false, err
- }
-
- return segmentHighestNum, uint64(localHighestNum), uint64(globalHighestNum), (segmentPresent || localSegmentPresent || globalSegmentPresent), nil
-}
-
-func (a *ActivityLog) getLastSegmentNumberByEntityPath(ctx context.Context, entityPath string) (uint64, bool, error) {
- p, err := a.view.List(ctx, entityPath)
+func (a *ActivityLog) getLastEntitySegmentNumber(ctx context.Context, startTime time.Time) (uint64, bool, error) {
+ p, err := a.view.List(ctx, activityEntityBasePath+fmt.Sprint(startTime.Unix())+"/")
if err != nil {
return 0, false, err
}
@@ -817,14 +650,12 @@ func (a *ActivityLog) getLastSegmentNumberByEntityPath(ctx context.Context, enti
}
}
- segmentPresent := true
- segmentHighestNum := uint64(highestNum)
if highestNum < 0 {
// numbers less than 0 are invalid. if a negative number is the highest value, there isn't a segment
- segmentHighestNum = 0
- segmentPresent = false
+ return 0, false, nil
}
- return segmentHighestNum, segmentPresent, nil
+
+ return uint64(highestNum), true, nil
}
// WalkEntitySegments loads each of the entity segments for a particular start time
@@ -863,7 +694,7 @@ func (a *ActivityLog) WalkTokenSegments(ctx context.Context,
startTime time.Time,
walkFn func(*activity.TokenCount),
) error {
- basePath := activityTokenLocalBasePath + fmt.Sprint(startTime.Unix()) + "/"
+ basePath := activityTokenBasePath + fmt.Sprint(startTime.Unix()) + "/"
pathList, err := a.view.List(ctx, basePath)
if err != nil {
return err
@@ -907,7 +738,6 @@ func (a *ActivityLog) loadPriorEntitySegment(ctx context.Context, startTime time
}
a.l.RLock()
- defer a.l.RUnlock()
a.fragmentLock.Lock()
// Handle the (unlikely) case where the end of the month has been reached while background loading.
// Or the feature has been disabled.
@@ -917,62 +747,15 @@ func (a *ActivityLog) loadPriorEntitySegment(ctx context.Context, startTime time
}
}
a.fragmentLock.Unlock()
-
- // load all the active global clients
- globalPath := activityGlobalPathPrefix + activityEntityBasePath + fmt.Sprint(startTime.Unix()) + "/" + strconv.FormatUint(sequenceNum, 10)
- data, err = a.view.Get(ctx, globalPath)
- if err != nil {
- return err
- }
- if data == nil {
- return nil
- }
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(data.Value, out)
- if err != nil {
- return err
- }
- a.globalFragmentLock.Lock()
- // Handle the (unlikely) case where the end of the month has been reached while background loading.
- // Or the feature has been disabled.
- if a.enabled && startTime.Unix() == a.currentGlobalSegment.startTimestamp {
- for _, ent := range out.Clients {
- a.globalPartialMonthClientTracker[ent.ClientID] = ent
- }
- }
- a.globalFragmentLock.Unlock()
-
- // load all the active local clients
- localPath := activityLocalPathPrefix + activityEntityBasePath + fmt.Sprint(startTime.Unix()) + "/" + strconv.FormatUint(sequenceNum, 10)
- data, err = a.view.Get(ctx, localPath)
- if err != nil {
- return err
- }
- if data == nil {
- return nil
- }
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(data.Value, out)
- if err != nil {
- return err
- }
- a.localFragmentLock.Lock()
- // Handle the (unlikely) case where the end of the month has been reached while background loading.
- // Or the feature has been disabled.
- if a.enabled && startTime.Unix() == a.currentLocalSegment.startTimestamp {
- for _, ent := range out.Clients {
- a.partialMonthLocalClientTracker[ent.ClientID] = ent
- }
- }
- a.localFragmentLock.Unlock()
+ a.l.RUnlock()
return nil
}
// loadCurrentClientSegment loads the most recent segment (for "this month")
// into memory (to append new entries), and to the partialMonthClientTracker to
-// avoid duplication call with fragmentLock, globalFragmentLock, localFragmentLock and l held.
-func (a *ActivityLog) loadCurrentClientSegment(ctx context.Context, startTime time.Time, sequenceNum uint64, localSegmentSequenceNumber uint64, globalSegmentSequenceNumber uint64) error {
+// avoid duplication call with fragmentLock and l held.
+func (a *ActivityLog) loadCurrentClientSegment(ctx context.Context, startTime time.Time, sequenceNum uint64) error {
path := activityEntityBasePath + fmt.Sprint(startTime.Unix()) + "/" + strconv.FormatUint(sequenceNum, 10)
data, err := a.view.Get(ctx, path)
if err != nil {
@@ -1006,81 +789,13 @@ func (a *ActivityLog) loadCurrentClientSegment(ctx context.Context, startTime ti
a.partialMonthClientTracker[client.ClientID] = client
}
- // load current global segment
- path = activityGlobalPathPrefix + activityEntityBasePath + fmt.Sprint(startTime.Unix()) + "/" + strconv.FormatUint(globalSegmentSequenceNumber, 10)
- data, err = a.view.Get(ctx, path)
- if err != nil {
- return err
- }
- if data == nil {
- return nil
- }
-
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(data.Value, out)
- if err != nil {
- return err
- }
-
- if !a.core.perfStandby {
- a.currentGlobalSegment = segmentInfo{
- startTimestamp: startTime.Unix(),
- currentClients: &activity.EntityActivityLog{
- Clients: out.Clients,
- },
- tokenCount: &activity.TokenCount{
- CountByNamespaceID: make(map[string]uint64),
- },
- clientSequenceNumber: sequenceNum,
- }
- } else {
- // populate this for edge case checking (if end of month passes while background loading on standby)
- a.currentGlobalSegment.startTimestamp = startTime.Unix()
- }
- for _, client := range out.Clients {
- a.globalPartialMonthClientTracker[client.ClientID] = client
- }
-
- // load current local segment
- path = activityLocalPathPrefix + activityEntityBasePath + fmt.Sprint(startTime.Unix()) + "/" + strconv.FormatUint(localSegmentSequenceNumber, 10)
- data, err = a.view.Get(ctx, path)
- if err != nil {
- return err
- }
- if data == nil {
- return nil
- }
-
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(data.Value, out)
- if err != nil {
- return err
- }
-
- if !a.core.perfStandby {
- a.currentLocalSegment = segmentInfo{
- startTimestamp: startTime.Unix(),
- currentClients: &activity.EntityActivityLog{
- Clients: out.Clients,
- },
- tokenCount: a.currentLocalSegment.tokenCount,
- clientSequenceNumber: sequenceNum,
- }
- } else {
- // populate this for edge case checking (if end of month passes while background loading on standby)
- a.currentLocalSegment.startTimestamp = startTime.Unix()
- }
- for _, client := range out.Clients {
- a.partialMonthLocalClientTracker[client.ClientID] = client
- }
-
return nil
}
// tokenCountExists checks if there's a token log for :startTime:
// this function should be called with the lock held
func (a *ActivityLog) tokenCountExists(ctx context.Context, startTime time.Time) (bool, error) {
- p, err := a.view.List(ctx, activityTokenLocalBasePath+fmt.Sprint(startTime.Unix())+"/")
+ p, err := a.view.List(ctx, activityTokenBasePath+fmt.Sprint(startTime.Unix())+"/")
if err != nil {
return false, err
}
@@ -1105,7 +820,7 @@ func (a *ActivityLog) loadTokenCount(ctx context.Context, startTime time.Time) e
return nil
}
- path := activityTokenLocalBasePath + fmt.Sprint(startTime.Unix()) + "/0"
+ path := activityTokenBasePath + fmt.Sprint(startTime.Unix()) + "/0"
data, err := a.view.Get(ctx, path)
if err != nil {
return err
@@ -1129,7 +844,6 @@ func (a *ActivityLog) loadTokenCount(ctx context.Context, startTime time.Time) e
// so that TWEs counted before the introduction of a client ID for TWEs are
// still reported in the partial client counts.
a.currentSegment.tokenCount = out
- a.currentLocalSegment.tokenCount = out
return nil
}
@@ -1153,42 +867,33 @@ func (a *ActivityLog) entityBackgroundLoader(ctx context.Context, wg *sync.WaitG
}
// Initialize a new current segment, based on the current time.
-// Call with fragmentLock, globalFragmentLock, localFragmentLock and l held.
+// Call with fragmentLock and l held.
func (a *ActivityLog) startNewCurrentLogLocked(now time.Time) {
a.logger.Trace("initializing new log")
a.resetCurrentLog()
- a.setCurrentSegmentTimeLocked(now)
+ a.currentSegment.startTimestamp = now.Unix()
}
-// Should be called with fragmentLock, globalFragmentLock, localFragmentLock and l held.
+// Should be called with fragmentLock and l held.
func (a *ActivityLog) newMonthCurrentLogLocked(currentTime time.Time) {
a.logger.Trace("continuing log to new month")
a.resetCurrentLog()
monthStart := timeutil.StartOfMonth(currentTime.UTC())
- a.setCurrentSegmentTimeLocked(monthStart)
+ a.currentSegment.startTimestamp = monthStart.Unix()
}
// Initialize a new current segment, based on the given time
-// should be called with fragmentLock, globalFragmentLock, localFragmentLock and l held.
+// should be called with fragmentLock and l held.
func (a *ActivityLog) newSegmentAtGivenTime(t time.Time) {
timestamp := t.Unix()
a.logger.Trace("starting a segment", "timestamp", timestamp)
a.resetCurrentLog()
- a.setCurrentSegmentTimeLocked(t)
-}
-
-// Sets the timestamp of all the segments to the given time.
-// should be called with l held.
-func (a *ActivityLog) setCurrentSegmentTimeLocked(t time.Time) {
- timestamp := t.Unix()
a.currentSegment.startTimestamp = timestamp
- a.currentGlobalSegment.startTimestamp = timestamp
- a.currentLocalSegment.startTimestamp = timestamp
}
// Reset all the current segment state.
-// Should be called with fragmentLock, globalFragmentLock, localFragmentLock and l held.
+// Should be called with fragmentLock and l held.
func (a *ActivityLog) resetCurrentLog() {
a.currentSegment.startTimestamp = 0
a.currentSegment.currentClients = &activity.EntityActivityLog{
@@ -1203,55 +908,40 @@ func (a *ActivityLog) resetCurrentLog() {
a.currentSegment.clientSequenceNumber = 0
- // global segment
- a.currentGlobalSegment.startTimestamp = 0
- a.currentGlobalSegment.currentClients = &activity.EntityActivityLog{
- Clients: make([]*activity.EntityRecord, 0),
- }
- a.currentGlobalSegment.clientSequenceNumber = 0
-
- // local segment
- a.currentLocalSegment.startTimestamp = 0
- a.currentLocalSegment.currentClients = &activity.EntityActivityLog{
- Clients: make([]*activity.EntityRecord, 0),
- }
- a.currentLocalSegment.clientSequenceNumber = 0
-
a.fragment = nil
a.partialMonthClientTracker = make(map[string]*activity.EntityRecord)
- a.currentGlobalFragment = nil
- a.globalPartialMonthClientTracker = make(map[string]*activity.EntityRecord)
-
- a.localFragment = nil
- a.partialMonthLocalClientTracker = make(map[string]*activity.EntityRecord)
-
a.standbyFragmentsReceived = make([]*activity.LogFragment, 0)
- a.standbyLocalFragmentsReceived = make([]*activity.LogFragment, 0)
- a.standbyGlobalFragmentsReceived = make([]*activity.LogFragment, 0)
- a.secondaryGlobalClientFragments = make([]*activity.LogFragment, 0)
}
func (a *ActivityLog) deleteLogWorker(ctx context.Context, startTimestamp int64, whenDone chan struct{}) {
- entityPathsToDelete := make([]string, 0)
- entityPathsToDelete = append(entityPathsToDelete, fmt.Sprintf("%v%v/", activityEntityBasePath, startTimestamp))
- entityPathsToDelete = append(entityPathsToDelete, fmt.Sprintf("%s%v%v/", activityGlobalPathPrefix, activityEntityBasePath, startTimestamp))
- entityPathsToDelete = append(entityPathsToDelete, fmt.Sprintf("%s%v%v/", activityLocalPathPrefix, activityEntityBasePath, startTimestamp))
- entityPathsToDelete = append(entityPathsToDelete, fmt.Sprintf("%v%v/", activityTokenLocalBasePath, startTimestamp))
-
- for _, path := range entityPathsToDelete {
- segments, err := a.view.List(ctx, path)
+ entityPath := fmt.Sprintf("%v%v/", activityEntityBasePath, startTimestamp)
+ tokenPath := fmt.Sprintf("%v%v/", activityTokenBasePath, startTimestamp)
+
+ entitySegments, err := a.view.List(ctx, entityPath)
+ if err != nil {
+ a.logger.Error("could not list entity paths", "error", err)
+ return
+ }
+ for _, p := range entitySegments {
+ err = a.view.Delete(ctx, entityPath+p)
if err != nil {
- a.logger.Error("could not list segment path", "error", err)
- return
+ a.logger.Error("could not delete entity log", "error", err)
}
- for _, p := range segments {
- err = a.view.Delete(ctx, path+p)
- if err != nil {
- a.logger.Error("could not delete log", "error", err)
- }
+ }
+
+ tokenSegments, err := a.view.List(ctx, tokenPath)
+ if err != nil {
+ a.logger.Error("could not list token paths", "error", err)
+ return
+ }
+ for _, p := range tokenSegments {
+ err = a.view.Delete(ctx, tokenPath+p)
+ if err != nil {
+ a.logger.Error("could not delete token log", "error", err)
}
}
+
// Allow whoever started this as a goroutine to wait for it to finish.
close(whenDone)
}
@@ -1277,11 +967,6 @@ func (a *ActivityLog) refreshFromStoredLog(ctx context.Context, wg *sync.WaitGro
defer a.l.Unlock()
a.fragmentLock.Lock()
defer a.fragmentLock.Unlock()
- a.globalFragmentLock.Lock()
- defer a.globalFragmentLock.Unlock()
- // startNewCurrentLogLocked below calls resetCurrentLog which is protected by fragmentLock, globalFragmentLock, localFragmentLock and l
- a.localFragmentLock.Lock()
- defer a.localFragmentLock.Unlock()
decreasingLogTimes, err := a.getMostRecentActivityLogSegment(ctx, now)
if err != nil {
@@ -1350,7 +1035,7 @@ func (a *ActivityLog) refreshFromStoredLog(ctx context.Context, wg *sync.WaitGro
}
// load entity logs from storage into memory
- lastSegment, localLastSegment, globalLastSegment, segmentsExist, err := a.getLastEntitySegmentNumber(ctx, mostRecent)
+ lastSegment, segmentsExist, err := a.getLastEntitySegmentNumber(ctx, mostRecent)
if err != nil {
return err
}
@@ -1359,7 +1044,7 @@ func (a *ActivityLog) refreshFromStoredLog(ctx context.Context, wg *sync.WaitGro
return nil
}
- err = a.loadCurrentClientSegment(ctx, mostRecent, lastSegment, localLastSegment, globalLastSegment)
+ err = a.loadCurrentClientSegment(ctx, mostRecent, lastSegment)
if err != nil || lastSegment == 0 {
return err
}
@@ -1408,9 +1093,6 @@ func (a *ActivityLog) SetConfig(ctx context.Context, config activityConfig) {
// enabled is protected by fragmentLock
a.fragmentLock.Lock()
- // startNewCurrentLogLocked and resetCurrentLog is protected by fragmentLock, globalFragmentLock, localFragmentLock and l
- a.localFragmentLock.Lock()
- a.globalFragmentLock.Lock()
originalEnabled := a.enabled
switch config.Enabled {
case "enable":
@@ -1425,7 +1107,7 @@ func (a *ActivityLog) SetConfig(ctx context.Context, config activityConfig) {
a.logger.Info("activity log enable changed", "original", originalEnabled, "current", a.enabled)
}
- if !a.enabled && a.currentSegment.startTimestamp != 0 && a.currentGlobalSegment.startTimestamp != 0 && a.currentLocalSegment.startTimestamp != 0 {
+ if !a.enabled && a.currentSegment.startTimestamp != 0 {
a.logger.Trace("deleting current segment")
a.deleteDone = make(chan struct{})
// this is called from a request under stateLock, so use activeContext
@@ -1434,7 +1116,7 @@ func (a *ActivityLog) SetConfig(ctx context.Context, config activityConfig) {
}
forceSave := false
- if a.enabled && a.currentSegment.startTimestamp == 0 && a.currentGlobalSegment.startTimestamp == 0 && a.currentLocalSegment.startTimestamp == 0 {
+ if a.enabled && a.currentSegment.startTimestamp == 0 {
a.startNewCurrentLogLocked(a.clock.Now().UTC())
// Force a save so we can distinguish between
//
@@ -1448,14 +1130,10 @@ func (a *ActivityLog) SetConfig(ctx context.Context, config activityConfig) {
forceSave = true
}
a.fragmentLock.Unlock()
- a.localFragmentLock.Unlock()
- a.globalFragmentLock.Unlock()
if forceSave {
// l is still held here
- a.saveCurrentSegmentInternal(ctx, true, a.currentSegment, "")
- a.saveCurrentSegmentInternal(ctx, true, a.currentGlobalSegment, activityGlobalPathPrefix)
- a.saveCurrentSegmentInternal(ctx, true, a.currentLocalSegment, activityLocalPathPrefix)
+ a.saveCurrentSegmentInternal(ctx, true)
}
a.defaultReportMonths = config.DefaultReportMonths
@@ -1546,9 +1224,6 @@ func (c *Core) setupActivityLogLocked(ctx context.Context, wg *sync.WaitGroup, r
} else {
if !c.activityLogConfig.DisableFragmentWorker {
go manager.activeFragmentWorker(ctx)
- if c.IsPerfSecondary() {
- go manager.secondaryFragmentWorker(ctx)
- }
}
doRegeneration := !reload && !manager.hasRegeneratedACME(ctx)
@@ -1700,97 +1375,6 @@ func (a *ActivityLog) StartOfNextMonth() time.Time {
return timeutil.StartOfNextMonth(segmentStart)
}
-// secondaryFragmentWorker handles scheduling global client fragments
-// to send via RPC to the primary; it runs on performance secondaries
-func (a *ActivityLog) secondaryFragmentWorker(ctx context.Context) {
- timer := a.clock.NewTimer(time.Duration(0))
- fragmentWaiting := false
- // Eat first event, so timer is stopped
- <-timer.C
-
- endOfMonth := a.clock.NewTimer(a.StartOfNextMonth().Sub(a.clock.Now()))
- if a.configOverrides.DisableTimers {
- endOfMonth.Stop()
- }
- sendInterval := activityFragmentSendInterval
- // This changes the interval to a duration that was set for testing purposes
- if a.configOverrides.GlobalFragmentSendInterval.Microseconds() > 0 {
- sendInterval = a.configOverrides.GlobalFragmentSendInterval
- }
-
- sendFunc := func() {
- ctx, cancel := context.WithTimeout(ctx, activityFragmentSendTimeout)
- defer cancel()
- err := a.sendGlobalClients(ctx)
- if err != nil {
- a.logger.Warn("activity log global fragment lost", "error", err)
- }
- }
-
- for {
- select {
- case <-a.doneCh:
- // Shutting down activity log.
- if fragmentWaiting && !timer.Stop() {
- <-timer.C
- }
- if !endOfMonth.Stop() {
- <-endOfMonth.C
- }
- return
- case <-a.newGlobalClientFragmentCh:
- // New fragment created, start the timer if not
- // already running
- if !fragmentWaiting {
- fragmentWaiting = true
- if !a.configOverrides.DisableTimers {
- a.logger.Trace("reset global fragment timer")
- timer.Reset(sendInterval)
- }
- }
- case <-timer.C:
- a.logger.Trace("sending global fragment on timer expiration")
- fragmentWaiting = false
- sendFunc()
- case <-a.sendCh:
- a.logger.Trace("sending global fragment on request")
- // It might be that we get sendCh before fragmentCh
- // if a fragment is created and then immediately fills
- // up to its limit. So we attempt to send even if the timer's
- // not running.
- if fragmentWaiting {
- fragmentWaiting = false
- if !timer.Stop() {
- <-timer.C
- }
- }
- sendFunc()
- case <-endOfMonth.C:
- a.logger.Trace("sending global fragment on end of month")
- // Flush the current fragment, if any
- if fragmentWaiting {
- fragmentWaiting = false
- if !timer.Stop() {
- <-timer.C
- }
- }
- sendFunc()
-
- // clear active entity set
- a.globalFragmentLock.Lock()
- a.globalPartialMonthClientTracker = make(map[string]*activity.EntityRecord)
-
- a.globalFragmentLock.Unlock()
-
- // Set timer for next month.
- // The current segment *probably* hasn't been set yet (via invalidation),
- // so don't rely on it.
- target := timeutil.StartOfNextMonth(a.clock.Now().UTC())
- endOfMonth.Reset(target.Sub(a.clock.Now()))
- }
- }
-}
-
// perfStandbyFragmentWorker handles scheduling fragments
// to send via RPC; it runs on perf standby nodes only.
func (a *ActivityLog) perfStandbyFragmentWorker(ctx context.Context) {
@@ -1804,12 +1388,6 @@ func (a *ActivityLog) perfStandbyFragmentWorker(ctx context.Context) {
endOfMonth.Stop()
}
- sendInterval := activityFragmentSendInterval
- // This changes the interval to a duration that was set for testing purposes
- if a.configOverrides.PerfStandbyFragmentSendInterval.Microseconds() > 0 {
- sendInterval = a.configOverrides.PerfStandbyFragmentSendInterval
- }
-
sendFunc := func() {
ctx, cancel := context.WithTimeout(ctx, activityFragmentSendTimeout)
defer cancel()
@@ -1837,7 +1415,7 @@ func (a *ActivityLog) perfStandbyFragmentWorker(ctx context.Context) {
fragmentWaiting = true
if !a.configOverrides.DisableTimers {
a.logger.Trace("reset fragment timer")
- timer.Reset(sendInterval)
+ timer.Reset(activityFragmentStandbyTime)
}
}
case <-timer.C:
@@ -1874,11 +1452,6 @@ func (a *ActivityLog) perfStandbyFragmentWorker(ctx context.Context) {
a.fragmentLock.Unlock()
- // clear local active entity set
- a.localFragmentLock.Lock()
- a.partialMonthLocalClientTracker = make(map[string]*activity.EntityRecord)
- a.localFragmentLock.Unlock()
-
// Set timer for next month.
// The current segment *probably* hasn't been set yet (via invalidation),
// so don't rely on it.
@@ -1891,13 +1464,7 @@ func (a *ActivityLog) perfStandbyFragmentWorker(ctx context.Context) {
// activeFragmentWorker handles scheduling the write of the next
// segment. It runs on active nodes only.
func (a *ActivityLog) activeFragmentWorker(ctx context.Context) {
- writeInterval := activitySegmentInterval
- // This changes the interval to a duration that was set for testing purposes
- if a.configOverrides.StorageWriteTestingInterval.Microseconds() > 0 {
- writeInterval = a.configOverrides.StorageWriteTestingInterval
- }
-
- ticker := a.clock.NewTicker(writeInterval)
+ ticker := a.clock.NewTicker(activitySegmentInterval)
endOfMonth := a.clock.NewTimer(a.StartOfNextMonth().Sub(a.clock.Now()))
if a.configOverrides.DisableTimers {
@@ -2010,12 +1577,7 @@ func (a *ActivityLog) HandleEndOfMonth(ctx context.Context, currentTime time.Tim
// in the previous month, and recover by calling newMonthCurrentLog
// again and triggering the precomputed query.
a.fragmentLock.Lock()
- // calls newMonthCurrentLogLocked which is protected by fragmentLock, globalFragmentLock, localFragmentLock and l
- a.localFragmentLock.Lock()
- a.globalFragmentLock.Lock()
a.newMonthCurrentLogLocked(currentTime)
- a.globalFragmentLock.Unlock()
- a.localFragmentLock.Unlock()
a.fragmentLock.Unlock()
// Work on precomputed queries in background
@@ -2061,29 +1623,13 @@ func (c *Core) ResetActivityLog() []*activity.LogFragment {
allFragments := make([]*activity.LogFragment, 1)
a.fragmentLock.Lock()
-
allFragments[0] = a.fragment
a.fragment = nil
+
allFragments = append(allFragments, a.standbyFragmentsReceived...)
a.standbyFragmentsReceived = make([]*activity.LogFragment, 0)
- a.secondaryGlobalClientFragments = make([]*activity.LogFragment, 0)
a.partialMonthClientTracker = make(map[string]*activity.EntityRecord)
a.fragmentLock.Unlock()
-
- // local fragments
- a.localFragmentLock.Lock()
- allFragments = append(allFragments, a.localFragment)
- a.localFragment = nil
- allFragments = append(allFragments, a.standbyLocalFragmentsReceived...)
- a.standbyLocalFragmentsReceived = make([]*activity.LogFragment, 0)
- a.partialMonthLocalClientTracker = make(map[string]*activity.EntityRecord)
- a.localFragmentLock.Unlock()
-
- // global fragments
- a.globalFragmentLock.Lock()
- a.globalPartialMonthClientTracker = make(map[string]*activity.EntityRecord)
- a.standbyGlobalFragmentsReceived = make([]*activity.LogFragment, 0)
- a.globalFragmentLock.Unlock()
return allFragments
}
@@ -2121,16 +1667,11 @@ func (a *ActivityLog) AddActivityToFragment(clientID string, namespaceID string,
a.fragmentLock.RLock()
if a.enabled {
- _, presentInRegularClientMap := a.partialMonthClientTracker[clientID]
- _, presentInLocalClientmap := a.partialMonthLocalClientTracker[clientID]
- if presentInRegularClientMap || presentInLocalClientmap {
- present = true
- }
+ _, present = a.partialMonthClientTracker[clientID]
} else {
present = true
}
a.fragmentLock.RUnlock()
-
if present {
return
}
@@ -2139,23 +1680,12 @@ func (a *ActivityLog) AddActivityToFragment(clientID string, namespaceID string,
a.fragmentLock.Lock()
defer a.fragmentLock.Unlock()
- a.localFragmentLock.Lock()
- defer a.localFragmentLock.Unlock()
-
- a.globalFragmentLock.Lock()
- defer a.globalFragmentLock.Unlock()
-
// Re-check entity ID after re-acquiring lock
- _, presentInRegularClientMap := a.partialMonthClientTracker[clientID]
- _, presentInLocalClientmap := a.partialMonthLocalClientTracker[clientID]
- if presentInRegularClientMap || presentInLocalClientmap {
- present = true
- }
+ _, present = a.partialMonthClientTracker[clientID]
if present {
return
}
- // create fragments if doesn't already exist
a.createCurrentFragment()
clientRecord := &activity.EntityRecord{
@@ -2174,73 +1704,20 @@ func (a *ActivityLog) AddActivityToFragment(clientID string, namespaceID string,
clientRecord.NonEntity = true
}
- // add the clients to the regular fragment
a.fragment.Clients = append(a.fragment.Clients, clientRecord)
a.partialMonthClientTracker[clientRecord.ClientID] = clientRecord
-
- if local, _ := a.isClientLocal(clientRecord); local {
- // If the client is local then add the client to the current local fragment
- a.localFragment.Clients = append(a.localFragment.Clients, clientRecord)
- a.partialMonthLocalClientTracker[clientRecord.ClientID] = clientRecord
- } else {
- if _, ok := a.globalPartialMonthClientTracker[clientRecord.ClientID]; !ok {
- // If the client is not local and has not already been seen, then add the client
- // to the current global fragment
- a.currentGlobalFragment.Clients = append(a.currentGlobalFragment.Clients, clientRecord)
- a.globalPartialMonthClientTracker[clientRecord.ClientID] = clientRecord
- }
- }
-}
-
-// isClientLocal checks whether the given client is on a local mount.
-// In all other cases, we will assume it is a global client.
-func (a *ActivityLog) isClientLocal(client *activity.EntityRecord) (bool, error) {
- if !utf8.ValidString(client.ClientID) {
- return false, fmt.Errorf("client ID %q is not a valid string", client.ClientID)
- }
- // Tokens are not replicated to performance secondary clusters
- if client.GetClientType() == nonEntityTokenActivityType {
- return true, nil
- }
- mountEntry := a.core.router.MatchingMountByAccessor(client.MountAccessor)
- // If the mount entry is nil, this means the mount has been deleted. We will assume it was replicated because we do not want to
- // over count clients
- if mountEntry != nil && mountEntry.Local {
- return true, nil
- }
-
- return false, nil
}
-// Create the fragments (regular fragment, local fragment and global fragment) if it doesn't already exist.
-// Must be called with the fragmentLock, localFragmentLock and globalFragmentLock held.
+// Create the current fragment if it doesn't already exist.
+// Must be called with the lock held.
func (a *ActivityLog) createCurrentFragment() {
if a.fragment == nil {
- // create regular fragment
a.fragment = &activity.LogFragment{
OriginatingNode: a.nodeID,
Clients: make([]*activity.EntityRecord, 0, 120),
NonEntityTokens: make(map[string]uint64),
}
-
- // create local fragment
- a.localFragment = &activity.LogFragment{
- OriginatingNode: a.nodeID,
- Clients: make([]*activity.EntityRecord, 0, 120),
- NonEntityTokens: make(map[string]uint64),
- }
-
- // create global fragment
- a.currentGlobalFragment = &activity.LogFragment{
- OriginatingCluster: a.core.ClusterID(),
- Clients: make([]*activity.EntityRecord, 0),
- }
-
- if a.core.IsPerfSecondary() {
- // Signal that a new global segment is available, start
- // the timer to send it
- a.newGlobalClientFragmentCh <- struct{}{}
- }
+ a.fragmentCreation = a.clock.Now().UTC()
// Signal that a new segment is available, start
// the timer to send it.
@@ -2248,81 +1725,25 @@ func (a *ActivityLog) createCurrentFragment() {
}
}
-func (a *ActivityLog) receivedGlobalClientFragments(fragment *activity.LogFragment) {
- a.logger.Trace("received fragment from secondary", "cluster_id", fragment.GetOriginatingCluster())
-
- a.globalFragmentLock.Lock()
- defer a.globalFragmentLock.Unlock()
-
- if !a.enabled {
- return
- }
-
- for _, e := range fragment.Clients {
- a.globalPartialMonthClientTracker[e.ClientID] = e
- }
-
- a.secondaryGlobalClientFragments = append(a.secondaryGlobalClientFragments, fragment)
-}
-
func (a *ActivityLog) receivedFragment(fragment *activity.LogFragment) {
a.logger.Trace("received fragment from standby", "node", fragment.OriginatingNode)
- isLocalFragment := false
- if !a.enabled {
- return
- }
-
a.fragmentLock.Lock()
defer a.fragmentLock.Unlock()
- // Check if the received fragment from standby is a local fragment.
- // A fragment can have all local clients or all non-local clients except for regular fragment (which has both currently but will be modified to only hold non-local clients later).
- // Check the first client to identify the type of fragment.
- if len(fragment.Clients) > 0 {
- client := fragment.Clients[0]
- if local, _ := a.isClientLocal(client); local {
- isLocalFragment = true
-
- a.localFragmentLock.Lock()
- defer a.localFragmentLock.Unlock()
- } else {
- a.globalFragmentLock.Lock()
- defer a.globalFragmentLock.Unlock()
- }
+ if !a.enabled {
+ return
}
for _, e := range fragment.Clients {
a.partialMonthClientTracker[e.ClientID] = e
- if isLocalFragment {
- a.partialMonthLocalClientTracker[e.ClientID] = e
- } else {
- a.globalPartialMonthClientTracker[e.ClientID] = e
- }
}
a.standbyFragmentsReceived = append(a.standbyFragmentsReceived, fragment)
- if isLocalFragment {
- a.standbyLocalFragmentsReceived = append(a.standbyLocalFragmentsReceived, fragment)
- } else {
- a.standbyGlobalFragmentsReceived = append(a.standbyGlobalFragmentsReceived, fragment)
- }
-
// TODO: check if current segment is full and should be written
}
-// returns the active local and global clients for the current month
-func (a *ActivityLog) GetAllPartialMonthClients() (map[string]*activity.EntityRecord, map[string]*activity.EntityRecord) {
- a.localFragmentLock.Lock()
- defer a.localFragmentLock.Unlock()
-
- a.globalFragmentLock.Lock()
- defer a.globalFragmentLock.Unlock()
-
- return a.partialMonthLocalClientTracker, a.globalPartialMonthClientTracker
-}
-
type ResponseCounts struct {
EntityClients int `json:"entity_clients" mapstructure:"entity_clients"`
NonEntityClients int `json:"non_entity_clients" mapstructure:"non_entity_clients"`
@@ -3264,7 +2685,6 @@ func (a *ActivityLog) retentionWorker(ctx context.Context, currentTime time.Time
// Periodic report of number of active entities, with the current month.
// We don't break this down by namespace because that would require going to storage (that information
// is not currently stored in memory.)
-// TODO: to deprecate. These metrics are not useful anymore
func (a *ActivityLog) PartialMonthMetrics(ctx context.Context) ([]metricsutil.GaugeLabelValues, error) {
a.fragmentLock.RLock()
defer a.fragmentLock.RUnlock()
@@ -3301,9 +2721,6 @@ func (a *ActivityLog) populateNamespaceAndMonthlyBreakdowns() (map[int64]*proces
for _, e := range a.partialMonthClientTracker {
processClientRecord(e, byNamespace, byMonth, a.clock.Now())
}
- for _, e := range a.partialMonthLocalClientTracker {
- processClientRecord(e, byNamespace, byMonth, a.clock.Now())
- }
return byMonth, byNamespace
}
diff --git a/vault/activity_log_stubs_oss.go b/vault/activity_log_stubs_oss.go
deleted file mode 100644
index 7d2457360563..000000000000
--- a/vault/activity_log_stubs_oss.go
+++ /dev/null
@@ -1,15 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-//go:build !enterprise
-
-package vault
-
-import "context"
-
-//go:generate go run github.com/hashicorp/vault/tools/stubmaker
-
-// sendGlobalClients is a no-op on CE
-func (a *ActivityLog) sendGlobalClients(ctx context.Context) error {
- return nil
-}
diff --git a/vault/activity_log_test.go b/vault/activity_log_test.go
index 4742d11467b8..81a691dadbed 100644
--- a/vault/activity_log_test.go
+++ b/vault/activity_log_test.go
@@ -9,7 +9,6 @@ import (
"encoding/json"
"errors"
"fmt"
- "io"
"net/http"
"reflect"
"sort"
@@ -24,7 +23,6 @@ import (
"github.com/go-test/deep"
"github.com/golang/protobuf/proto"
"github.com/hashicorp/go-uuid"
- "github.com/hashicorp/vault/builtin/credential/userpass"
"github.com/hashicorp/vault/helper/constants"
"github.com/hashicorp/vault/helper/namespace"
"github.com/hashicorp/vault/helper/timeutil"
@@ -36,16 +34,7 @@ import (
// TestActivityLog_Creation calls AddEntityToFragment and verifies that it appears correctly in a.fragment.
func TestActivityLog_Creation(t *testing.T) {
- storage := &logical.InmemStorage{}
- coreConfig := &CoreConfig{
- CredentialBackends: map[string]logical.Factory{
- "userpass": userpass.Factory,
- },
- Physical: storage.Underlying(),
- }
-
- cluster := NewTestCluster(t, coreConfig, nil)
- core := cluster.Cores[0].Core
+ core, _, _ := TestCoreUnsealed(t)
a := core.activityLog
a.SetEnable(true)
@@ -56,31 +45,24 @@ func TestActivityLog_Creation(t *testing.T) {
if a.logger == nil || a.view == nil {
t.Fatal("activity log not initialized")
}
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Fatal("activity log already has fragment")
}
- if a.localFragment != nil {
- t.Fatal("activity log already has a local fragment")
- }
-
const entity_id = "entity_id_75432"
const namespace_id = "ns123"
ts := time.Now()
a.AddEntityToFragment(entity_id, namespace_id, ts.Unix())
- if a.fragment == nil || a.currentGlobalFragment == nil {
+ if a.fragment == nil {
t.Fatal("no fragment created")
}
if a.fragment.OriginatingNode != a.nodeID {
t.Errorf("mismatched node ID, %q vs %q", a.fragment.OriginatingNode, a.nodeID)
}
- if a.currentGlobalFragment.OriginatingCluster != a.core.ClusterID() {
- t.Errorf("mismatched cluster ID, %q vs %q", a.currentGlobalFragment.GetOriginatingCluster(), a.core.ClusterID())
- }
- if a.fragment.Clients == nil || a.currentGlobalFragment.Clients == nil {
+ if a.fragment.Clients == nil {
t.Fatal("no fragment entity slice")
}
@@ -91,9 +73,6 @@ func TestActivityLog_Creation(t *testing.T) {
if len(a.fragment.Clients) != 1 {
t.Fatalf("wrong number of entities %v", len(a.fragment.Clients))
}
- if len(a.currentGlobalFragment.Clients) != 1 {
- t.Fatalf("wrong number of entities %v", len(a.currentGlobalFragment.Clients))
- }
er := a.fragment.Clients[0]
if er.ClientID != entity_id {
@@ -106,17 +85,6 @@ func TestActivityLog_Creation(t *testing.T) {
t.Errorf("mimatched timestamp, %v vs %v", er.Timestamp, ts.Unix())
}
- er = a.currentGlobalFragment.Clients[0]
- if er.ClientID != entity_id {
- t.Errorf("mimatched entity ID, %q vs %q", er.ClientID, entity_id)
- }
- if er.NamespaceID != namespace_id {
- t.Errorf("mimatched namespace ID, %q vs %q", er.NamespaceID, namespace_id)
- }
- if er.Timestamp != ts.Unix() {
- t.Errorf("mimatched timestamp, %v vs %v", er.Timestamp, ts.Unix())
- }
-
// Reset and test the other code path
a.fragment = nil
a.AddTokenToFragment(namespace_id)
@@ -133,49 +101,6 @@ func TestActivityLog_Creation(t *testing.T) {
if actual != 1 {
t.Errorf("mismatched number of tokens, %v vs %v", actual, 1)
}
-
- // test local fragment
- localMe := &MountEntry{
- Table: credentialTableType,
- Path: "userpass-local/",
- Type: "userpass",
- Local: true,
- Accessor: "local_mount_accessor",
- }
- err := core.enableCredential(namespace.RootContext(nil), localMe)
- require.NoError(t, err)
-
- const local_entity_id = "entity_id_75434"
- local_ts := time.Now()
-
- a.AddClientToFragment(local_entity_id, "root", local_ts.Unix(), false, "local_mount_accessor")
-
- if a.localFragment.OriginatingNode != a.nodeID {
- t.Errorf("mismatched node ID, %q vs %q", a.localFragment.OriginatingNode, a.nodeID)
- }
-
- if a.localFragment.Clients == nil {
- t.Fatal("no local fragment entity slice")
- }
-
- if a.localFragment.NonEntityTokens == nil {
- t.Fatal("no local fragment token map")
- }
-
- if len(a.localFragment.Clients) != 1 {
- t.Fatalf("wrong number of entities %v", len(a.localFragment.Clients))
- }
-
- er = a.localFragment.Clients[0]
- if er.ClientID != local_entity_id {
- t.Errorf("mimatched entity ID, %q vs %q", er.ClientID, local_entity_id)
- }
- if er.NamespaceID != "root" {
- t.Errorf("mimatched namespace ID, %q vs %q", er.NamespaceID, "root")
- }
- if er.Timestamp != ts.Unix() {
- t.Errorf("mimatched timestamp, %v vs %v", er.Timestamp, ts.Unix())
- }
}
// TestActivityLog_Creation_WrappingTokens calls HandleTokenUsage for two wrapping tokens, and verifies that this
@@ -193,17 +118,10 @@ func TestActivityLog_Creation_WrappingTokens(t *testing.T) {
t.Fatal("activity log not initialized")
}
a.fragmentLock.Lock()
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Fatal("activity log already has fragment")
}
a.fragmentLock.Unlock()
-
- a.localFragmentLock.Lock()
- if a.localFragment != nil {
- t.Fatal("activity log already has local fragment")
- }
- a.localFragmentLock.Unlock()
-
const namespace_id = "ns123"
te := &logical.TokenEntry{
@@ -221,7 +139,7 @@ func TestActivityLog_Creation_WrappingTokens(t *testing.T) {
}
a.fragmentLock.Lock()
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Fatal("fragment created")
}
a.fragmentLock.Unlock()
@@ -241,7 +159,7 @@ func TestActivityLog_Creation_WrappingTokens(t *testing.T) {
}
a.fragmentLock.Lock()
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Fatal("fragment created")
}
a.fragmentLock.Unlock()
@@ -280,16 +198,13 @@ func TestActivityLog_UniqueEntities(t *testing.T) {
a.AddEntityToFragment(id2, "root", t3.Unix())
a.AddEntityToFragment(id1, "root", t3.Unix())
- if a.fragment == nil || a.currentGlobalFragment == nil {
+ if a.fragment == nil {
t.Fatal("no current fragment")
}
if len(a.fragment.Clients) != 2 {
t.Fatalf("number of entities is %v", len(a.fragment.Clients))
}
- if len(a.currentGlobalFragment.Clients) != 2 {
- t.Fatalf("number of entities is %v", len(a.currentGlobalFragment.Clients))
- }
for i, e := range a.fragment.Clients {
expectedID := id1
@@ -309,24 +224,6 @@ func TestActivityLog_UniqueEntities(t *testing.T) {
t.Errorf("%v: expected %v, got %v", i, expectedTime, e.Timestamp)
}
}
- for i, e := range a.currentGlobalFragment.Clients {
- expectedID := id1
- expectedTime := t1.Unix()
- expectedNS := "root"
- if i == 1 {
- expectedID = id2
- expectedTime = t2.Unix()
- }
- if e.ClientID != expectedID {
- t.Errorf("%v: expected %q, got %q", i, expectedID, e.ClientID)
- }
- if e.NamespaceID != expectedNS {
- t.Errorf("%v: expected %q, got %q", i, expectedNS, e.NamespaceID)
- }
- if e.Timestamp != expectedTime {
- t.Errorf("%v: expected %v, got %v", i, expectedTime, e.Timestamp)
- }
- }
checkExpectedEntitiesInMap(t, a, []string{id1, id2})
}
@@ -400,7 +297,7 @@ func TestActivityLog_SaveTokensToStorage(t *testing.T) {
a.SetStartTimestamp(time.Now().Unix()) // set a nonzero segment
nsIDs := [...]string{"ns1_id", "ns2_id", "ns3_id"}
- path := fmt.Sprintf("%sdirecttokens/%d/0", ActivityLogLocalPrefix, a.GetStartTimestamp())
+ path := fmt.Sprintf("%sdirecttokens/%d/0", ActivityLogPrefix, a.GetStartTimestamp())
for i := 0; i < 3; i++ {
a.AddTokenToFragment(nsIDs[0])
@@ -410,14 +307,10 @@ func TestActivityLog_SaveTokensToStorage(t *testing.T) {
if err != nil {
t.Fatalf("got error writing tokens to storage: %v", err)
}
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Errorf("fragment was not reset after write to storage")
}
- if a.localFragment != nil {
- t.Errorf("local fragment was not reset after write to storage")
- }
-
out := &activity.TokenCount{}
protoSegment := readSegmentFromStorage(t, core, path)
err = proto.Unmarshal(protoSegment.Value, out)
@@ -446,14 +339,10 @@ func TestActivityLog_SaveTokensToStorage(t *testing.T) {
if err != nil {
t.Fatalf("got error writing tokens to storage: %v", err)
}
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Errorf("fragment was not reset after write to storage")
}
- if a.localFragment != nil {
- t.Errorf("local fragment was not reset after write to storage")
- }
-
protoSegment = readSegmentFromStorage(t, core, path)
out = &activity.TokenCount{}
err = proto.Unmarshal(protoSegment.Value, out)
@@ -491,7 +380,7 @@ func TestActivityLog_SaveTokensToStorageDoesNotUpdateTokenCount(t *testing.T) {
a.SetStandbyEnable(ctx, true)
a.SetStartTimestamp(time.Now().Unix()) // set a nonzero segment
- tokenPath := fmt.Sprintf("%sdirecttokens/%d/0", ActivityLogLocalPrefix, a.GetStartTimestamp())
+ tokenPath := fmt.Sprintf("%sdirecttokens/%d/0", ActivityLogPrefix, a.GetStartTimestamp())
clientPath := fmt.Sprintf("sys/counters/activity/log/entity/%d/0", a.GetStartTimestamp())
// Create some entries without entityIDs
tokenEntryOne := logical.TokenEntry{NamespaceID: namespace.RootNamespaceID, Policies: []string{"hi"}}
@@ -519,14 +408,10 @@ func TestActivityLog_SaveTokensToStorageDoesNotUpdateTokenCount(t *testing.T) {
}
// Assert that new elements have been written to the fragment
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Errorf("fragment was not reset after write to storage")
}
- if a.localFragment != nil {
- t.Errorf("local fragment was not reset after write to storage")
- }
-
// Assert that no tokens have been written to the fragment
readSegmentFromStorageNil(t, core, tokenPath)
@@ -579,7 +464,6 @@ func TestActivityLog_SaveEntitiesToStorage(t *testing.T) {
now.Add(2 * time.Second).Unix(),
}
path := fmt.Sprintf("%sentity/%d/0", ActivityLogPrefix, a.GetStartTimestamp())
- globalPath := fmt.Sprintf("%sentity/%d/0", ActivityGlobalLogPrefix, a.GetStartTimestamp())
a.AddEntityToFragment(ids[0], "root", times[0])
a.AddEntityToFragment(ids[1], "root2", times[1])
@@ -587,13 +471,10 @@ func TestActivityLog_SaveEntitiesToStorage(t *testing.T) {
if err != nil {
t.Fatalf("got error writing entities to storage: %v", err)
}
- if a.fragment != nil || a.currentGlobalFragment != nil {
+ if a.fragment != nil {
t.Errorf("fragment was not reset after write to storage")
}
- if a.localFragment != nil {
- t.Errorf("local fragment was not reset after write to storage")
- }
protoSegment := readSegmentFromStorage(t, core, path)
out := &activity.EntityActivityLog{}
err = proto.Unmarshal(protoSegment.Value, out)
@@ -616,99 +497,6 @@ func TestActivityLog_SaveEntitiesToStorage(t *testing.T) {
t.Fatalf("could not unmarshal protobuf: %v", err)
}
expectedEntityIDs(t, out, ids)
-
- protoSegment = readSegmentFromStorage(t, core, globalPath)
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
- if err != nil {
- t.Fatalf("could not unmarshal protobuf: %v", err)
- }
- expectedEntityIDs(t, out, ids)
-}
-
-// TestActivityLog_SaveEntitiesToStorageCommon calls AddClientToFragment with clients with local and non-local mount accessors and then
-// writes the segment to storage. Read back from storage, and verify that client IDs exist in storage in the right local and non-local entity paths.
-func TestActivityLog_SaveEntitiesToStorageCommon(t *testing.T) {
- t.Parallel()
-
- storage := &logical.InmemStorage{}
- coreConfig := &CoreConfig{
- CredentialBackends: map[string]logical.Factory{
- "userpass": userpass.Factory,
- },
- Physical: storage.Underlying(),
- }
-
- cluster := NewTestCluster(t, coreConfig, nil)
- core := cluster.Cores[0].Core
- TestWaitActive(t, core)
-
- ctx := namespace.RootContext(nil)
-
- a := core.activityLog
- a.SetEnable(true)
- a.SetStartTimestamp(time.Now().Unix()) // set a nonzero segment
-
- var err error
-
- // create a local and non-local mount entry
- nonLocalMountEntry := &MountEntry{
- Table: credentialTableType,
- Path: "nonLocalUserpass/",
- Type: "userpass",
- Accessor: "nonLocalMountAccessor",
- }
- err = core.enableCredential(ctx, nonLocalMountEntry)
- require.NoError(t, err)
-
- localMountEntry := &MountEntry{
- Table: credentialTableType,
- Path: "localUserpass/",
- Local: true,
- Type: "userpass",
- Accessor: "localMountAccessor",
- }
- err = core.enableCredential(ctx, localMountEntry)
- require.NoError(t, err)
-
- now := time.Now()
- ids := []string{"non-local-client-id-1", "non-local-client-id-2", "local-client-id-1"}
-
- globalPath := fmt.Sprintf("%sentity/%d/0", ActivityGlobalLogPrefix, a.GetStartTimestamp())
- localPath := fmt.Sprintf("%sentity/%d/0", ActivityLogLocalPrefix, a.GetStartTimestamp())
-
- // add clients with local and non-local mount accessors
- a.AddClientToFragment(ids[0], "root", now.Unix(), false, "nonLocalMountAccessor")
- a.AddClientToFragment(ids[1], "root", now.Unix(), false, "nonLocalMountAccessor")
- a.AddClientToFragment(ids[2], "root", now.Unix(), false, "localMountAccessor")
-
- err = a.saveCurrentSegmentToStorage(ctx, false)
- if err != nil {
- t.Fatalf("got error writing entities to storage: %v", err)
- }
- if a.fragment != nil {
- t.Errorf("fragment was not reset after write to storage")
- }
-
- // read entity ids from non-local entity storage path
- protoSegment := readSegmentFromStorage(t, core, globalPath)
- out := &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
- if err != nil {
- t.Fatalf("could not unmarshal protobuf: %v", err)
- }
- expectedEntityIDs(t, out, ids[:2])
-
- // read entity ids from local entity storage path
- protoSegment = readSegmentFromStorage(t, core, localPath)
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
- if err != nil {
- t.Fatalf("could not unmarshal protobuf: %v", err)
- }
-
- // local entity is local-client-id-1 in ids with index 2
- expectedEntityIDs(t, out, ids[2:])
}
// TestActivityLog_StoreAndReadHyperloglog inserts into a hyperloglog, stores it and then reads it back. The test
@@ -775,8 +563,8 @@ func TestModifyResponseMonthsNilAppend(t *testing.T) {
}
// TestActivityLog_ReceivedFragment calls receivedFragment with a fragment and verifies it gets added to
-// standbyFragmentsReceived and standbyGlobalFragmentsReceived. Send the same fragment again and then verify that it doesn't change the entity map but does
-// get added to standbyFragmentsReceived and standbyGlobalFragmentsReceived.
+// standbyFragmentsReceived. Send the same fragment again and then verify that it doesn't change the entity map but does
+// get added to standbyFragmentsReceived.
func TestActivityLog_ReceivedFragment(t *testing.T) {
core, _, _ := TestCoreUnsealed(t)
a := core.activityLog
@@ -818,10 +606,6 @@ func TestActivityLog_ReceivedFragment(t *testing.T) {
t.Fatalf("fragment count is %v, expected 1", len(a.standbyFragmentsReceived))
}
- if len(a.standbyGlobalFragmentsReceived) != 1 {
- t.Fatalf("fragment count is %v, expected 1", len(a.standbyGlobalFragmentsReceived))
- }
-
// Send a duplicate, should be stored but not change entity map
a.receivedFragment(fragment)
@@ -830,9 +614,6 @@ func TestActivityLog_ReceivedFragment(t *testing.T) {
if len(a.standbyFragmentsReceived) != 2 {
t.Fatalf("fragment count is %v, expected 2", len(a.standbyFragmentsReceived))
}
- if len(a.standbyGlobalFragmentsReceived) != 2 {
- t.Fatalf("fragment count is %v, expected 2", len(a.standbyGlobalFragmentsReceived))
- }
}
// TestActivityLog_availableLogsEmptyDirectory verifies that availableLogs returns an empty slice when the log directory
@@ -856,18 +637,13 @@ func TestActivityLog_availableLogs(t *testing.T) {
// set up a few files in storage
core, _, _ := TestCoreUnsealed(t)
a := core.activityLog
- paths := [...]string{"entity/1111/1", "entity/992/3"}
- tokenPaths := [...]string{"directtokens/1111/1", "directtokens/1000000/1", "directtokens/992/1"}
+ paths := [...]string{"entity/1111/1", "directtokens/1111/1", "directtokens/1000000/1", "entity/992/3", "directtokens/992/1"}
expectedTimes := [...]time.Time{time.Unix(1000000, 0), time.Unix(1111, 0), time.Unix(992, 0)}
for _, path := range paths {
WriteToStorage(t, core, ActivityLogPrefix+path, []byte("test"))
}
- for _, path := range tokenPaths {
- WriteToStorage(t, core, ActivityLogLocalPrefix+path, []byte("test"))
- }
-
// verify above files are there, and dates in correct order
times, err := a.availableLogs(context.Background(), time.Now())
if err != nil {
@@ -1002,7 +778,7 @@ func TestActivityLog_MultipleFragmentsAndSegments(t *testing.T) {
path0 := fmt.Sprintf("sys/counters/activity/log/entity/%d/0", startTimestamp)
path1 := fmt.Sprintf("sys/counters/activity/log/entity/%d/1", startTimestamp)
path2 := fmt.Sprintf("sys/counters/activity/log/entity/%d/2", startTimestamp)
- tokenPath := fmt.Sprintf("sys/counters/activity/local/log/directtokens/%d/0", startTimestamp)
+ tokenPath := fmt.Sprintf("sys/counters/activity/log/directtokens/%d/0", startTimestamp)
genID := func(i int) string {
return fmt.Sprintf("11111111-1111-1111-1111-%012d", i)
@@ -1306,65 +1082,45 @@ func TestActivityLog_getLastEntitySegmentNumber(t *testing.T) {
core, _, _ := TestCoreUnsealed(t)
a := core.activityLog
paths := [...]string{"entity/992/0", "entity/1000/-1", "entity/1001/foo", "entity/1111/0", "entity/1111/1"}
- globalPaths := [...]string{"entity/992/0", "entity/1000/-1", "entity/1001/foo", "entity/1111/1"}
- localPaths := [...]string{"entity/992/0", "entity/1000/-1", "entity/1001/foo", "entity/1111/0", "entity/1111/1"}
for _, path := range paths {
WriteToStorage(t, core, ActivityLogPrefix+path, []byte("test"))
}
- for _, path := range globalPaths {
- WriteToStorage(t, core, ActivityGlobalLogPrefix+path, []byte("test"))
- }
- for _, path := range localPaths {
- WriteToStorage(t, core, ActivityLogLocalPrefix+path, []byte("test"))
- }
testCases := []struct {
- input int64
- expectedVal uint64
- expectedGlobalVal uint64
- expectedLocalVal uint64
- expectExists bool
+ input int64
+ expectedVal uint64
+ expectExists bool
}{
{
- input: 992,
- expectedVal: 0,
- expectedGlobalVal: 0,
- expectedLocalVal: 0,
- expectExists: true,
+ input: 992,
+ expectedVal: 0,
+ expectExists: true,
},
{
- input: 1000,
- expectedVal: 0,
- expectedGlobalVal: 0,
- expectedLocalVal: 0,
- expectExists: false,
+ input: 1000,
+ expectedVal: 0,
+ expectExists: false,
},
{
- input: 1001,
- expectedVal: 0,
- expectedGlobalVal: 0,
- expectedLocalVal: 0,
- expectExists: false,
+ input: 1001,
+ expectedVal: 0,
+ expectExists: false,
},
{
- input: 1111,
- expectedVal: 1,
- expectedGlobalVal: 1,
- expectedLocalVal: 1,
- expectExists: true,
+ input: 1111,
+ expectedVal: 1,
+ expectExists: true,
},
{
- input: 2222,
- expectedVal: 0,
- expectedGlobalVal: 0,
- expectedLocalVal: 0,
- expectExists: false,
+ input: 2222,
+ expectedVal: 0,
+ expectExists: false,
},
}
ctx := context.Background()
for _, tc := range testCases {
- result, localSegmentNumber, globalSegmentNumber, exists, err := a.getLastEntitySegmentNumber(ctx, time.Unix(tc.input, 0))
+ result, exists, err := a.getLastEntitySegmentNumber(ctx, time.Unix(tc.input, 0))
if err != nil {
t.Fatalf("unexpected error for input %d: %v", tc.input, err)
}
@@ -1374,12 +1130,6 @@ func TestActivityLog_getLastEntitySegmentNumber(t *testing.T) {
if result != tc.expectedVal {
t.Errorf("expected: %d got: %d for input: %d", tc.expectedVal, result, tc.input)
}
- if globalSegmentNumber != tc.expectedGlobalVal {
- t.Errorf("expected: %d got: %d for input: %d", tc.expectedGlobalVal, globalSegmentNumber, tc.input)
- }
- if localSegmentNumber != tc.expectedLocalVal {
- t.Errorf("expected: %d got: %d for input: %d", tc.expectedLocalVal, localSegmentNumber, tc.input)
- }
}
}
@@ -1390,7 +1140,7 @@ func TestActivityLog_tokenCountExists(t *testing.T) {
a := core.activityLog
paths := [...]string{"directtokens/992/0", "directtokens/1001/foo", "directtokens/1111/0", "directtokens/2222/1"}
for _, path := range paths {
- WriteToStorage(t, core, ActivityLogLocalPrefix+path, []byte("test"))
+ WriteToStorage(t, core, ActivityLogPrefix+path, []byte("test"))
}
testCases := []struct {
@@ -1495,16 +1245,8 @@ func (a *ActivityLog) resetEntitiesInMemory(t *testing.T) {
a.l.Lock()
defer a.l.Unlock()
-
a.fragmentLock.Lock()
defer a.fragmentLock.Unlock()
-
- a.localFragmentLock.Lock()
- defer a.localFragmentLock.Unlock()
-
- a.globalFragmentLock.Lock()
- defer a.globalFragmentLock.Unlock()
-
a.currentSegment = segmentInfo{
startTimestamp: time.Time{}.Unix(),
currentClients: &activity.EntityActivityLog{
@@ -1514,27 +1256,7 @@ func (a *ActivityLog) resetEntitiesInMemory(t *testing.T) {
clientSequenceNumber: 0,
}
- a.currentGlobalSegment = segmentInfo{
- startTimestamp: time.Time{}.Unix(),
- currentClients: &activity.EntityActivityLog{
- Clients: make([]*activity.EntityRecord, 0),
- },
- tokenCount: a.currentGlobalSegment.tokenCount,
- clientSequenceNumber: 0,
- }
-
- a.currentLocalSegment = segmentInfo{
- startTimestamp: time.Time{}.Unix(),
- currentClients: &activity.EntityActivityLog{
- Clients: make([]*activity.EntityRecord, 0),
- },
- tokenCount: a.currentLocalSegment.tokenCount,
- clientSequenceNumber: 0,
- }
-
a.partialMonthClientTracker = make(map[string]*activity.EntityRecord)
- a.partialMonthLocalClientTracker = make(map[string]*activity.EntityRecord)
- a.globalPartialMonthClientTracker = make(map[string]*activity.EntityRecord)
}
// TestActivityLog_loadCurrentClientSegment writes entity segments and calls loadCurrentClientSegment, then verifies
@@ -1550,7 +1272,6 @@ func TestActivityLog_loadCurrentClientSegment(t *testing.T) {
}
a.l.Lock()
a.currentSegment.tokenCount = tokenCount
- a.currentLocalSegment.tokenCount = tokenCount
a.l.Unlock()
// setup in-storage data to load for testing
@@ -1611,22 +1332,16 @@ func TestActivityLog_loadCurrentClientSegment(t *testing.T) {
t.Fatalf(err.Error())
}
WriteToStorage(t, core, ActivityLogPrefix+tc.path, data)
- WriteToStorage(t, core, ActivityGlobalLogPrefix+tc.path, data)
- WriteToStorage(t, core, ActivityLogLocalPrefix+tc.path, data)
}
ctx := context.Background()
for _, tc := range testCases {
a.l.Lock()
a.fragmentLock.Lock()
- a.globalFragmentLock.Lock()
- a.localFragmentLock.Lock()
// loadCurrentClientSegment requires us to grab the fragment lock and the
// activityLog lock, as per the comment in the loadCurrentClientSegment
// function
- err := a.loadCurrentClientSegment(ctx, time.Unix(tc.time, 0), tc.seqNum, tc.seqNum, tc.seqNum)
- a.localFragmentLock.Unlock()
- a.globalFragmentLock.Unlock()
+ err := a.loadCurrentClientSegment(ctx, time.Unix(tc.time, 0), tc.seqNum)
a.fragmentLock.Unlock()
a.l.Unlock()
@@ -1638,29 +1353,19 @@ func TestActivityLog_loadCurrentClientSegment(t *testing.T) {
}
// verify accurate data in in-memory current segment
- require.Equal(t, tc.time, a.GetStartTimestamp())
- require.Equal(t, tc.seqNum, a.GetEntitySequenceNumber())
- require.Equal(t, tc.seqNum, a.GetGlobalEntitySequenceNumber())
- require.Equal(t, tc.seqNum, a.GetLocalEntitySequenceNumber())
-
- currentEntities := a.GetCurrentEntities()
- if !entityRecordsEqual(t, currentEntities.Clients, tc.entities.Clients) {
- t.Errorf("bad data loaded. expected: %v, got: %v for path %q", tc.entities.Clients, currentEntities, tc.path)
+ startTimestamp := a.GetStartTimestamp()
+ if startTimestamp != tc.time {
+ t.Errorf("bad timestamp loaded. expected: %v, got: %v for path %q", tc.time, startTimestamp, tc.path)
}
- globalClients := core.GetActiveGlobalClientsList()
- if err := ActiveEntitiesEqual(globalClients, tc.entities.Clients); err != nil {
- t.Errorf("bad data loaded into active global entities. expected only set of EntityID from %v in %v for path %q: %v", tc.entities.Clients, globalClients, tc.path, err)
+ seqNum := a.GetEntitySequenceNumber()
+ if seqNum != tc.seqNum {
+ t.Errorf("bad sequence number loaded. expected: %v, got: %v for path %q", tc.seqNum, seqNum, tc.path)
}
- localClients := core.GetActiveLocalClientsList()
- if err := ActiveEntitiesEqual(localClients, tc.entities.Clients); err != nil {
- t.Errorf("bad data loaded into active local entities. expected only set of EntityID from %v in %v for path %q: %v", tc.entities.Clients, localClients, tc.path, err)
- }
-
- currentGlobalEntities := a.GetCurrentGlobalEntities()
- if !entityRecordsEqual(t, currentGlobalEntities.Clients, tc.entities.Clients) {
- t.Errorf("bad data loaded. expected: %v, got: %v for path %q", tc.entities.Clients, currentGlobalEntities, tc.path)
+ currentEntities := a.GetCurrentEntities()
+ if !entityRecordsEqual(t, currentEntities.Clients, tc.entities.Clients) {
+ t.Errorf("bad data loaded. expected: %v, got: %v for path %q", tc.entities.Clients, currentEntities, tc.path)
}
activeClients := core.GetActiveClientsList()
@@ -1740,8 +1445,6 @@ func TestActivityLog_loadPriorEntitySegment(t *testing.T) {
t.Fatalf(err.Error())
}
WriteToStorage(t, core, ActivityLogPrefix+tc.path, data)
- WriteToStorage(t, core, ActivityGlobalLogPrefix+tc.path, data)
- WriteToStorage(t, core, ActivityLogLocalPrefix+tc.path, data)
}
ctx := context.Background()
@@ -1749,15 +1452,9 @@ func TestActivityLog_loadPriorEntitySegment(t *testing.T) {
if tc.refresh {
a.l.Lock()
a.fragmentLock.Lock()
- a.localFragmentLock.Lock()
a.partialMonthClientTracker = make(map[string]*activity.EntityRecord)
- a.partialMonthLocalClientTracker = make(map[string]*activity.EntityRecord)
- a.globalPartialMonthClientTracker = make(map[string]*activity.EntityRecord)
a.currentSegment.startTimestamp = tc.time
- a.currentGlobalSegment.startTimestamp = tc.time
- a.currentLocalSegment.startTimestamp = tc.time
a.fragmentLock.Unlock()
- a.localFragmentLock.Unlock()
a.l.Unlock()
}
@@ -1810,7 +1507,7 @@ func TestActivityLog_loadTokenCount(t *testing.T) {
ctx := context.Background()
for _, tc := range testCases {
- WriteToStorage(t, core, ActivityLogLocalPrefix+tc.path, data)
+ WriteToStorage(t, core, ActivityLogPrefix+tc.path, data)
}
for _, tc := range testCases {
@@ -1920,14 +1617,6 @@ func setupActivityRecordsInStorage(t *testing.T, base time.Time, includeEntities
},
}...)
}
-
- // append some local entity data
- entityRecords = append(entityRecords, &activity.EntityRecord{
- ClientID: "44444444-4444-4444-4444-444444444444",
- NamespaceID: namespace.RootNamespaceID,
- Timestamp: time.Now().Unix(),
- })
-
for i, entityRecord := range entityRecords {
entityData, err := proto.Marshal(&activity.EntityActivityLog{
Clients: []*activity.EntityRecord{entityRecord},
@@ -1935,17 +1624,10 @@ func setupActivityRecordsInStorage(t *testing.T, base time.Time, includeEntities
if err != nil {
t.Fatalf(err.Error())
}
- switch i {
- case 0:
+ if i == 0 {
WriteToStorage(t, core, ActivityLogPrefix+"entity/"+fmt.Sprint(monthsAgo.Unix())+"/0", entityData)
- WriteToStorage(t, core, ActivityGlobalLogPrefix+"entity/"+fmt.Sprint(monthsAgo.Unix())+"/0", entityData)
-
- case len(entityRecords) - 1:
- // local data
- WriteToStorage(t, core, ActivityLogLocalPrefix+"entity/"+fmt.Sprint(base.Unix())+"/"+strconv.Itoa(i-1), entityData)
- default:
+ } else {
WriteToStorage(t, core, ActivityLogPrefix+"entity/"+fmt.Sprint(base.Unix())+"/"+strconv.Itoa(i-1), entityData)
- WriteToStorage(t, core, ActivityGlobalLogPrefix+"entity/"+fmt.Sprint(base.Unix())+"/"+strconv.Itoa(i-1), entityData)
}
}
}
@@ -1969,7 +1651,7 @@ func setupActivityRecordsInStorage(t *testing.T, base time.Time, includeEntities
t.Fatalf(err.Error())
}
- WriteToStorage(t, core, ActivityLogLocalPrefix+"directtokens/"+fmt.Sprint(base.Unix())+"/0", tokenData)
+ WriteToStorage(t, core, ActivityLogPrefix+"directtokens/"+fmt.Sprint(base.Unix())+"/0", tokenData)
}
return a, entityRecords, tokenRecords
@@ -1992,24 +1674,15 @@ func TestActivityLog_refreshFromStoredLog(t *testing.T) {
Clients: expectedClientRecords[1:],
}
expectedCurrent := &activity.EntityActivityLog{
- Clients: expectedClientRecords[len(expectedClientRecords)-2 : len(expectedClientRecords)-1],
- }
- expectedCurrentLocal := &activity.EntityActivityLog{
Clients: expectedClientRecords[len(expectedClientRecords)-1:],
}
- currentEntities := a.GetCurrentGlobalEntities()
+ currentEntities := a.GetCurrentEntities()
if !entityRecordsEqual(t, currentEntities.Clients, expectedCurrent.Clients) {
// we only expect the newest entity segment to be loaded (for the current month)
t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expectedCurrent, currentEntities)
}
- currentLocalEntities := a.GetCurrentLocalEntities()
- if !entityRecordsEqual(t, currentLocalEntities.Clients, expectedCurrentLocal.Clients) {
- // we only expect the newest local entity segment to be loaded (for the current month)
- t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expectedCurrentLocal, currentLocalEntities)
- }
-
nsCount := a.GetStoredTokenCountByNamespaceID()
if !reflect.DeepEqual(nsCount, expectedTokenCounts) {
// we expect all token counts to be loaded
@@ -2044,31 +1717,14 @@ func TestActivityLog_refreshFromStoredLogWithBackgroundLoadingCancelled(t *testi
}
wg.Wait()
- // refreshFromStoredLog loads the most recent segment and then loads the older segments in the background
- // most recent global and local entity from setupActivityRecordsInStorage
expected := &activity.EntityActivityLog{
- Clients: expectedClientRecords[len(expectedClientRecords)-2:],
- }
-
- // most recent global entity from setupActivityRecordsInStorage
- expectedCurrent := &activity.EntityActivityLog{
- Clients: expectedClientRecords[len(expectedClientRecords)-2 : len(expectedClientRecords)-1],
- }
- // most recent local entity from setupActivityRecordsInStorage
- expectedCurrentLocal := &activity.EntityActivityLog{
Clients: expectedClientRecords[len(expectedClientRecords)-1:],
}
- currentEntities := a.GetCurrentGlobalEntities()
- if !entityRecordsEqual(t, currentEntities.Clients, expectedCurrent.Clients) {
+ currentEntities := a.GetCurrentEntities()
+ if !entityRecordsEqual(t, currentEntities.Clients, expected.Clients) {
// we only expect the newest entity segment to be loaded (for the current month)
- t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expectedCurrent, currentEntities)
- }
-
- currentLocalEntities := a.GetCurrentLocalEntities()
- if !entityRecordsEqual(t, currentLocalEntities.Clients, expectedCurrentLocal.Clients) {
- // we only expect the newest local entity segment to be loaded (for the current month)
- t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expectedCurrentLocal, currentLocalEntities)
+ t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expected, currentEntities)
}
nsCount := a.GetStoredTokenCountByNamespaceID()
@@ -2116,12 +1772,6 @@ func TestActivityLog_refreshFromStoredLogNoTokens(t *testing.T) {
Clients: expectedClientRecords[1:],
}
expectedCurrent := &activity.EntityActivityLog{
- Clients: expectedClientRecords[len(expectedClientRecords)-2 : len(expectedClientRecords)-1],
- }
- expectedCurrentGlobal := &activity.EntityActivityLog{
- Clients: expectedClientRecords[len(expectedClientRecords)-2 : len(expectedClientRecords)-1],
- }
- expectedCurrentLocal := &activity.EntityActivityLog{
Clients: expectedClientRecords[len(expectedClientRecords)-1:],
}
@@ -2130,19 +1780,6 @@ func TestActivityLog_refreshFromStoredLogNoTokens(t *testing.T) {
// we expect all segments for the current month to be loaded
t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expectedCurrent, currentEntities)
}
-
- currentGlobalEntities := a.GetCurrentGlobalEntities()
- if !entityRecordsEqual(t, currentGlobalEntities.Clients, expectedCurrentGlobal.Clients) {
- // we only expect the newest entity segment to be loaded (for the current month)
- t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expectedCurrentGlobal, currentGlobalEntities)
- }
-
- currentLocalEntities := a.GetCurrentLocalEntities()
- if !entityRecordsEqual(t, currentLocalEntities.Clients, expectedCurrentLocal.Clients) {
- // we only expect the newest local entity segment to be loaded (for the current month)
- t.Errorf("bad activity entity logs loaded. expected: %v got: %v", expectedCurrentLocal, currentLocalEntities)
- }
-
activeClients := a.core.GetActiveClientsList()
if err := ActiveEntitiesEqual(activeClients, expectedActive.Clients); err != nil {
t.Error(err)
@@ -2242,7 +1879,7 @@ func TestActivityLog_refreshFromStoredLogPreviousMonth(t *testing.T) {
Clients: expectedClientRecords[1:],
}
expectedCurrent := &activity.EntityActivityLog{
- Clients: expectedClientRecords[len(expectedClientRecords)-2 : len(expectedClientRecords)-1],
+ Clients: expectedClientRecords[len(expectedClientRecords)-1:],
}
currentEntities := a.GetCurrentEntities()
@@ -2341,27 +1978,11 @@ func TestActivityLog_DeleteWorker(t *testing.T) {
"entity/1111/2",
"entity/1111/3",
"entity/1112/1",
- }
- for _, path := range paths {
- WriteToStorage(t, core, ActivityLogPrefix+path, []byte("test"))
- }
-
- localPaths := []string{
- "entity/1111/1",
- "entity/1111/2",
- "entity/1111/3",
- "entity/1112/1",
- }
- for _, path := range localPaths {
- WriteToStorage(t, core, ActivityLogLocalPrefix+path, []byte("test"))
- }
-
- tokenPaths := []string{
"directtokens/1111/1",
"directtokens/1112/1",
}
- for _, path := range tokenPaths {
- WriteToStorage(t, core, ActivityLogLocalPrefix+path, []byte("test"))
+ for _, path := range paths {
+ WriteToStorage(t, core, ActivityLogPrefix+path, []byte("test"))
}
doneCh := make(chan struct{})
@@ -2377,17 +1998,13 @@ func TestActivityLog_DeleteWorker(t *testing.T) {
// Check segments still present
readSegmentFromStorage(t, core, ActivityLogPrefix+"entity/1112/1")
- readSegmentFromStorage(t, core, ActivityLogLocalPrefix+"entity/1112/1")
- readSegmentFromStorage(t, core, ActivityLogLocalPrefix+"directtokens/1112/1")
+ readSegmentFromStorage(t, core, ActivityLogPrefix+"directtokens/1112/1")
// Check other segments not present
expectMissingSegment(t, core, ActivityLogPrefix+"entity/1111/1")
expectMissingSegment(t, core, ActivityLogPrefix+"entity/1111/2")
expectMissingSegment(t, core, ActivityLogPrefix+"entity/1111/3")
- expectMissingSegment(t, core, ActivityLogLocalPrefix+"entity/1111/1")
- expectMissingSegment(t, core, ActivityLogLocalPrefix+"entity/1111/2")
- expectMissingSegment(t, core, ActivityLogLocalPrefix+"entity/1111/3")
- expectMissingSegment(t, core, ActivityLogLocalPrefix+"directtokens/1111/1")
+ expectMissingSegment(t, core, ActivityLogPrefix+"directtokens/1111/1")
}
// checkAPIWarnings ensures there is a warning if switching from enabled -> disabled,
@@ -2506,7 +2123,7 @@ func TestActivityLog_EnableDisable(t *testing.T) {
path = fmt.Sprintf("%ventity/%v/0", ActivityLogPrefix, seg2)
readSegmentFromStorage(t, core, path)
- path = fmt.Sprintf("%vdirecttokens/%v/0", ActivityLogLocalPrefix, seg2)
+ path = fmt.Sprintf("%vdirecttokens/%v/0", ActivityLogPrefix, seg2)
}
readSegmentFromStorage(t, core, path)
}
@@ -2515,23 +2132,9 @@ func TestActivityLog_EndOfMonth(t *testing.T) {
// We only want *fake* end of months, *real* ones are too scary.
timeutil.SkipAtEndOfMonth(t)
- t.Parallel()
-
- storage := &logical.InmemStorage{}
- coreConfig := &CoreConfig{
- CredentialBackends: map[string]logical.Factory{
- "userpass": userpass.Factory,
- },
- Physical: storage.Underlying(),
- }
-
- cluster := NewTestCluster(t, coreConfig, nil)
- core := cluster.Cores[0].Core
- TestWaitActive(t, core)
-
- ctx := namespace.RootContext(nil)
-
+ core, _, _ := TestCoreUnsealed(t)
a := core.activityLog
+ ctx := namespace.RootContext(nil)
// Make sure we're enabled.
a.SetConfig(ctx, activityConfig{
@@ -2543,21 +2146,8 @@ func TestActivityLog_EndOfMonth(t *testing.T) {
id1 := "11111111-1111-1111-1111-111111111111"
id2 := "22222222-2222-2222-2222-222222222222"
id3 := "33333333-3333-3333-3333-333333333333"
- id4 := "44444444-4444-4444-4444-444444444444"
a.AddEntityToFragment(id1, "root", time.Now().Unix())
- // add local data
- localMountEntry := &MountEntry{
- Table: credentialTableType,
- Path: "localUserpass/",
- Local: true,
- Type: "userpass",
- Accessor: "localMountAccessor",
- }
- err := core.enableCredential(ctx, localMountEntry)
- require.NoError(t, err)
- a.AddClientToFragment(id4, "root", time.Now().Unix(), false, "localMountAccessor")
-
month0 := time.Now().UTC()
segment0 := a.GetStartTimestamp()
month1 := timeutil.StartOfNextMonth(month0)
@@ -2570,29 +2160,10 @@ func TestActivityLog_EndOfMonth(t *testing.T) {
path := fmt.Sprintf("%ventity/%v/0", ActivityLogPrefix, segment0)
protoSegment := readSegmentFromStorage(t, core, path)
out := &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
+ err := proto.Unmarshal(protoSegment.Value, out)
if err != nil {
t.Fatal(err)
}
- expectedEntityIDs(t, out, []string{id1, id4})
-
- path = fmt.Sprintf("%ventity/%v/0", ActivityGlobalLogPrefix, segment0)
- protoSegment = readSegmentFromStorage(t, core, path)
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
- if err != nil {
- t.Fatal(err)
- }
- expectedEntityIDs(t, out, []string{id1})
-
- path = fmt.Sprintf("%ventity/%v/0", ActivityLogLocalPrefix, segment0)
- protoSegment = readSegmentFromStorage(t, core, path)
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
- if err != nil {
- t.Fatal(err)
- }
- expectedEntityIDs(t, out, []string{id4})
segment1 := a.GetStartTimestamp()
expectedTimestamp := timeutil.StartOfMonth(month1).Unix()
@@ -2637,21 +2208,16 @@ func TestActivityLog_EndOfMonth(t *testing.T) {
// Check all three segments still present, with correct entities
testCases := []struct {
- SegmentTimestamp int64
- ExpectedGlobalEntityIDs []string
- ExpectedLocalEntityIDs []string
+ SegmentTimestamp int64
+ ExpectedEntityIDs []string
}{
- {segment0, []string{id1}, []string{id4}},
- {segment1, []string{id2}, []string{}},
- {segment2, []string{id3}, []string{}},
+ {segment0, []string{id1}},
+ {segment1, []string{id2}},
+ {segment2, []string{id3}},
}
for i, tc := range testCases {
t.Logf("checking segment %v timestamp %v", i, tc.SegmentTimestamp)
-
- expectedAllEntities := make([]string, 0)
- expectedAllEntities = append(expectedAllEntities, tc.ExpectedGlobalEntityIDs...)
- expectedAllEntities = append(expectedAllEntities, tc.ExpectedLocalEntityIDs...)
path := fmt.Sprintf("%ventity/%v/0", ActivityLogPrefix, tc.SegmentTimestamp)
protoSegment := readSegmentFromStorage(t, core, path)
out := &activity.EntityActivityLog{}
@@ -2659,29 +2225,7 @@ func TestActivityLog_EndOfMonth(t *testing.T) {
if err != nil {
t.Fatalf("could not unmarshal protobuf: %v", err)
}
- expectedEntityIDs(t, out, expectedAllEntities)
-
- // Check for global entities at global storage path
- path = fmt.Sprintf("%ventity/%v/0", ActivityGlobalLogPrefix, tc.SegmentTimestamp)
- protoSegment = readSegmentFromStorage(t, core, path)
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
- if err != nil {
- t.Fatalf("could not unmarshal protobuf: %v", err)
- }
- expectedEntityIDs(t, out, tc.ExpectedGlobalEntityIDs)
-
- // Check for local entities at local storage path
- if len(tc.ExpectedLocalEntityIDs) > 0 {
- path = fmt.Sprintf("%ventity/%v/0", ActivityLogLocalPrefix, tc.SegmentTimestamp)
- protoSegment = readSegmentFromStorage(t, core, path)
- out = &activity.EntityActivityLog{}
- err = proto.Unmarshal(protoSegment.Value, out)
- if err != nil {
- t.Fatalf("could not unmarshal protobuf: %v", err)
- }
- expectedEntityIDs(t, out, tc.ExpectedLocalEntityIDs)
- }
+ expectedEntityIDs(t, out, tc.ExpectedEntityIDs)
}
}
@@ -2827,7 +2371,7 @@ func TestActivityLog_CalculatePrecomputedQueriesWithMixedTWEs(t *testing.T) {
if err != nil {
t.Fatal(err)
}
- tokenPath := fmt.Sprintf("%vdirecttokens/%v/%v", ActivityLogLocalPrefix, segment.StartTime, segment.Segment)
+ tokenPath := fmt.Sprintf("%vdirecttokens/%v/%v", ActivityLogPrefix, segment.StartTime, segment.Segment)
WriteToStorage(t, core, tokenPath, data)
}
@@ -4150,7 +3694,7 @@ func TestActivityLog_Deletion(t *testing.T) {
paths[i] = append(paths[i], entityPath)
WriteToStorage(t, core, entityPath, []byte("test"))
}
- tokenPath := fmt.Sprintf("%vdirecttokens/%v/0", ActivityLogLocalPrefix, start.Unix())
+ tokenPath := fmt.Sprintf("%vdirecttokens/%v/0", ActivityLogPrefix, start.Unix())
paths[i] = append(paths[i], tokenPath)
WriteToStorage(t, core, tokenPath, []byte("test"))
@@ -5149,45 +4693,15 @@ func TestActivityLog_HandleEndOfMonth(t *testing.T) {
// clients and verifies that they are added correctly to the tracking data
// structures
func TestAddActivityToFragment(t *testing.T) {
- storage := &logical.InmemStorage{}
- coreConfig := &CoreConfig{
- CredentialBackends: map[string]logical.Factory{
- "userpass": userpass.Factory,
- },
- Physical: storage.Underlying(),
- }
-
- cluster := NewTestCluster(t, coreConfig, nil)
- core := cluster.Cores[0].Core
+ core, _, _ := TestCoreUnsealed(t)
a := core.activityLog
a.SetEnable(true)
- require.Nil(t, a.fragment)
- require.Nil(t, a.localFragment)
- require.Nil(t, a.currentGlobalFragment)
-
mount := "mount"
- localMount := "localMount"
ns := "root"
id := "id1"
-
- // keeps track of the number of clients added to localFragment
- localCount := 0
-
- // add a client to regular fragment
a.AddActivityToFragment(id, ns, 0, entityActivityType, mount)
- // create a local mount accessor for local clients
- localMe := &MountEntry{
- Table: credentialTableType,
- Path: "userpass-local/",
- Type: "userpass",
- Local: true,
- Accessor: localMount,
- }
- err := core.enableCredential(namespace.RootContext(nil), localMe)
- require.NoError(t, err)
-
testCases := []struct {
name string
id string
@@ -5195,7 +4709,6 @@ func TestAddActivityToFragment(t *testing.T) {
isAdded bool
expectedID string
isNonEntity bool
- isLocal bool
}{
{
name: "duplicate",
@@ -5203,7 +4716,6 @@ func TestAddActivityToFragment(t *testing.T) {
activityType: entityActivityType,
isAdded: false,
expectedID: id,
- isLocal: false,
},
{
name: "new entity",
@@ -5211,7 +4723,6 @@ func TestAddActivityToFragment(t *testing.T) {
activityType: entityActivityType,
isAdded: true,
expectedID: "new-id",
- isLocal: false,
},
{
name: "new nonentity",
@@ -5220,7 +4731,6 @@ func TestAddActivityToFragment(t *testing.T) {
isAdded: true,
expectedID: "new-nonentity",
isNonEntity: true,
- isLocal: true,
},
{
name: "new acme",
@@ -5229,7 +4739,6 @@ func TestAddActivityToFragment(t *testing.T) {
isAdded: true,
expectedID: "pki-acme.new-acme",
isNonEntity: true,
- isLocal: false,
},
{
name: "new secret sync",
@@ -5238,82 +4747,20 @@ func TestAddActivityToFragment(t *testing.T) {
isAdded: true,
expectedID: "new-secret-sync",
isNonEntity: true,
- isLocal: false,
- },
- {
- name: "new local entity",
- id: "new-local-id",
- activityType: entityActivityType,
- isAdded: true,
- expectedID: "new-local-id",
- isNonEntity: false,
- isLocal: true,
},
}
for _, tc := range testCases {
t.Run(tc.name, func(t *testing.T) {
- var mountAccessor string
a.fragmentLock.RLock()
numClientsBefore := len(a.fragment.Clients)
a.fragmentLock.RUnlock()
- a.globalFragmentLock.RLock()
- globalClientsBefore := len(a.currentGlobalFragment.Clients)
- a.globalFragmentLock.RUnlock()
-
- numLocalClientsBefore := 0
-
- // add client to the fragment
- if tc.isLocal {
- // data already present in local fragment, get client count before adding activity to fragment
- a.localFragmentLock.RLock()
- numLocalClientsBefore = len(a.localFragment.Clients)
- a.localFragmentLock.RUnlock()
-
- mountAccessor = localMount
- a.AddActivityToFragment(tc.id, ns, 0, tc.activityType, localMount)
-
- require.NotNil(t, a.localFragment)
- localCount++
- } else {
- mountAccessor = mount
- a.AddActivityToFragment(tc.id, ns, 0, tc.activityType, mount)
- }
-
+ a.AddActivityToFragment(tc.id, ns, 0, tc.activityType, mount)
a.fragmentLock.RLock()
defer a.fragmentLock.RUnlock()
numClientsAfter := len(a.fragment.Clients)
- a.globalFragmentLock.RLock()
- defer a.globalFragmentLock.RUnlock()
- globalClientsAfter := len(a.currentGlobalFragment.Clients)
-
- // if local client, verify if local fragment is updated
- if tc.isLocal {
- a.localFragmentLock.RLock()
- defer a.localFragmentLock.RUnlock()
-
- numLocalClientsAfter := len(a.localFragment.Clients)
- switch tc.isAdded {
- case true:
- require.Equal(t, numLocalClientsBefore+1, numLocalClientsAfter)
- default:
- require.Equal(t, numLocalClientsBefore, numLocalClientsAfter)
- }
- } else {
- // verify global clients
- switch tc.isAdded {
- case true:
- if tc.activityType != nonEntityTokenActivityType {
- require.Equal(t, globalClientsBefore+1, globalClientsAfter)
- }
- default:
- require.Equal(t, globalClientsBefore, globalClientsAfter)
- }
- }
- // for now local clients are added to both regular fragment and local fragment.
- // this will be modified in ticket vault-31234
if tc.isAdded {
require.Equal(t, numClientsBefore+1, numClientsAfter)
} else {
@@ -5326,110 +4773,13 @@ func TestAddActivityToFragment(t *testing.T) {
NamespaceID: ns,
Timestamp: 0,
NonEntity: tc.isNonEntity,
- MountAccessor: mountAccessor,
+ MountAccessor: mount,
ClientType: tc.activityType,
}, a.partialMonthClientTracker[tc.expectedID]))
-
- if tc.isLocal {
- require.Contains(t, a.partialMonthLocalClientTracker, tc.expectedID)
- require.True(t, proto.Equal(&activity.EntityRecord{
- ClientID: tc.expectedID,
- NamespaceID: ns,
- Timestamp: 0,
- NonEntity: tc.isNonEntity,
- MountAccessor: mountAccessor,
- ClientType: tc.activityType,
- }, a.partialMonthLocalClientTracker[tc.expectedID]))
- } else {
- require.Contains(t, a.globalPartialMonthClientTracker, tc.expectedID)
- require.True(t, proto.Equal(&activity.EntityRecord{
- ClientID: tc.expectedID,
- NamespaceID: ns,
- Timestamp: 0,
- NonEntity: tc.isNonEntity,
- MountAccessor: mount,
- ClientType: tc.activityType,
- }, a.globalPartialMonthClientTracker[tc.expectedID]))
- }
})
}
}
-// TestGetAllPartialMonthClients adds activity for a local and regular clients and verifies that
-// GetAllPartialMonthClients returns the right local and global clients
-func TestGetAllPartialMonthClients(t *testing.T) {
- storage := &logical.InmemStorage{}
- coreConfig := &CoreConfig{
- CredentialBackends: map[string]logical.Factory{
- "userpass": userpass.Factory,
- },
- Physical: storage.Underlying(),
- }
-
- cluster := NewTestCluster(t, coreConfig, nil)
- core := cluster.Cores[0].Core
- a := core.activityLog
- a.SetEnable(true)
-
- require.Nil(t, a.fragment)
- require.Nil(t, a.localFragment)
- require.Nil(t, a.currentGlobalFragment)
-
- ns := "root"
- mount := "mount"
- localMount := "localMount"
- clientID := "id1"
- localClientID := "new-local-id"
-
- // add a client to regular fragment, this should be added to globalPartialMonthClientTracker
- a.AddActivityToFragment(clientID, ns, 0, entityActivityType, mount)
-
- require.NotNil(t, a.localFragment)
- require.NotNil(t, a.fragment)
- require.NotNil(t, a.currentGlobalFragment)
-
- // create a local mount accessor
- localMe := &MountEntry{
- Table: credentialTableType,
- Path: "userpass-local/",
- Type: "userpass",
- Local: true,
- Accessor: localMount,
- }
- err := core.enableCredential(namespace.RootContext(nil), localMe)
- require.NoError(t, err)
-
- // add client to local fragment, this should be added to partialMonthLocalClientTracker
- a.AddActivityToFragment(localClientID, ns, 0, entityActivityType, localMount)
-
- require.NotNil(t, a.localFragment)
-
- // GetAllPartialMonthClients returns the partialMonthLocalClientTracker and globalPartialMonthClientTracker
- localClients, globalClients := a.GetAllPartialMonthClients()
-
- // verify the returned localClients
- require.Len(t, localClients, 1)
- require.Contains(t, localClients, localClientID)
- require.True(t, proto.Equal(&activity.EntityRecord{
- ClientID: localClientID,
- NamespaceID: ns,
- Timestamp: 0,
- MountAccessor: localMount,
- ClientType: entityActivityType,
- }, localClients[localClientID]))
-
- // verify the returned globalClients
- require.Len(t, globalClients, 1)
- require.Contains(t, globalClients, clientID)
- require.True(t, proto.Equal(&activity.EntityRecord{
- ClientID: clientID,
- NamespaceID: ns,
- Timestamp: 0,
- MountAccessor: mount,
- ClientType: entityActivityType,
- }, globalClients[clientID]))
-}
-
// TestActivityLog_reportPrecomputedQueryMetrics creates 3 clients per type and
// calls reportPrecomputedQueryMetrics. The test verifies that the metric sink
// gets metrics reported correctly, based on the segment time matching the
@@ -5674,301 +5024,3 @@ func TestActivityLog_Export_CSV_Header(t *testing.T) {
require.Empty(t, deep.Equal(expectedColumnIndex, encoder.columnIndex))
}
-
-// TestCreateSegment_StoreSegment verifies that
-// the activity log will correctly create segments from
-// the fragments and store the right number of clients at
-// the proper path. This test should be modified to include local clients.
-func TestCreateSegment_StoreSegment(t *testing.T) {
- cluster := NewTestCluster(t, nil, nil)
- core := cluster.Cores[0].Core
- a := core.activityLog
- a.SetEnable(true)
-
- ctx := context.Background()
- timeStamp := time.Now()
-
- clientRecords := make([]*activity.EntityRecord, ActivitySegmentClientCapacity*2+1)
- for i := range clientRecords {
- clientRecords[i] = &activity.EntityRecord{
- ClientID: fmt.Sprintf("111122222-3333-4444-5555-%012v", i),
- Timestamp: timeStamp.Unix(),
- NonEntity: false,
- }
- }
-
- startTime := a.GetStartTimestamp()
- parsedTime := time.Unix(startTime, 0)
-
- testCases := []struct {
- testName string
- numClients int
- pathPrefix string
- maxClientsPerFragment int
- global bool
- forceStore bool
- }{
- {
- testName: "[global] max client size, drop clients",
- numClients: ActivitySegmentClientCapacity*2 + 1,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: true,
- },
- {
- testName: "[global, no-force] max client size, drop clients",
- numClients: ActivitySegmentClientCapacity*2 + 1,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: true,
- forceStore: true,
- },
- {
- testName: "[global] max segment size",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: true,
- },
- {
- testName: "[global, no-force] max segment size",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: true,
- forceStore: true,
- },
- {
- testName: "[global] max segment size, multiple fragments",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: true,
- },
- {
- testName: "[global, no-force] max segment size, multiple fragments",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: true,
- forceStore: true,
- },
- {
- testName: "[global] roll over",
- numClients: ActivitySegmentClientCapacity + 2,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: true,
- },
- {
- testName: "[global, no-force] roll over",
- numClients: ActivitySegmentClientCapacity + 2,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: true,
- forceStore: true,
- },
- {
- testName: "[global] max segment size, rollover multiple fragments",
- numClients: ActivitySegmentClientCapacity * 2,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: true,
- },
- {
- testName: "[global, no-force] max segment size, rollover multiple fragments",
- numClients: ActivitySegmentClientCapacity * 2,
- pathPrefix: activityGlobalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: true,
- forceStore: true,
- },
-
- {
- testName: "[non-global] max segment size",
- numClients: ActivitySegmentClientCapacity,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- },
- {
- testName: "[non-global] max segment size, multiple fragments",
- numClients: ActivitySegmentClientCapacity,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: false,
- },
- {
- testName: "[non-global] roll over",
- numClients: ActivitySegmentClientCapacity + 2,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- },
- {
- testName: "[non-global] max segment size, rollover multiple fragments",
- numClients: ActivitySegmentClientCapacity * 2,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: false,
- },
- {
- testName: "[non-global] max client size, drop clients",
- numClients: ActivitySegmentClientCapacity*2 + 1,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- },
- {
- testName: "[local] max client size, drop clients",
- numClients: ActivitySegmentClientCapacity*2 + 1,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- },
- {
- testName: "[local, no-force] max client size, drop clients",
- numClients: ActivitySegmentClientCapacity*2 + 1,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- forceStore: true,
- },
- {
- testName: "[local] max segment size",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- },
- {
- testName: "[local, no-force] max segment size",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- forceStore: true,
- },
- {
- testName: "[local] max segment size, multiple fragments",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: false,
- },
- {
- testName: "[local, no-force] max segment size, multiple fragments",
- numClients: ActivitySegmentClientCapacity,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: false,
- forceStore: true,
- },
- {
- testName: "[local] roll over",
- numClients: ActivitySegmentClientCapacity + 2,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- },
- {
- testName: "[local, no-force] roll over",
- numClients: ActivitySegmentClientCapacity + 2,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity,
- global: false,
- forceStore: true,
- },
- {
- testName: "[local] max segment size, rollover multiple fragments",
- numClients: ActivitySegmentClientCapacity * 2,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: false,
- },
- {
- testName: "[local, no-force] max segment size, rollover multiple fragments",
- numClients: ActivitySegmentClientCapacity * 2,
- pathPrefix: activityLocalPathPrefix,
- maxClientsPerFragment: ActivitySegmentClientCapacity - 1,
- global: false,
- forceStore: true,
- },
- }
-
- for _, test := range testCases {
- t.Run(test.testName, func(t *testing.T) {
- // Add clients to fragments
- fragments := make([]*activity.LogFragment, 0)
- remainder := test.numClients
- var i int
- for i = 0; i+test.maxClientsPerFragment < test.numClients; i = i + test.maxClientsPerFragment {
- clients := clientRecords[i : i+test.maxClientsPerFragment]
- remainder -= test.maxClientsPerFragment
- fragments = append(fragments, &activity.LogFragment{Clients: clients})
- }
- if remainder > 0 {
- clients := clientRecords[i : i+remainder]
- fragments = append(fragments, &activity.LogFragment{Clients: clients})
-
- }
-
- segment := &a.currentGlobalSegment
- if !test.global {
- segment = &a.currentSegment
- if test.pathPrefix == activityLocalPathPrefix {
- segment = &a.currentLocalSegment
- }
- }
-
- // Create segments and write to storage
- require.NoError(t, core.StoreCurrentSegment(ctx, fragments, segment, test.forceStore, test.pathPrefix))
-
- reader, err := a.NewSegmentFileReader(ctx, parsedTime)
- require.NoError(t, err)
- var clientTotal int
- if test.global {
- for {
- entity, err := reader.ReadGlobalEntity(ctx)
- if errors.Is(err, io.EOF) {
- break
- }
- require.NoError(t, err)
- clientTotal += len(entity.GetClients())
- }
- } else {
- if test.pathPrefix == activityLocalPathPrefix {
- for {
- entity, err := reader.ReadLocalEntity(ctx)
- if errors.Is(err, io.EOF) {
- break
- }
- require.NoError(t, err)
- clientTotal += len(entity.GetClients())
- }
- } else {
- for {
- entity, err := reader.ReadEntity(ctx)
- if errors.Is(err, io.EOF) {
- break
- }
- require.NoError(t, err)
- clientTotal += len(entity.GetClients())
- }
- }
- }
-
- // The current behavior is that there were greater than 2 * ActivitySegmentClientCapacity seen, then we
- // drop of the remainder of those clients seen during that time. Let's verify that this is the case
- expectedTotal := test.numClients
- if test.numClients > 2*ActivitySegmentClientCapacity {
- expectedTotal = 2 * ActivitySegmentClientCapacity
- }
-
- require.Equal(t, expectedTotal, clientTotal)
-
- // Delete any logs written in this test
- core.DeleteLogsAtPath(ctx, t, test.pathPrefix+activityEntityBasePath, startTime)
- // Reset client sequence number and current client slice back to original values
- segment.clientSequenceNumber = 0
- segment.currentClients = &activity.EntityActivityLog{
- Clients: make([]*activity.EntityRecord, 0),
- }
- })
- }
-}
diff --git a/vault/activity_log_testing_util.go b/vault/activity_log_testing_util.go
index f9bb25ba142e..bbaae9551191 100644
--- a/vault/activity_log_testing_util.go
+++ b/vault/activity_log_testing_util.go
@@ -57,27 +57,17 @@ func (c *Core) InjectActivityLogDataThisMonth(t *testing.T) map[string]*activity
return c.activityLog.partialMonthClientTracker
}
-// GetActiveClients returns the in-memory globalPartialMonthClientTracker and partialMonthLocalClientTracker from an
+// GetActiveClients returns the in-memory partialMonthClientTracker from an
// activity log.
func (c *Core) GetActiveClients() map[string]*activity.EntityRecord {
out := make(map[string]*activity.EntityRecord)
c.stateLock.RLock()
- c.activityLog.globalFragmentLock.RLock()
- c.activityLog.localFragmentLock.RLock()
-
- // add active global clients
- for k, v := range c.activityLog.globalPartialMonthClientTracker {
- out[k] = v
- }
-
- // add active local clients
- for k, v := range c.activityLog.partialMonthLocalClientTracker {
+ c.activityLog.fragmentLock.RLock()
+ for k, v := range c.activityLog.partialMonthClientTracker {
out[k] = v
}
-
- c.activityLog.globalFragmentLock.RUnlock()
- c.activityLog.localFragmentLock.RUnlock()
+ c.activityLog.fragmentLock.RUnlock()
c.stateLock.RUnlock()
return out
@@ -93,28 +83,6 @@ func (c *Core) GetActiveClientsList() []*activity.EntityRecord {
return out
}
-func (c *Core) GetActiveGlobalClientsList() []*activity.EntityRecord {
- out := []*activity.EntityRecord{}
- c.activityLog.globalFragmentLock.RLock()
- // add active global clients
- for _, v := range c.activityLog.globalPartialMonthClientTracker {
- out = append(out, v)
- }
- c.activityLog.globalFragmentLock.RUnlock()
- return out
-}
-
-func (c *Core) GetActiveLocalClientsList() []*activity.EntityRecord {
- out := []*activity.EntityRecord{}
- c.activityLog.localFragmentLock.RLock()
- // add active global clients
- for _, v := range c.activityLog.partialMonthLocalClientTracker {
- out = append(out, v)
- }
- c.activityLog.localFragmentLock.RUnlock()
- return out
-}
-
// GetCurrentEntities returns the current entity activity log
func (a *ActivityLog) GetCurrentEntities() *activity.EntityActivityLog {
a.l.RLock()
@@ -122,20 +90,6 @@ func (a *ActivityLog) GetCurrentEntities() *activity.EntityActivityLog {
return a.currentSegment.currentClients
}
-// GetCurrentGlobalEntities returns the current global entity activity log
-func (a *ActivityLog) GetCurrentGlobalEntities() *activity.EntityActivityLog {
- a.l.RLock()
- defer a.l.RUnlock()
- return a.currentGlobalSegment.currentClients
-}
-
-// GetCurrentLocalEntities returns the current local entity activity log
-func (a *ActivityLog) GetCurrentLocalEntities() *activity.EntityActivityLog {
- a.l.RLock()
- defer a.l.RUnlock()
- return a.currentLocalSegment.currentClients
-}
-
// WriteToStorage is used to put entity data in storage
// `path` should be the complete path (not relative to the view)
func WriteToStorage(t *testing.T, c *Core, path string, data []byte) {
@@ -214,12 +168,6 @@ func (a *ActivityLog) ExpectCurrentSegmentRefreshed(t *testing.T, expectedStart
if a.partialMonthClientTracker == nil {
t.Errorf("expected non-nil partialMonthClientTracker")
}
- if a.partialMonthLocalClientTracker == nil {
- t.Errorf("expected non-nil partialMonthLocalClientTracker")
- }
- if a.globalPartialMonthClientTracker == nil {
- t.Errorf("expected non-nil globalPartialMonthClientTracker")
- }
if len(a.currentSegment.currentClients.Clients) > 0 {
t.Errorf("expected no current entity segment to be loaded. got: %v", a.currentSegment.currentClients)
}
@@ -229,29 +177,13 @@ func (a *ActivityLog) ExpectCurrentSegmentRefreshed(t *testing.T, expectedStart
if len(a.partialMonthClientTracker) > 0 {
t.Errorf("expected no active entity segment to be loaded. got: %v", a.partialMonthClientTracker)
}
- if len(a.partialMonthLocalClientTracker) > 0 {
- t.Errorf("expected no active entity segment to be loaded. got: %v", a.partialMonthLocalClientTracker)
- }
- if len(a.globalPartialMonthClientTracker) > 0 {
- t.Errorf("expected no active entity segment to be loaded. got: %v", a.globalPartialMonthClientTracker)
- }
if verifyTimeNotZero {
if a.currentSegment.startTimestamp == 0 {
t.Error("bad start timestamp. expected no reset but timestamp was reset")
}
- if a.currentGlobalSegment.startTimestamp == 0 {
- t.Error("bad start timestamp. expected no reset but timestamp was reset")
- }
- if a.currentLocalSegment.startTimestamp == 0 {
- t.Error("bad start timestamp. expected no reset but timestamp was reset")
- }
} else if a.currentSegment.startTimestamp != expectedStart {
t.Errorf("bad start timestamp. expected: %v got: %v", expectedStart, a.currentSegment.startTimestamp)
- } else if a.currentGlobalSegment.startTimestamp != expectedStart {
- t.Errorf("bad start timestamp. expected: %v got: %v", expectedStart, a.currentGlobalSegment.startTimestamp)
- } else if a.currentLocalSegment.startTimestamp != expectedStart {
- t.Errorf("bad start timestamp. expected: %v got: %v", expectedStart, a.currentLocalSegment.startTimestamp)
}
}
@@ -270,12 +202,7 @@ func ActiveEntitiesEqual(active []*activity.EntityRecord, test []*activity.Entit
func (a *ActivityLog) GetStartTimestamp() int64 {
a.l.RLock()
defer a.l.RUnlock()
- // TODO: We will substitute a.currentSegment with a.currentLocalSegment when we remove
- // a.currentSegment from the code
- if a.currentGlobalSegment.startTimestamp != a.currentSegment.startTimestamp {
- return -1
- }
- return a.currentGlobalSegment.startTimestamp
+ return a.currentSegment.startTimestamp
}
// SetStartTimestamp sets the start timestamp on an activity log
@@ -283,15 +210,13 @@ func (a *ActivityLog) SetStartTimestamp(timestamp int64) {
a.l.Lock()
defer a.l.Unlock()
a.currentSegment.startTimestamp = timestamp
- a.currentGlobalSegment.startTimestamp = timestamp
- a.currentLocalSegment.startTimestamp = timestamp
}
// GetStoredTokenCountByNamespaceID returns the count of tokens by namespace ID
func (a *ActivityLog) GetStoredTokenCountByNamespaceID() map[string]uint64 {
a.l.RLock()
defer a.l.RUnlock()
- return a.currentLocalSegment.tokenCount.CountByNamespaceID
+ return a.currentSegment.tokenCount.CountByNamespaceID
}
// GetEntitySequenceNumber returns the current entity sequence number
@@ -301,20 +226,6 @@ func (a *ActivityLog) GetEntitySequenceNumber() uint64 {
return a.currentSegment.clientSequenceNumber
}
-// GetGlobalEntitySequenceNumber returns the current entity sequence number
-func (a *ActivityLog) GetGlobalEntitySequenceNumber() uint64 {
- a.l.RLock()
- defer a.l.RUnlock()
- return a.currentGlobalSegment.clientSequenceNumber
-}
-
-// GetLocalEntitySequenceNumber returns the current entity sequence number
-func (a *ActivityLog) GetLocalEntitySequenceNumber() uint64 {
- a.l.RLock()
- defer a.l.RUnlock()
- return a.currentLocalSegment.clientSequenceNumber
-}
-
// SetEnable sets the enabled flag on the activity log
func (a *ActivityLog) SetEnable(enabled bool) {
a.l.Lock()
@@ -336,51 +247,3 @@ func (a *ActivityLog) GetEnabled() bool {
func (c *Core) GetActivityLog() *ActivityLog {
return c.activityLog
}
-
-func (c *Core) GetActiveGlobalFragment() *activity.LogFragment {
- c.activityLog.globalFragmentLock.RLock()
- defer c.activityLog.globalFragmentLock.RUnlock()
- return c.activityLog.currentGlobalFragment
-}
-
-func (c *Core) GetSecondaryGlobalFragments() []*activity.LogFragment {
- c.activityLog.globalFragmentLock.RLock()
- defer c.activityLog.globalFragmentLock.RUnlock()
- return c.activityLog.secondaryGlobalClientFragments
-}
-
-func (c *Core) GetActiveLocalFragment() *activity.LogFragment {
- c.activityLog.localFragmentLock.RLock()
- defer c.activityLog.localFragmentLock.RUnlock()
- return c.activityLog.localFragment
-}
-
-func (c *Core) GetActiveFragment() *activity.LogFragment {
- c.activityLog.fragmentLock.RLock()
- defer c.activityLog.fragmentLock.RUnlock()
- return c.activityLog.fragment
-}
-
-// StoreCurrentSegment is a test only method to create and store
-// segments from fragments. This allows createCurrentSegmentFromFragments to remain
-// private
-func (c *Core) StoreCurrentSegment(ctx context.Context, fragments []*activity.LogFragment, currentSegment *segmentInfo, force bool, storagePathPrefix string) error {
- return c.activityLog.createCurrentSegmentFromFragments(ctx, fragments, currentSegment, force, storagePathPrefix)
-}
-
-// DeleteLogsAtPath is test helper function deletes all logs at the given path
-func (c *Core) DeleteLogsAtPath(ctx context.Context, t *testing.T, storagePath string, startTime int64) {
- basePath := storagePath + fmt.Sprint(startTime) + "/"
- a := c.activityLog
- segments, err := a.view.List(ctx, basePath)
- if err != nil {
- t.Fatalf("could not list path %v", err)
- return
- }
- for _, p := range segments {
- err = a.view.Delete(ctx, basePath+p)
- if err != nil {
- t.Fatalf("could not delete path %v", err)
- }
- }
-}
diff --git a/vault/activity_log_util_common.go b/vault/activity_log_util_common.go
index c019d03a4739..3e71ee8cc0ac 100644
--- a/vault/activity_log_util_common.go
+++ b/vault/activity_log_util_common.go
@@ -424,18 +424,14 @@ type singleTypeSegmentReader struct {
a *ActivityLog
}
type segmentReader struct {
- tokens *singleTypeSegmentReader
- entities *singleTypeSegmentReader
- globalEntities *singleTypeSegmentReader
- localEntities *singleTypeSegmentReader
+ tokens *singleTypeSegmentReader
+ entities *singleTypeSegmentReader
}
// SegmentReader is an interface that provides methods to read tokens and entities in order
type SegmentReader interface {
ReadToken(ctx context.Context) (*activity.TokenCount, error)
ReadEntity(ctx context.Context) (*activity.EntityActivityLog, error)
- ReadGlobalEntity(ctx context.Context) (*activity.EntityActivityLog, error)
- ReadLocalEntity(ctx context.Context) (*activity.EntityActivityLog, error)
}
func (a *ActivityLog) NewSegmentFileReader(ctx context.Context, startTime time.Time) (SegmentReader, error) {
@@ -443,19 +439,11 @@ func (a *ActivityLog) NewSegmentFileReader(ctx context.Context, startTime time.T
if err != nil {
return nil, err
}
- globalEntities, err := a.newSingleTypeSegmentReader(ctx, startTime, activityGlobalPathPrefix+activityEntityBasePath)
+ tokens, err := a.newSingleTypeSegmentReader(ctx, startTime, activityTokenBasePath)
if err != nil {
return nil, err
}
- localEntities, err := a.newSingleTypeSegmentReader(ctx, startTime, activityLocalPathPrefix+activityEntityBasePath)
- if err != nil {
- return nil, err
- }
- tokens, err := a.newSingleTypeSegmentReader(ctx, startTime, activityTokenLocalBasePath)
- if err != nil {
- return nil, err
- }
- return &segmentReader{entities: entities, globalEntities: globalEntities, localEntities: localEntities, tokens: tokens}, nil
+ return &segmentReader{entities: entities, tokens: tokens}, nil
}
func (a *ActivityLog) newSingleTypeSegmentReader(ctx context.Context, startTime time.Time, prefix string) (*singleTypeSegmentReader, error) {
@@ -521,28 +509,6 @@ func (e *segmentReader) ReadEntity(ctx context.Context) (*activity.EntityActivit
return out, nil
}
-// ReadGlobalEntity reads a global entity from the global segment
-// If there is none available, then the error will be io.EOF
-func (e *segmentReader) ReadGlobalEntity(ctx context.Context) (*activity.EntityActivityLog, error) {
- out := &activity.EntityActivityLog{}
- err := e.globalEntities.nextValue(ctx, out)
- if err != nil {
- return nil, err
- }
- return out, nil
-}
-
-// ReadLocalEntity reads a local entity from the local segment
-// If there is none available, then the error will be io.EOF
-func (e *segmentReader) ReadLocalEntity(ctx context.Context) (*activity.EntityActivityLog, error) {
- out := &activity.EntityActivityLog{}
- err := e.localEntities.nextValue(ctx, out)
- if err != nil {
- return nil, err
- }
- return out, nil
-}
-
// namespaceRecordToCountsResponse converts the record to the ResponseCounts
// type. The function sums entity, non-entity, and secret sync counts to get the
// total client count.
diff --git a/vault/activity_log_util_common_test.go b/vault/activity_log_util_common_test.go
index 7201cdc651f9..48a3e8dea43b 100644
--- a/vault/activity_log_util_common_test.go
+++ b/vault/activity_log_util_common_test.go
@@ -990,22 +990,6 @@ func Test_ActivityLog_ComputeCurrentMonth_NamespaceMounts(t *testing.T) {
}
}
-// writeGlobalEntitySegment writes a single global segment file with the given time and index for an entity
-func writeGlobalEntitySegment(t *testing.T, core *Core, ts time.Time, index int, item *activity.EntityActivityLog) {
- t.Helper()
- protoItem, err := proto.Marshal(item)
- require.NoError(t, err)
- WriteToStorage(t, core, makeSegmentPath(t, activityGlobalPathPrefix+activityEntityBasePath, ts, index), protoItem)
-}
-
-// writeLocalEntitySegment writes a single local segment file with the given time and index for an entity
-func writeLocalEntitySegment(t *testing.T, core *Core, ts time.Time, index int, item *activity.EntityActivityLog) {
- t.Helper()
- protoItem, err := proto.Marshal(item)
- require.NoError(t, err)
- WriteToStorage(t, core, makeSegmentPath(t, activityLocalPathPrefix+activityEntityBasePath, ts, index), protoItem)
-}
-
// writeEntitySegment writes a single segment file with the given time and index for an entity
func writeEntitySegment(t *testing.T, core *Core, ts time.Time, index int, item *activity.EntityActivityLog) {
t.Helper()
@@ -1019,7 +1003,7 @@ func writeTokenSegment(t *testing.T, core *Core, ts time.Time, index int, item *
t.Helper()
protoItem, err := proto.Marshal(item)
require.NoError(t, err)
- WriteToStorage(t, core, makeSegmentPath(t, activityTokenLocalBasePath, ts, index), protoItem)
+ WriteToStorage(t, core, makeSegmentPath(t, activityTokenBasePath, ts, index), protoItem)
}
// makeSegmentPath formats the path for a segment at a particular time and index
@@ -1036,10 +1020,8 @@ func TestSegmentFileReader_BadData(t *testing.T) {
now := time.Now()
// write bad data that won't be able to be unmarshaled at index 0
- WriteToStorage(t, core, makeSegmentPath(t, activityTokenLocalBasePath, now, 0), []byte("fake data"))
+ WriteToStorage(t, core, makeSegmentPath(t, activityTokenBasePath, now, 0), []byte("fake data"))
WriteToStorage(t, core, makeSegmentPath(t, activityEntityBasePath, now, 0), []byte("fake data"))
- WriteToStorage(t, core, makeSegmentPath(t, activityGlobalPathPrefix+activityEntityBasePath, now, 0), []byte("fake data"))
- WriteToStorage(t, core, makeSegmentPath(t, activityLocalPathPrefix+activityEntityBasePath, now, 0), []byte("fake data"))
// write entity at index 1
entity := &activity.EntityActivityLog{Clients: []*activity.EntityRecord{
@@ -1049,12 +1031,6 @@ func TestSegmentFileReader_BadData(t *testing.T) {
}}
writeEntitySegment(t, core, now, 1, entity)
- // write global data at index 1
- writeGlobalEntitySegment(t, core, now, 1, entity)
-
- // write local data at index 1
- writeLocalEntitySegment(t, core, now, 1, entity)
-
// write token at index 1
token := &activity.TokenCount{CountByNamespaceID: map[string]uint64{
"ns": 1,
@@ -1071,22 +1047,6 @@ func TestSegmentFileReader_BadData(t *testing.T) {
require.True(t, proto.Equal(gotEntity, entity))
require.Nil(t, err)
- // first the bad global entity is read, which returns an error
- _, err = reader.ReadGlobalEntity(context.Background())
- require.Error(t, err)
- // then, the reader can read the good entity at index 1
- gotEntity, err = reader.ReadGlobalEntity(context.Background())
- require.True(t, proto.Equal(gotEntity, entity))
- require.Nil(t, err)
-
- // first the bad local entity is read, which returns an error
- _, err = reader.ReadLocalEntity(context.Background())
- require.Error(t, err)
- // then, the reader can read the good entity at index 1
- gotEntity, err = reader.ReadLocalEntity(context.Background())
- require.True(t, proto.Equal(gotEntity, entity))
- require.Nil(t, err)
-
// the bad token causes an error
_, err = reader.ReadToken(context.Background())
require.Error(t, err)
@@ -1103,9 +1063,8 @@ func TestSegmentFileReader_MissingData(t *testing.T) {
now := time.Now()
// write entities and tokens at indexes 0, 1, 2
for i := 0; i < 3; i++ {
- WriteToStorage(t, core, makeSegmentPath(t, activityTokenLocalBasePath, now, i), []byte("fake data"))
+ WriteToStorage(t, core, makeSegmentPath(t, activityTokenBasePath, now, i), []byte("fake data"))
WriteToStorage(t, core, makeSegmentPath(t, activityEntityBasePath, now, i), []byte("fake data"))
- WriteToStorage(t, core, makeSegmentPath(t, activityGlobalPathPrefix+activityEntityBasePath, now, i), []byte("fake data"))
}
// write entity at index 3
@@ -1115,13 +1074,6 @@ func TestSegmentFileReader_MissingData(t *testing.T) {
},
}}
writeEntitySegment(t, core, now, 3, entity)
-
- // write global entity at index 3
- writeGlobalEntitySegment(t, core, now, 3, entity)
-
- // write local entity at index 3
- writeLocalEntitySegment(t, core, now, 3, entity)
-
// write token at index 3
token := &activity.TokenCount{CountByNamespaceID: map[string]uint64{
"ns": 1,
@@ -1132,10 +1084,8 @@ func TestSegmentFileReader_MissingData(t *testing.T) {
// delete the indexes 0, 1, 2
for i := 0; i < 3; i++ {
- require.NoError(t, core.barrier.Delete(context.Background(), makeSegmentPath(t, activityTokenLocalBasePath, now, i)))
+ require.NoError(t, core.barrier.Delete(context.Background(), makeSegmentPath(t, activityTokenBasePath, now, i)))
require.NoError(t, core.barrier.Delete(context.Background(), makeSegmentPath(t, activityEntityBasePath, now, i)))
- require.NoError(t, core.barrier.Delete(context.Background(), makeSegmentPath(t, activityGlobalPathPrefix+activityEntityBasePath, now, i)))
- require.NoError(t, core.barrier.Delete(context.Background(), makeSegmentPath(t, activityLocalPathPrefix+activityEntityBasePath, now, i)))
}
// we expect the reader to only return the data at index 3, and then be done
@@ -1150,18 +1100,6 @@ func TestSegmentFileReader_MissingData(t *testing.T) {
require.True(t, proto.Equal(gotToken, token))
_, err = reader.ReadToken(context.Background())
require.Equal(t, err, io.EOF)
-
- gotEntity, err = reader.ReadGlobalEntity(context.Background())
- require.NoError(t, err)
- require.True(t, proto.Equal(gotEntity, entity))
- _, err = reader.ReadGlobalEntity(context.Background())
- require.Equal(t, err, io.EOF)
-
- gotEntity, err = reader.ReadLocalEntity(context.Background())
- require.NoError(t, err)
- require.True(t, proto.Equal(gotEntity, entity))
- _, err = reader.ReadLocalEntity(context.Background())
- require.Equal(t, err, io.EOF)
}
// TestSegmentFileReader_NoData verifies that the reader return io.EOF when there is no data
diff --git a/vault/bench/smoke_bench_test.go b/vault/bench/smoke_bench_test.go
deleted file mode 100644
index 812caef8151e..000000000000
--- a/vault/bench/smoke_bench_test.go
+++ /dev/null
@@ -1,136 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-package bench
-
-import (
- "context"
- "crypto/md5"
- "fmt"
- "testing"
-
- "github.com/hashicorp/go-uuid"
- "github.com/hashicorp/vault/api"
- vaulthttp "github.com/hashicorp/vault/http"
- "github.com/hashicorp/vault/vault"
- "github.com/stretchr/testify/require"
-)
-
-const (
- v2MountPath = "secret-v2"
- secretPath = "my-secret"
-)
-
-var secretData = map[string]interface{}{
- "foo": "bar",
-}
-
-// BenchmarkSmoke_KVV2 runs basic benchmarks on writes and reads to KVV2 on an inmem test cluster.
-func BenchmarkSmoke_KVV2(b *testing.B) {
- cluster := vault.NewTestCluster(b, &vault.CoreConfig{}, &vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- })
- client := cluster.Cores[0].Client
-
- // mount the KVv2 backend
- err := client.Sys().Mount(v2MountPath, &api.MountInput{
- Type: "kv-v2",
- })
- require.NoError(b, err)
-
- data, err := client.KVv2(v2MountPath).Put(context.Background(), secretPath, secretData)
- require.NoError(b, err)
-
- data, err = client.KVv2(v2MountPath).Get(context.Background(), secretPath)
- require.NoError(b, err)
-
- require.Equal(b, "kv", data.Raw.MountType)
- require.Equal(b, secretData, data.Data)
-
- bench := func(b *testing.B, dataSize int) {
- data, err := uuid.GenerateRandomBytes(dataSize)
- require.NoError(b, err)
-
- testName := b.Name()
-
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- key := fmt.Sprintf("%s/%x", secretPath, md5.Sum([]byte(fmt.Sprintf("%s-%d", testName, i))))
- _, err := client.KVv2(v2MountPath).Put(context.Background(), key, map[string]interface{}{
- "foo": string(data),
- })
- require.NoError(b, err)
- _, err = client.KVv2(v2MountPath).Get(context.Background(), secretPath)
- require.NoError(b, err)
- }
- }
-
- b.Run("kv-puts-and-gets", func(b *testing.B) { bench(b, 1024) })
-}
-
-// BenchmarkSmoke_ClusterCreation benchmarks the creation, start, and a cleanup of a vault.TestCluster.
-// Note that the cluster created here uses inmem Physical and HAPhysical backends.
-func BenchmarkSmoke_ClusterCreation(b *testing.B) {
- bench := func(b *testing.B) {
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- cluster := vault.NewTestCluster(b, &vault.CoreConfig{}, &vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- })
- cluster.Cleanup()
- }
- }
-
- b.Run("cluster-creation", func(b *testing.B) { bench(b) })
-}
-
-// BenchmarkSmoke_MountUnmount runs some basic benchmarking on mounting and unmounting
-func BenchmarkSmoke_MountUnmount(b *testing.B) {
- cluster := vault.NewTestCluster(b, &vault.CoreConfig{}, &vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- })
- client := cluster.Cores[0].Client
-
- bench := func(b *testing.B) {
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- err := client.Sys().Mount(v2MountPath, &api.MountInput{
- Type: "kv-v2",
- })
- require.NoError(b, err)
- err = client.Sys().Unmount(v2MountPath)
- require.NoError(b, err)
- }
- }
-
- b.Run("mount-unmount", func(b *testing.B) { bench(b) })
-}
-
-// BenchmarkSmoke_TokenCreationRevocation runs some basic benchmarking on tokens
-func BenchmarkSmoke_TokenCreationRevocation(b *testing.B) {
- cluster := vault.NewTestCluster(b, &vault.CoreConfig{}, &vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- })
- client := cluster.Cores[0].Client
- rootToken := client.Token()
-
- bench := func(b *testing.B) {
- b.ResetTimer()
- for i := 0; i < b.N; i++ {
- client.SetToken(rootToken)
- secret, err := client.Auth().Token().Create(&api.TokenCreateRequest{
- Policies: []string{"default"},
- TTL: "30m",
- })
- require.NoError(b, err)
- require.NotNil(b, secret)
- require.NotNil(b, secret.Auth)
- require.NotNil(b, secret.Auth.ClientToken)
- client.SetToken(secret.Auth.ClientToken)
- err = client.Auth().Token().RevokeSelf(secret.Auth.ClientToken)
- require.NoError(b, err)
- }
- }
-
- b.Run("token-creation-revocation", func(b *testing.B) { bench(b) })
-}
diff --git a/vault/core.go b/vault/core.go
index 12a9a9116936..981d2a2fe706 100644
--- a/vault/core.go
+++ b/vault/core.go
@@ -4573,16 +4573,3 @@ func (c *Core) setupAuditedHeadersConfig(ctx context.Context) error {
return nil
}
-
-// IsRemovedFromCluster checks whether this node has been removed from the
-// cluster. This is only applicable to physical HA backends that satisfy the
-// RemovableNodeHABackend interface. The value of the `ok` result will be false
-// if the HA and underlyingPhysical backends are nil or do not support this operation.
-func (c *Core) IsRemovedFromCluster() (removed, ok bool) {
- removableNodeHA := c.getRemovableHABackend()
- if removableNodeHA == nil {
- return false, false
- }
-
- return removableNodeHA.IsRemoved(), true
-}
diff --git a/vault/core_test.go b/vault/core_test.go
index 1681d2077510..350657405121 100644
--- a/vault/core_test.go
+++ b/vault/core_test.go
@@ -3400,11 +3400,15 @@ func TestDefaultDeadlock(t *testing.T) {
InduceDeadlock(t, testCore, 0)
}
+func RestoreDeadlockOpts() func() {
+ opts := deadlock.Opts
+ return func() {
+ deadlock.Opts = opts
+ }
+}
+
func InduceDeadlock(t *testing.T, vaultcore *Core, expected uint32) {
- priorDeadlockFunc := deadlock.Opts.OnPotentialDeadlock
- defer func() {
- deadlock.Opts.OnPotentialDeadlock = priorDeadlockFunc
- }()
+ defer RestoreDeadlockOpts()()
var deadlocks uint32
deadlock.Opts.OnPotentialDeadlock = func() {
atomic.AddUint32(&deadlocks, 1)
@@ -3696,60 +3700,3 @@ func TestBarrier_DeadlockDetection(t *testing.T) {
t.Fatal("barrierLock doesn't have deadlock detection enabled, it should")
}
}
-
-// TestCore_IsRemovedFromCluster exercises all the execution paths in the
-// IsRemovedFromCluster convenience method of the Core struct.
-func TestCore_IsRemovedFromCluster(t *testing.T) {
- core := &Core{}
-
- // Test case where both HA and underlying physical backends ares nil
- removed, ok := core.IsRemovedFromCluster()
- if removed || ok {
- t.Fatalf("expected removed and ok to be false, got removed: %v, ok: %v", removed, ok)
- }
-
- // Test case where HA backend is nil, but the underlying physical is there and does not support RemovableNodeHABackend
- core.underlyingPhysical = &MockHABackend{}
- removed, ok = core.IsRemovedFromCluster()
- if removed || ok {
- t.Fatalf("expected removed and ok to be false, got removed: %v, ok: %v", removed, ok)
- }
-
- // Test case where HA backend is nil, but the underlying physical is there, supports RemovableNodeHABackend, and is not removed
- mockHA := &MockRemovableNodeHABackend{}
- core.underlyingPhysical = mockHA
- removed, ok = core.IsRemovedFromCluster()
- if removed || !ok {
- t.Fatalf("expected removed to be false and ok to be true, got removed: %v, ok: %v", removed, ok)
- }
-
- // Test case where HA backend is nil, but the underlying physical is there, supports RemovableNodeHABackend, and is removed
- mockHA.Removed = true
- removed, ok = core.IsRemovedFromCluster()
- if !removed || !ok {
- t.Fatalf("expected removed to be false and ok to be true, got removed: %v, ok: %v", removed, ok)
- }
-
- // Test case where HA backend does not support RemovableNodeHABackend
- core.underlyingPhysical = &MockHABackend{}
- core.ha = &MockHABackend{}
- removed, ok = core.IsRemovedFromCluster()
- if removed || ok {
- t.Fatalf("expected removed and ok to be false, got removed: %v, ok: %v", removed, ok)
- }
-
- // Test case where HA backend supports RemovableNodeHABackend and is not removed
- mockHA.Removed = false
- core.ha = mockHA
- removed, ok = core.IsRemovedFromCluster()
- if removed || !ok {
- t.Fatalf("expected removed and ok to be true, got removed: %v, ok: %v", removed, ok)
- }
-
- // Test case where HA backend supports RemovableNodeHABackend and is removed
- mockHA.Removed = true
- removed, ok = core.IsRemovedFromCluster()
- if !removed || !ok {
- t.Fatalf("expected removed to be false and ok to be true, got removed: %v, ok: %v", removed, ok)
- }
-}
diff --git a/vault/external_tests/activity_testonly/acme_regeneration_test.go b/vault/external_tests/activity_testonly/acme_regeneration_test.go
index c663b174b84e..dbd8355f81a5 100644
--- a/vault/external_tests/activity_testonly/acme_regeneration_test.go
+++ b/vault/external_tests/activity_testonly/acme_regeneration_test.go
@@ -54,7 +54,7 @@ func TestACMERegeneration_RegenerateWithCurrentMonth(t *testing.T) {
})
require.NoError(t, err)
now := time.Now().UTC()
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(3).
// 3 months ago, 15 non-entity clients and 10 ACME clients
NewClientsSeen(15, clientcountutil.WithClientType("non-entity-token")).
@@ -116,7 +116,7 @@ func TestACMERegeneration_RegenerateMuchOlder(t *testing.T) {
client := cluster.Cores[0].Client
now := time.Now().UTC()
- _, _, _, err := clientcountutil.NewActivityLogData(client).
+ _, err := clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(5).
// 5 months ago, 15 non-entity clients and 10 ACME clients
NewClientsSeen(15, clientcountutil.WithClientType("non-entity-token")).
@@ -159,7 +159,7 @@ func TestACMERegeneration_RegeneratePreviousMonths(t *testing.T) {
client := cluster.Cores[0].Client
now := time.Now().UTC()
- _, _, _, err := clientcountutil.NewActivityLogData(client).
+ _, err := clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(3).
// 3 months ago, 15 non-entity clients and 10 ACME clients
NewClientsSeen(15, clientcountutil.WithClientType("non-entity-token")).
diff --git a/vault/external_tests/activity_testonly/activity_testonly_oss_test.go b/vault/external_tests/activity_testonly/activity_testonly_oss_test.go
index 4b59142008b6..dbb11c845332 100644
--- a/vault/external_tests/activity_testonly/activity_testonly_oss_test.go
+++ b/vault/external_tests/activity_testonly/activity_testonly_oss_test.go
@@ -29,7 +29,7 @@ func Test_ActivityLog_Disable(t *testing.T) {
"enabled": "enable",
})
require.NoError(t, err)
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(1).
NewClientsSeen(5).
NewCurrentMonthData().
diff --git a/vault/external_tests/activity_testonly/activity_testonly_test.go b/vault/external_tests/activity_testonly/activity_testonly_test.go
index 3e3a1259b2e3..8141357efe70 100644
--- a/vault/external_tests/activity_testonly/activity_testonly_test.go
+++ b/vault/external_tests/activity_testonly/activity_testonly_test.go
@@ -86,7 +86,7 @@ func Test_ActivityLog_LoseLeadership(t *testing.T) {
})
require.NoError(t, err)
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewCurrentMonthData().
NewClientsSeen(10).
Write(context.Background(), generation.WriteOptions_WRITE_ENTITIES)
@@ -121,7 +121,7 @@ func Test_ActivityLog_ClientsOverlapping(t *testing.T) {
"enabled": "enable",
})
require.NoError(t, err)
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(1).
NewClientsSeen(7).
NewCurrentMonthData().
@@ -169,7 +169,7 @@ func Test_ActivityLog_ClientsNewCurrentMonth(t *testing.T) {
"enabled": "enable",
})
require.NoError(t, err)
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(1).
NewClientsSeen(5).
NewCurrentMonthData().
@@ -203,7 +203,7 @@ func Test_ActivityLog_EmptyDataMonths(t *testing.T) {
"enabled": "enable",
})
require.NoError(t, err)
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewCurrentMonthData().
NewClientsSeen(10).
Write(context.Background(), generation.WriteOptions_WRITE_PRECOMPUTED_QUERIES, generation.WriteOptions_WRITE_ENTITIES)
@@ -243,7 +243,7 @@ func Test_ActivityLog_FutureEndDate(t *testing.T) {
"enabled": "enable",
})
require.NoError(t, err)
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(1).
NewClientsSeen(10).
NewCurrentMonthData().
@@ -316,7 +316,7 @@ func Test_ActivityLog_ClientTypeResponse(t *testing.T) {
_, err := client.Logical().Write("sys/internal/counters/config", map[string]interface{}{
"enabled": "enable",
})
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewCurrentMonthData().
NewClientsSeen(10, clientcountutil.WithClientType(tc.clientType)).
Write(context.Background(), generation.WriteOptions_WRITE_ENTITIES)
@@ -369,7 +369,7 @@ func Test_ActivityLogCurrentMonth_Response(t *testing.T) {
_, err := client.Logical().Write("sys/internal/counters/config", map[string]interface{}{
"enabled": "enable",
})
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewCurrentMonthData().
NewClientsSeen(10, clientcountutil.WithClientType(tc.clientType)).
Write(context.Background(), generation.WriteOptions_WRITE_ENTITIES)
@@ -420,7 +420,7 @@ func Test_ActivityLog_Deduplication(t *testing.T) {
_, err := client.Logical().Write("sys/internal/counters/config", map[string]interface{}{
"enabled": "enable",
})
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(3).
NewClientsSeen(10, clientcountutil.WithClientType(tc.clientType)).
NewPreviousMonthData(2).
@@ -462,7 +462,7 @@ func Test_ActivityLog_MountDeduplication(t *testing.T) {
require.NoError(t, err)
now := time.Now().UTC()
- _, localPaths, globalPaths, err := clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewPreviousMonthData(1).
NewClientSeen(clientcountutil.WithClientMount("sys")).
NewClientSeen(clientcountutil.WithClientMount("secret")).
@@ -473,10 +473,6 @@ func Test_ActivityLog_MountDeduplication(t *testing.T) {
NewClientSeen(clientcountutil.WithClientMount("sys")).
Write(context.Background(), generation.WriteOptions_WRITE_PRECOMPUTED_QUERIES, generation.WriteOptions_WRITE_ENTITIES)
require.NoError(t, err)
- // cubbyhole is local, 2 segments must exist for 2 months seen
- require.Len(t, localPaths, 2)
- // 2 global segments must exist for 2 months seen
- require.Len(t, globalPaths, 2)
resp, err := client.Logical().ReadWithData("sys/internal/counters/activity", map[string][]string{
"end_time": {timeutil.EndOfMonth(now).Format(time.RFC3339)},
@@ -673,7 +669,7 @@ func Test_ActivityLog_Export_Sudo(t *testing.T) {
rootToken := client.Token()
- _, _, _, err = clientcountutil.NewActivityLogData(client).
+ _, err = clientcountutil.NewActivityLogData(client).
NewCurrentMonthData().
NewClientsSeen(10).
Write(context.Background(), generation.WriteOptions_WRITE_ENTITIES)
@@ -849,7 +845,7 @@ func TestHandleQuery_MultipleMounts(t *testing.T) {
}
// Write all the client count data
- _, _, _, err = activityLogGenerator.Write(context.Background(), generation.WriteOptions_WRITE_PRECOMPUTED_QUERIES, generation.WriteOptions_WRITE_ENTITIES)
+ _, err = activityLogGenerator.Write(context.Background(), generation.WriteOptions_WRITE_PRECOMPUTED_QUERIES, generation.WriteOptions_WRITE_ENTITIES)
require.NoError(t, err)
endOfCurrentMonth := timeutil.EndOfMonth(time.Now().UTC())
diff --git a/vault/external_tests/audit/audit_test.go b/vault/external_tests/audit/audit_test.go
deleted file mode 100644
index 3dedb298d057..000000000000
--- a/vault/external_tests/audit/audit_test.go
+++ /dev/null
@@ -1,300 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-package audit
-
-import (
- "bufio"
- "context"
- "encoding/json"
- "fmt"
- "os"
- "strings"
- "testing"
-
- "github.com/hashicorp/vault/api"
- "github.com/hashicorp/vault/api/auth/userpass"
- "github.com/hashicorp/vault/helper/testhelpers/minimal"
- "github.com/stretchr/testify/require"
-)
-
-// TestAudit_HMACFields verifies that all appropriate fields in audit
-// request and response entries are HMACed properly. The fields in question are:
-// - request.headers.x-correlation-id
-// - request.data: all sub-fields
-// - respnse.auth.client_token
-// - response.auth.accessor
-// - response.data: all sub-fields
-// - response.wrap_info.token
-// - response.wrap_info.accessor
-func TestAudit_HMACFields(t *testing.T) {
- const hmacPrefix = "hmac-sha256:"
-
- cluster := minimal.NewTestSoloCluster(t, nil)
- client := cluster.Cores[0].Client
-
- tempDir := t.TempDir()
- logFile, err := os.CreateTemp(tempDir, "")
- require.NoError(t, err)
- devicePath := "file"
- deviceData := map[string]any{
- "type": "file",
- "description": "",
- "local": false,
- "options": map[string]any{
- "file_path": logFile.Name(),
- },
- }
-
- _, err = client.Logical().Write("sys/config/auditing/request-headers/x-correlation-id", map[string]interface{}{
- "hmac": true,
- })
- require.NoError(t, err)
-
- // Request 1
- // Enable the audit device. A test probe request will audited along
- // with the associated creation response
- _, err = client.Logical().Write("sys/audit/"+devicePath, deviceData)
- require.NoError(t, err)
-
- // Request 2
- // Ensure the device has been created.
- devices, err := client.Sys().ListAudit()
- require.NoError(t, err)
- require.Len(t, devices, 1)
-
- // Request 3
- // Enable the userpass auth method (this will be an audited action)
- err = client.Sys().EnableAuthWithOptions("userpass", &api.EnableAuthOptions{
- Type: "userpass",
- })
- require.NoError(t, err)
-
- username := "jdoe"
- password := "abc123"
-
- // Request 4
- // Create a user with a password (another audited action)
- _, err = client.Logical().Write(fmt.Sprintf("auth/userpass/users/%s", username), map[string]interface{}{
- "password": password,
- })
- require.NoError(t, err)
-
- authInput, err := userpass.NewUserpassAuth(username, &userpass.Password{FromString: password})
- require.NoError(t, err)
-
- newClient, err := client.Clone()
- require.NoError(t, err)
-
- correlationID := "correlation-id-foo"
- newClient.AddHeader("x-correlation-id", correlationID)
-
- // Request 5
- authOutput, err := newClient.Auth().Login(context.Background(), authInput)
- require.NoError(t, err)
-
- // Request 6
- hashedPassword, err := client.Sys().AuditHash(devicePath, password)
- require.NoError(t, err)
-
- // Request 7
- hashedClientToken, err := client.Sys().AuditHash(devicePath, authOutput.Auth.ClientToken)
- require.NoError(t, err)
-
- // Request 8
- hashedAccessor, err := client.Sys().AuditHash(devicePath, authOutput.Auth.Accessor)
- require.NoError(t, err)
-
- // Request 9
- wrapResp, err := client.Logical().Write("sys/wrapping/wrap", map[string]interface{}{
- "foo": "bar",
- })
- require.NoError(t, err)
-
- // Request 10
- hashedBar, err := client.Sys().AuditHash(devicePath, "bar")
- require.NoError(t, err)
-
- // Request 11
- hashedWrapAccessor, err := client.Sys().AuditHash(devicePath, wrapResp.WrapInfo.Accessor)
- require.NoError(t, err)
-
- // Request 12
- hashedWrapToken, err := client.Sys().AuditHash(devicePath, wrapResp.WrapInfo.Token)
- require.NoError(t, err)
-
- // Request 13
- hashedCorrelationID, err := client.Sys().AuditHash(devicePath, correlationID)
- require.NoError(t, err)
-
- // Request 14
- // Disable the audit device. The request will be audited but not the response.
- _, err = client.Logical().Delete("sys/audit/" + devicePath)
- require.NoError(t, err)
-
- // Request 15
- // Ensure the device has been deleted. This will not be audited.
- devices, err = client.Sys().ListAudit()
- require.NoError(t, err)
- require.Len(t, devices, 0)
-
- entries := make([]map[string]interface{}, 0)
- scanner := bufio.NewScanner(logFile)
-
- for scanner.Scan() {
- entry := make(map[string]interface{})
-
- err := json.Unmarshal(scanner.Bytes(), &entry)
- require.NoError(t, err)
-
- entries = append(entries, entry)
- }
-
- // This count includes the initial test probe upon creation of the audit device
- require.Equal(t, 27, len(entries))
-
- loginReqEntry := entries[8]
- loginRespEntry := entries[9]
-
- loginRequestFromReq := loginReqEntry["request"].(map[string]interface{})
- loginRequestDataFromReq := loginRequestFromReq["data"].(map[string]interface{})
- loginHeadersFromReq := loginRequestFromReq["headers"].(map[string]interface{})
-
- loginRequestFromResp := loginRespEntry["request"].(map[string]interface{})
- loginRequestDataFromResp := loginRequestFromResp["data"].(map[string]interface{})
- loginHeadersFromResp := loginRequestFromResp["headers"].(map[string]interface{})
-
- loginAuth := loginRespEntry["auth"].(map[string]interface{})
-
- require.True(t, strings.HasPrefix(loginRequestDataFromReq["password"].(string), hmacPrefix))
- require.Equal(t, loginRequestDataFromReq["password"].(string), hashedPassword)
-
- require.True(t, strings.HasPrefix(loginRequestDataFromResp["password"].(string), hmacPrefix))
- require.Equal(t, loginRequestDataFromResp["password"].(string), hashedPassword)
-
- require.True(t, strings.HasPrefix(loginAuth["client_token"].(string), hmacPrefix))
- require.Equal(t, loginAuth["client_token"].(string), hashedClientToken)
-
- require.True(t, strings.HasPrefix(loginAuth["accessor"].(string), hmacPrefix))
- require.Equal(t, loginAuth["accessor"].(string), hashedAccessor)
-
- xCorrelationIDFromReq := loginHeadersFromReq["x-correlation-id"].([]interface{})
- require.Equal(t, len(xCorrelationIDFromReq), 1)
- require.True(t, strings.HasPrefix(xCorrelationIDFromReq[0].(string), hmacPrefix))
- require.Equal(t, xCorrelationIDFromReq[0].(string), hashedCorrelationID)
-
- xCorrelationIDFromResp := loginHeadersFromResp["x-correlation-id"].([]interface{})
- require.Equal(t, len(xCorrelationIDFromResp), 1)
- require.True(t, strings.HasPrefix(xCorrelationIDFromReq[0].(string), hmacPrefix))
- require.Equal(t, xCorrelationIDFromResp[0].(string), hashedCorrelationID)
-
- wrapReqEntry := entries[16]
- wrapRespEntry := entries[17]
-
- wrapRequestFromReq := wrapReqEntry["request"].(map[string]interface{})
- wrapRequestDataFromReq := wrapRequestFromReq["data"].(map[string]interface{})
-
- wrapRequestFromResp := wrapRespEntry["request"].(map[string]interface{})
- wrapRequestDataFromResp := wrapRequestFromResp["data"].(map[string]interface{})
-
- require.True(t, strings.HasPrefix(wrapRequestDataFromReq["foo"].(string), hmacPrefix))
- require.Equal(t, wrapRequestDataFromReq["foo"].(string), hashedBar)
-
- require.True(t, strings.HasPrefix(wrapRequestDataFromResp["foo"].(string), hmacPrefix))
- require.Equal(t, wrapRequestDataFromResp["foo"].(string), hashedBar)
-
- wrapResponseData := wrapRespEntry["response"].(map[string]interface{})
- wrapInfo := wrapResponseData["wrap_info"].(map[string]interface{})
-
- require.True(t, strings.HasPrefix(wrapInfo["accessor"].(string), hmacPrefix))
- require.Equal(t, wrapInfo["accessor"].(string), hashedWrapAccessor)
-
- require.True(t, strings.HasPrefix(wrapInfo["token"].(string), hmacPrefix))
- require.Equal(t, wrapInfo["token"].(string), hashedWrapToken)
-}
-
-// TestAudit_Headers validates that headers are audited correctly. This includes
-// the default headers (x-correlation-id and user-agent) along with user-specified
-// headers.
-func TestAudit_Headers(t *testing.T) {
- cluster := minimal.NewTestSoloCluster(t, nil)
- client := cluster.Cores[0].Client
-
- tempDir := t.TempDir()
- logFile, err := os.CreateTemp(tempDir, "")
- require.NoError(t, err)
- devicePath := "file"
- deviceData := map[string]any{
- "type": "file",
- "description": "",
- "local": false,
- "options": map[string]any{
- "file_path": logFile.Name(),
- },
- }
-
- _, err = client.Logical().Write("sys/config/auditing/request-headers/x-some-header", map[string]interface{}{
- "hmac": false,
- })
- require.NoError(t, err)
-
- // User-Agent header is audited by default
- client.AddHeader("User-Agent", "foo-agent")
-
- // X-Some-Header has been added to audited headers manually
- client.AddHeader("X-Some-Header", "some-value")
-
- // X-Some-Other-Header will not be audited
- client.AddHeader("X-Some-Other-Header", "some-other-value")
-
- // Request 1
- // Enable the audit device. A test probe request will audited along
- // with the associated creation response
- _, err = client.Logical().Write("sys/audit/"+devicePath, deviceData)
- require.NoError(t, err)
-
- // Request 2
- // Ensure the device has been created.
- devices, err := client.Sys().ListAudit()
- require.NoError(t, err)
- require.Len(t, devices, 1)
-
- // Request 3
- resp, err := client.Sys().SealStatus()
- require.NoError(t, err)
- require.NotEmpty(t, resp)
-
- expectedHeaders := map[string]interface{}{
- "user-agent": []interface{}{"foo-agent"},
- "x-some-header": []interface{}{"some-value"},
- }
-
- entries := make([]map[string]interface{}, 0)
- scanner := bufio.NewScanner(logFile)
-
- for scanner.Scan() {
- entry := make(map[string]interface{})
-
- err := json.Unmarshal(scanner.Bytes(), &entry)
- require.NoError(t, err)
-
- request, ok := entry["request"].(map[string]interface{})
- require.True(t, ok)
-
- // test probe will not have headers set
- requestPath, ok := request["path"].(string)
- require.True(t, ok)
-
- if requestPath != "sys/audit/test" {
- headers, ok := request["headers"].(map[string]interface{})
-
- require.True(t, ok)
- require.Equal(t, expectedHeaders, headers)
- }
-
- entries = append(entries, entry)
- }
-
- // This count includes the initial test probe upon creation of the audit device
- require.Equal(t, 4, len(entries))
-}
diff --git a/vault/external_tests/raft/raft_test.go b/vault/external_tests/raft/raft_test.go
index 33dc48d67c43..9f36486c3e4f 100644
--- a/vault/external_tests/raft/raft_test.go
+++ b/vault/external_tests/raft/raft_test.go
@@ -1360,33 +1360,3 @@ func TestRaft_Join_InitStatus(t *testing.T) {
verifyInitStatus(i, true)
}
}
-
-// TestRaftCluster_Removed creates a 3 node raft cluster and then removes one of
-// the nodes. The test verifies that a write on the removed node errors, and that
-// the removed node is sealed.
-func TestRaftCluster_Removed(t *testing.T) {
- t.Parallel()
- cluster, _ := raftCluster(t, nil)
- defer cluster.Cleanup()
-
- follower := cluster.Cores[2]
- followerClient := follower.Client
- _, err := followerClient.Logical().Write("secret/foo", map[string]interface{}{
- "test": "data",
- })
- require.NoError(t, err)
-
- _, err = cluster.Cores[0].Client.Logical().Write("/sys/storage/raft/remove-peer", map[string]interface{}{
- "server_id": follower.NodeID,
- })
- followerClient.SetCheckRedirect(func(request *http.Request, requests []*http.Request) error {
- require.Fail(t, "request caused a redirect", request.URL.Path)
- return fmt.Errorf("no redirects allowed")
- })
- require.NoError(t, err)
- _, err = followerClient.Logical().Write("secret/foo", map[string]interface{}{
- "test": "other_data",
- })
- require.Error(t, err)
- require.True(t, follower.Sealed())
-}
diff --git a/vault/external_tests/raftha/raft_ha_test.go b/vault/external_tests/raftha/raft_ha_test.go
index bbeb78eb3923..3f8c57d53533 100644
--- a/vault/external_tests/raftha/raft_ha_test.go
+++ b/vault/external_tests/raftha/raft_ha_test.go
@@ -14,7 +14,6 @@ import (
vaulthttp "github.com/hashicorp/vault/http"
"github.com/hashicorp/vault/sdk/helper/logging"
"github.com/hashicorp/vault/vault"
- "github.com/stretchr/testify/require"
)
func TestRaft_HA_NewCluster(t *testing.T) {
@@ -55,20 +54,6 @@ func TestRaft_HA_NewCluster(t *testing.T) {
})
}
-// TestRaftHA_Recover_Cluster test that we can recover data and re-boostrap a cluster
-// that was created with raft HA enabled but is not using raft as the storage backend.
-func TestRaftHA_Recover_Cluster(t *testing.T) {
- logger := logging.NewVaultLogger(hclog.Debug).Named(t.Name())
- t.Run("file", func(t *testing.T) {
- physBundle := teststorage.MakeFileBackend(t, logger)
- testRaftHARecoverCluster(t, physBundle, logger)
- })
- t.Run("inmem", func(t *testing.T) {
- physBundle := teststorage.MakeInmemBackend(t, logger)
- testRaftHARecoverCluster(t, physBundle, logger)
- })
-}
-
func testRaftHANewCluster(t *testing.T, bundler teststorage.PhysicalBackendBundler, addClientCerts bool) {
var conf vault.CoreConfig
opts := vault.TestClusterOptions{HandlerFunc: vaulthttp.Handler}
@@ -132,95 +117,6 @@ func testRaftHANewCluster(t *testing.T, bundler teststorage.PhysicalBackendBundl
}
}
-// testRaftHARecoverCluster : in this test, we're going to create a raft HA cluster and store a test secret in a KVv2
-// We're going to simulate an outage and destroy the cluster but we'll keep the storage backend.
-// We'll recreate a new cluster with the same storage backend and ensure that we can recover using
-// sys/storage/raft/bootstrap. We'll check that the new cluster
-// is functional and no data was lost: we can get the test secret from the KVv2.
-func testRaftHARecoverCluster(t *testing.T, physBundle *vault.PhysicalBackendBundle, logger hclog.Logger) {
- opts := vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- // We're not testing the HA, only that it can be recovered. No need for multiple cores.
- NumCores: 1,
- }
-
- haStorage, haCleanup := teststorage.MakeReusableRaftHAStorage(t, logger, opts.NumCores, physBundle)
- defer haCleanup()
- haStorage.Setup(nil, &opts)
- cluster := vault.NewTestCluster(t, nil, &opts)
-
- var (
- clusterBarrierKeys [][]byte
- clusterRootToken string
- )
- clusterBarrierKeys = cluster.BarrierKeys
- clusterRootToken = cluster.RootToken
- leaderCore := cluster.Cores[0]
- testhelpers.EnsureCoreUnsealed(t, cluster, leaderCore)
-
- leaderClient := cluster.Cores[0].Client
- leaderClient.SetToken(clusterRootToken)
-
- // Mount a KVv2 backend to store a test data
- err := leaderClient.Sys().Mount("kv", &api.MountInput{
- Type: "kv-v2",
- })
- require.NoError(t, err)
-
- kvData := map[string]interface{}{
- "data": map[string]interface{}{
- "kittens": "awesome",
- },
- }
-
- // Store the test data in the KVv2 backend
- _, err = leaderClient.Logical().Write("kv/data/test_known_data", kvData)
- require.NoError(t, err)
-
- // We delete the current cluster. We keep the storage backend so we can recover the cluster
- cluster.Cleanup()
-
- // We now have a raft HA cluster with a KVv2 backend enabled and a test data.
- // We're now going to delete the cluster and create a new raft HA cluster with the same backend storage
- // and ensure we can recover to a working vault cluster and don't lose the data from the backend storage.
-
- opts = vault.TestClusterOptions{
- HandlerFunc: vaulthttp.Handler,
- // We're not testing the HA, only that it can be recovered. No need for multiple cores.
- NumCores: 1,
- // It's already initialized as we keep the same storage backend.
- SkipInit: true,
- }
- haStorage, haCleanup = teststorage.MakeReusableRaftHAStorage(t, logger, opts.NumCores, physBundle)
- defer haCleanup()
- haStorage.Setup(nil, &opts)
- clusterRestored := vault.NewTestCluster(t, nil, &opts)
-
- clusterRestored.BarrierKeys = clusterBarrierKeys
- clusterRestored.RootToken = clusterRootToken
- leaderCoreRestored := clusterRestored.Cores[0]
-
- testhelpers.EnsureCoresUnsealed(t, clusterRestored)
-
- leaderClientRestored := clusterRestored.Cores[0].Client
-
- // We now reset the TLS keyring and bootstrap the cluster again.
- _, err = leaderClientRestored.Logical().Write("sys/storage/raft/bootstrap", nil)
- require.NoError(t, err)
-
- vault.TestWaitActive(t, leaderCoreRestored.Core)
- // Core should be active and cluster in a working state. We should be able to
- // read the data from the KVv2 backend.
- leaderClientRestored.SetToken(clusterRootToken)
- secretRaw, err := leaderClientRestored.Logical().Read("kv/data/test_known_data")
- require.NoError(t, err)
-
- data := secretRaw.Data["data"]
- dataAsMap := data.(map[string]interface{})
- require.NotNil(t, dataAsMap)
- require.Equal(t, "awesome", dataAsMap["kittens"])
-}
-
func TestRaft_HA_ExistingCluster(t *testing.T) {
t.Parallel()
conf := vault.CoreConfig{
diff --git a/vault/ha.go b/vault/ha.go
index 46fc7f7757b4..9e063cfde985 100644
--- a/vault/ha.go
+++ b/vault/ha.go
@@ -1223,16 +1223,3 @@ func (c *Core) SetNeverBecomeActive(on bool) {
atomic.StoreUint32(c.neverBecomeActive, 0)
}
}
-
-func (c *Core) getRemovableHABackend() physical.RemovableNodeHABackend {
- var haBackend physical.RemovableNodeHABackend
- if removableHA, ok := c.ha.(physical.RemovableNodeHABackend); ok {
- haBackend = removableHA
- }
-
- if removableHA, ok := c.underlyingPhysical.(physical.RemovableNodeHABackend); ok {
- haBackend = removableHA
- }
-
- return haBackend
-}
diff --git a/vault/hcp_link/proto/link_control/link_control.pb.go b/vault/hcp_link/proto/link_control/link_control.pb.go
index 952f0b0359a2..fccbb9c0bb60 100644
--- a/vault/hcp_link/proto/link_control/link_control.pb.go
+++ b/vault/hcp_link/proto/link_control/link_control.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: vault/hcp_link/proto/link_control/link_control.proto
@@ -31,9 +31,11 @@ type PurgePolicyRequest struct {
func (x *PurgePolicyRequest) Reset() {
*x = PurgePolicyRequest{}
- mi := &file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PurgePolicyRequest) String() string {
@@ -44,7 +46,7 @@ func (*PurgePolicyRequest) ProtoMessage() {}
func (x *PurgePolicyRequest) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -67,9 +69,11 @@ type PurgePolicyResponse struct {
func (x *PurgePolicyResponse) Reset() {
*x = PurgePolicyResponse{}
- mi := &file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PurgePolicyResponse) String() string {
@@ -80,7 +84,7 @@ func (*PurgePolicyResponse) ProtoMessage() {}
func (x *PurgePolicyResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -150,6 +154,32 @@ func file_vault_hcp_link_proto_link_control_link_control_proto_init() {
if File_vault_hcp_link_proto_link_control_link_control_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*PurgePolicyRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_link_control_link_control_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*PurgePolicyResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/vault/hcp_link/proto/link_control/link_control_grpc.pb.go b/vault/hcp_link/proto/link_control/link_control_grpc.pb.go
index 39fdf39c18e9..2cf5dc7758d6 100644
--- a/vault/hcp_link/proto/link_control/link_control_grpc.pb.go
+++ b/vault/hcp_link/proto/link_control/link_control_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: vault/hcp_link/proto/link_control/link_control.proto
@@ -18,8 +18,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
HCPLinkControl_PurgePolicy_FullMethodName = "/link_control.HCPLinkControl/PurgePolicy"
@@ -54,7 +54,7 @@ func (c *hCPLinkControlClient) PurgePolicy(ctx context.Context, in *PurgePolicyR
// HCPLinkControlServer is the server API for HCPLinkControl service.
// All implementations must embed UnimplementedHCPLinkControlServer
-// for forward compatibility.
+// for forward compatibility
type HCPLinkControlServer interface {
// PurgePolicy Forgets the current Batch token, and its associated policy,
// such that the policy is forced to be refreshed.
@@ -62,18 +62,14 @@ type HCPLinkControlServer interface {
mustEmbedUnimplementedHCPLinkControlServer()
}
-// UnimplementedHCPLinkControlServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedHCPLinkControlServer struct{}
+// UnimplementedHCPLinkControlServer must be embedded to have forward compatible implementations.
+type UnimplementedHCPLinkControlServer struct {
+}
func (UnimplementedHCPLinkControlServer) PurgePolicy(context.Context, *PurgePolicyRequest) (*PurgePolicyResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method PurgePolicy not implemented")
}
func (UnimplementedHCPLinkControlServer) mustEmbedUnimplementedHCPLinkControlServer() {}
-func (UnimplementedHCPLinkControlServer) testEmbeddedByValue() {}
// UnsafeHCPLinkControlServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to HCPLinkControlServer will
@@ -83,13 +79,6 @@ type UnsafeHCPLinkControlServer interface {
}
func RegisterHCPLinkControlServer(s grpc.ServiceRegistrar, srv HCPLinkControlServer) {
- // If the following call pancis, it indicates UnimplementedHCPLinkControlServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&HCPLinkControl_ServiceDesc, srv)
}
diff --git a/vault/hcp_link/proto/meta/meta.pb.go b/vault/hcp_link/proto/meta/meta.pb.go
index 55ed3f7fe8b3..fa69987fab08 100644
--- a/vault/hcp_link/proto/meta/meta.pb.go
+++ b/vault/hcp_link/proto/meta/meta.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: vault/hcp_link/proto/meta/meta.proto
@@ -31,9 +31,11 @@ type ListNamespacesRequest struct {
func (x *ListNamespacesRequest) Reset() {
*x = ListNamespacesRequest{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ListNamespacesRequest) String() string {
@@ -44,7 +46,7 @@ func (*ListNamespacesRequest) ProtoMessage() {}
func (x *ListNamespacesRequest) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -69,9 +71,11 @@ type ListNamespacesResponse struct {
func (x *ListNamespacesResponse) Reset() {
*x = ListNamespacesResponse{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ListNamespacesResponse) String() string {
@@ -82,7 +86,7 @@ func (*ListNamespacesResponse) ProtoMessage() {}
func (x *ListNamespacesResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -112,9 +116,11 @@ type ListMountsRequest struct {
func (x *ListMountsRequest) Reset() {
*x = ListMountsRequest{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ListMountsRequest) String() string {
@@ -125,7 +131,7 @@ func (*ListMountsRequest) ProtoMessage() {}
func (x *ListMountsRequest) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -152,9 +158,11 @@ type Mount struct {
func (x *Mount) Reset() {
*x = Mount{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Mount) String() string {
@@ -165,7 +173,7 @@ func (*Mount) ProtoMessage() {}
func (x *Mount) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -211,9 +219,11 @@ type ListMountsResponse struct {
func (x *ListMountsResponse) Reset() {
*x = ListMountsResponse{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ListMountsResponse) String() string {
@@ -224,7 +234,7 @@ func (*ListMountsResponse) ProtoMessage() {}
func (x *ListMountsResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -254,9 +264,11 @@ type ListAuthsRequest struct {
func (x *ListAuthsRequest) Reset() {
*x = ListAuthsRequest{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ListAuthsRequest) String() string {
@@ -267,7 +279,7 @@ func (*ListAuthsRequest) ProtoMessage() {}
func (x *ListAuthsRequest) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -294,9 +306,11 @@ type Auth struct {
func (x *Auth) Reset() {
*x = Auth{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[6]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[6]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Auth) String() string {
@@ -307,7 +321,7 @@ func (*Auth) ProtoMessage() {}
func (x *Auth) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[6]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -353,9 +367,11 @@ type ListAuthResponse struct {
func (x *ListAuthResponse) Reset() {
*x = ListAuthResponse{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[7]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[7]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ListAuthResponse) String() string {
@@ -366,7 +382,7 @@ func (*ListAuthResponse) ProtoMessage() {}
func (x *ListAuthResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[7]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -396,9 +412,11 @@ type GetClusterStatusRequest struct {
func (x *GetClusterStatusRequest) Reset() {
*x = GetClusterStatusRequest{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[8]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[8]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GetClusterStatusRequest) String() string {
@@ -409,7 +427,7 @@ func (*GetClusterStatusRequest) ProtoMessage() {}
func (x *GetClusterStatusRequest) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[8]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -434,9 +452,11 @@ type HANode struct {
func (x *HANode) Reset() {
*x = HANode{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[9]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[9]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *HANode) String() string {
@@ -447,7 +467,7 @@ func (*HANode) ProtoMessage() {}
func (x *HANode) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[9]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -480,9 +500,11 @@ type HAStatus struct {
func (x *HAStatus) Reset() {
*x = HAStatus{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[10]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[10]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *HAStatus) String() string {
@@ -493,7 +515,7 @@ func (*HAStatus) ProtoMessage() {}
func (x *HAStatus) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[10]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -542,9 +564,11 @@ type RaftServer struct {
func (x *RaftServer) Reset() {
*x = RaftServer{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[11]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[11]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RaftServer) String() string {
@@ -555,7 +579,7 @@ func (*RaftServer) ProtoMessage() {}
func (x *RaftServer) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[11]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -615,9 +639,11 @@ type RaftConfiguration struct {
func (x *RaftConfiguration) Reset() {
*x = RaftConfiguration{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[12]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[12]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RaftConfiguration) String() string {
@@ -628,7 +654,7 @@ func (*RaftConfiguration) ProtoMessage() {}
func (x *RaftConfiguration) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[12]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -661,9 +687,11 @@ type AutopilotServer struct {
func (x *AutopilotServer) Reset() {
*x = AutopilotServer{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[13]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[13]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *AutopilotServer) String() string {
@@ -674,7 +702,7 @@ func (*AutopilotServer) ProtoMessage() {}
func (x *AutopilotServer) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[13]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -714,9 +742,11 @@ type AutopilotStatus struct {
func (x *AutopilotStatus) Reset() {
*x = AutopilotStatus{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[14]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[14]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *AutopilotStatus) String() string {
@@ -727,7 +757,7 @@ func (*AutopilotStatus) ProtoMessage() {}
func (x *AutopilotStatus) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[14]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -768,9 +798,11 @@ type RaftStatus struct {
func (x *RaftStatus) Reset() {
*x = RaftStatus{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[15]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[15]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RaftStatus) String() string {
@@ -781,7 +813,7 @@ func (*RaftStatus) ProtoMessage() {}
func (x *RaftStatus) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[15]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -831,9 +863,11 @@ type GetClusterStatusResponse struct {
func (x *GetClusterStatusResponse) Reset() {
*x = GetClusterStatusResponse{}
- mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[16]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[16]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *GetClusterStatusResponse) String() string {
@@ -844,7 +878,7 @@ func (*GetClusterStatusResponse) ProtoMessage() {}
func (x *GetClusterStatusResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_meta_meta_proto_msgTypes[16]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -1069,6 +1103,212 @@ func file_vault_hcp_link_proto_meta_meta_proto_init() {
if File_vault_hcp_link_proto_meta_meta_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*ListNamespacesRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*ListNamespacesResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*ListMountsRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*Mount); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*ListMountsResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*ListAuthsRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[6].Exporter = func(v any, i int) any {
+ switch v := v.(*Auth); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[7].Exporter = func(v any, i int) any {
+ switch v := v.(*ListAuthResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[8].Exporter = func(v any, i int) any {
+ switch v := v.(*GetClusterStatusRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[9].Exporter = func(v any, i int) any {
+ switch v := v.(*HANode); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[10].Exporter = func(v any, i int) any {
+ switch v := v.(*HAStatus); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[11].Exporter = func(v any, i int) any {
+ switch v := v.(*RaftServer); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[12].Exporter = func(v any, i int) any {
+ switch v := v.(*RaftConfiguration); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[13].Exporter = func(v any, i int) any {
+ switch v := v.(*AutopilotServer); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[14].Exporter = func(v any, i int) any {
+ switch v := v.(*AutopilotStatus); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[15].Exporter = func(v any, i int) any {
+ switch v := v.(*RaftStatus); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_meta_meta_proto_msgTypes[16].Exporter = func(v any, i int) any {
+ switch v := v.(*GetClusterStatusResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/vault/hcp_link/proto/meta/meta_grpc.pb.go b/vault/hcp_link/proto/meta/meta_grpc.pb.go
index 9768a06d59a0..be1186698589 100644
--- a/vault/hcp_link/proto/meta/meta_grpc.pb.go
+++ b/vault/hcp_link/proto/meta/meta_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: vault/hcp_link/proto/meta/meta.proto
@@ -18,8 +18,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
HCPLinkMeta_ListNamespaces_FullMethodName = "/meta.HCPLinkMeta/ListNamespaces"
@@ -92,7 +92,7 @@ func (c *hCPLinkMetaClient) GetClusterStatus(ctx context.Context, in *GetCluster
// HCPLinkMetaServer is the server API for HCPLinkMeta service.
// All implementations must embed UnimplementedHCPLinkMetaServer
-// for forward compatibility.
+// for forward compatibility
type HCPLinkMetaServer interface {
// ListNamespaces will be used to recursively list all namespaces
ListNamespaces(context.Context, *ListNamespacesRequest) (*ListNamespacesResponse, error)
@@ -105,12 +105,9 @@ type HCPLinkMetaServer interface {
mustEmbedUnimplementedHCPLinkMetaServer()
}
-// UnimplementedHCPLinkMetaServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedHCPLinkMetaServer struct{}
+// UnimplementedHCPLinkMetaServer must be embedded to have forward compatible implementations.
+type UnimplementedHCPLinkMetaServer struct {
+}
func (UnimplementedHCPLinkMetaServer) ListNamespaces(context.Context, *ListNamespacesRequest) (*ListNamespacesResponse, error) {
return nil, status.Errorf(codes.Unimplemented, "method ListNamespaces not implemented")
@@ -125,7 +122,6 @@ func (UnimplementedHCPLinkMetaServer) GetClusterStatus(context.Context, *GetClus
return nil, status.Errorf(codes.Unimplemented, "method GetClusterStatus not implemented")
}
func (UnimplementedHCPLinkMetaServer) mustEmbedUnimplementedHCPLinkMetaServer() {}
-func (UnimplementedHCPLinkMetaServer) testEmbeddedByValue() {}
// UnsafeHCPLinkMetaServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to HCPLinkMetaServer will
@@ -135,13 +131,6 @@ type UnsafeHCPLinkMetaServer interface {
}
func RegisterHCPLinkMetaServer(s grpc.ServiceRegistrar, srv HCPLinkMetaServer) {
- // If the following call pancis, it indicates UnimplementedHCPLinkMetaServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&HCPLinkMeta_ServiceDesc, srv)
}
diff --git a/vault/hcp_link/proto/node_status/status.pb.go b/vault/hcp_link/proto/node_status/status.pb.go
index 1b275364c4a4..d23045303f63 100644
--- a/vault/hcp_link/proto/node_status/status.pb.go
+++ b/vault/hcp_link/proto/node_status/status.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: vault/hcp_link/proto/node_status/status.proto
@@ -93,9 +93,11 @@ type RaftStatus struct {
func (x *RaftStatus) Reset() {
*x = RaftStatus{}
- mi := &file_vault_hcp_link_proto_node_status_status_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_node_status_status_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *RaftStatus) String() string {
@@ -106,7 +108,7 @@ func (*RaftStatus) ProtoMessage() {}
func (x *RaftStatus) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_node_status_status_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -159,9 +161,11 @@ type LinkedClusterNodeStatusResponse struct {
func (x *LinkedClusterNodeStatusResponse) Reset() {
*x = LinkedClusterNodeStatusResponse{}
- mi := &file_vault_hcp_link_proto_node_status_status_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_hcp_link_proto_node_status_status_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *LinkedClusterNodeStatusResponse) String() string {
@@ -172,7 +176,7 @@ func (*LinkedClusterNodeStatusResponse) ProtoMessage() {}
func (x *LinkedClusterNodeStatusResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_hcp_link_proto_node_status_status_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -456,6 +460,32 @@ func file_vault_hcp_link_proto_node_status_status_proto_init() {
if File_vault_hcp_link_proto_node_status_status_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_vault_hcp_link_proto_node_status_status_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*RaftStatus); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_hcp_link_proto_node_status_status_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*LinkedClusterNodeStatusResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/vault/identity_store_test.go b/vault/identity_store_test.go
index cce4da413cd3..da16ae6fac92 100644
--- a/vault/identity_store_test.go
+++ b/vault/identity_store_test.go
@@ -5,8 +5,6 @@ package vault
import (
"context"
- "fmt"
- "math/rand"
"strings"
"testing"
"time"
@@ -15,15 +13,11 @@ import (
"github.com/go-test/deep"
uuid "github.com/hashicorp/go-uuid"
credGithub "github.com/hashicorp/vault/builtin/credential/github"
- "github.com/hashicorp/vault/builtin/credential/userpass"
credUserpass "github.com/hashicorp/vault/builtin/credential/userpass"
"github.com/hashicorp/vault/helper/identity"
"github.com/hashicorp/vault/helper/namespace"
"github.com/hashicorp/vault/helper/storagepacker"
- "github.com/hashicorp/vault/helper/testhelpers/corehelpers"
"github.com/hashicorp/vault/sdk/logical"
- "github.com/hashicorp/vault/sdk/physical"
- "github.com/hashicorp/vault/sdk/physical/inmem"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"google.golang.org/protobuf/proto"
@@ -1406,229 +1400,3 @@ func TestIdentityStoreInvalidate_TemporaryEntity(t *testing.T) {
assert.NoError(t, err)
assert.Nil(t, item)
}
-
-// TestEntityStoreLoadingIsDeterministic is a property-based test that ensures
-// the loading logic of the entity store is deterministic. This is important
-// because we perform certain merges and corrections of duplicates on load and
-// non-deterministic order can cause divergence between different nodes or even
-// after seal/unseal cycles on one node. Loading _should_ be deterministic
-// anyway if all data in storage was correct see comments inline for examples of
-// ways storage can be corrupt with respect to the expected schema invariants.
-func TestEntityStoreLoadingIsDeterministic(t *testing.T) {
- // Create some state in store that could trigger non-deterministic behavior.
- // The nature of the identity store schema is such that the order of loading
- // entities etc shouldn't matter even if it was non-deterministic, however due
- // to many and varied historical (and possibly current/future) bugs, we have
- // seen many cases where storage ends up with duplicates persisted. This is
- // not ideal of course and our code attempts to "fix" on the fly with merges
- // on load. But it's hampered by the fact that the current implementation does
- // not load entities in a deterministic order. which means that different
- // nodes potentially resolve merges differently. This test proves that that
- // happens and should hopefully provide some confidence that we don't
- // introduce non-determinism in the future somehow. It's a bit odd we have to
- // inject essentially invalid data into storage to trigger the issue but
- // that's what we get in real life sometimes!
- logger := corehelpers.NewTestLogger(t)
- ims, err := inmem.NewTransactionalInmemHA(nil, logger)
- require.NoError(t, err)
-
- cfg := &CoreConfig{
- Physical: ims,
- HAPhysical: ims.(physical.HABackend),
- Logger: logger,
- BuiltinRegistry: corehelpers.NewMockBuiltinRegistry(),
- CredentialBackends: map[string]logical.Factory{
- "userpass": userpass.Factory,
- },
- }
-
- c, sealKeys, rootToken := TestCoreUnsealedWithConfig(t, cfg)
-
- // Inject values into storage
- upme, err := TestUserpassMount(c, false)
- require.NoError(t, err)
- localMe, err := TestUserpassMount(c, true)
- require.NoError(t, err)
-
- ctx := context.Background()
-
- // We create 100 entities each with 1 non-local alias and 1 local alias. We
- // then randomly create duplicate alias or local alias entries with a
- // probability that is unrealistic but ensures we have duplicates on every
- // test run with high probability and more than 1 duplicate often.
- for i := 0; i <= 100; i++ {
- id := fmt.Sprintf("entity-%d", i)
- alias := fmt.Sprintf("alias-%d", i)
- localAlias := fmt.Sprintf("localalias-%d", i)
- e := makeEntityForPacker(t, id, c.identityStore.entityPacker)
- attachAlias(t, e, alias, upme)
- attachAlias(t, e, localAlias, localMe)
- err = TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e.ID, e)
- require.NoError(t, err)
-
- // Subset of entities get a duplicate alias and/or duplicate local alias.
- // We'll use a probability of 0.3 for each dup so that we expect at least a
- // few double and maybe triple duplicates of each type every few test runs
- // and may have duplicates of both types or neither etc.
- pDup := 0.3
- rnd := rand.Float64()
- dupeNum := 1
- for rnd < pDup && dupeNum < 10 {
- e := makeEntityForPacker(t, fmt.Sprintf("entity-%d-dup-%d", i, dupeNum), c.identityStore.entityPacker)
- attachAlias(t, e, alias, upme)
- err = TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e.ID, e)
- require.NoError(t, err)
- // Toss again to see if we continue
- rnd = rand.Float64()
- dupeNum++
- }
- // Toss the coin again to see if there are any local dupes
- dupeNum = 1
- rnd = rand.Float64()
- for rnd < pDup && dupeNum < 10 {
- e := makeEntityForPacker(t, fmt.Sprintf("entity-%d-localdup-%d", i, dupeNum), c.identityStore.entityPacker)
- attachAlias(t, e, localAlias, localMe)
- err = TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e.ID, e)
- require.NoError(t, err)
- rnd = rand.Float64()
- dupeNum++
- }
- // One more edge case is that it's currently possible as of the time of
- // writing for a failure during entity invalidation to result in a permanent
- // "cached" entity in the local alias packer even though we do have the
- // replicated entity in the entity packer too. This is a bug and will
- // hopefully be fixed at some point soon, but even after it is it's
- // important that we still test for it since existing clusters may still
- // have this persistent state. Pick a low probability but one we're very
- // likely to hit in 100 iterations and write the entity to the local alias
- // table too (this mimics the behavior of cacheTemporaryEntity).
- pFailedLocalAliasInvalidation := 0.02
- if rand.Float64() < pFailedLocalAliasInvalidation {
- err = TestHelperWriteToStoragePacker(ctx, c.identityStore.localAliasPacker, e.ID+tmpSuffix, e)
- require.NoError(t, err)
- }
- }
-
- // Create some groups
- for i := 0; i <= 100; i++ {
- id := fmt.Sprintf("group-%d", i)
- bucketKey := c.identityStore.groupPacker.BucketKey(id)
- // Add an alias to every other group
- alias := ""
- if i%2 == 0 {
- alias = fmt.Sprintf("groupalias-%d", i)
- }
- e := makeGroupWithIDAndAlias(t, id, alias, bucketKey, upme)
- err = TestHelperWriteToStoragePacker(ctx, c.identityStore.groupPacker, e.ID, e)
- require.NoError(t, err)
- }
- // Now add 10 groups with the same alias to ensure duplicates don't cause
- // non-deterministic behavior.
- for i := 0; i <= 10; i++ {
- id := fmt.Sprintf("group-dup-%d", i)
- bucketKey := c.identityStore.groupPacker.BucketKey(id)
- e := makeGroupWithIDAndAlias(t, id, "groupalias-dup", bucketKey, upme)
- err = TestHelperWriteToStoragePacker(ctx, c.identityStore.groupPacker, e.ID, e)
- require.NoError(t, err)
- }
-
- entIdentityStoreDeterminismTestSetup(t, ctx, c, upme, localMe)
-
- // Storage is now primed for the test.
-
- // To test that this is deterministic we need to load from storage a bunch of
- // times and make sure we get the same result. For easier debugging we'll
- // build a list of human readable ids that we can compare.
- lastIDs := []string{}
- for i := 0; i < 10; i++ {
- // Seal and unseal to reload the identity store
- require.NoError(t, c.Seal(rootToken))
- require.True(t, c.Sealed())
- for _, key := range sealKeys {
- unsealed, err := c.Unseal(key)
- require.NoError(t, err)
- if unsealed {
- break
- }
- }
- require.False(t, c.Sealed())
-
- // Identity store should be loaded now. Check it's contents.
- loadedIDs := []string{}
-
- tx := c.identityStore.db.Txn(false)
-
- // Entities + their aliases
- iter, err := tx.LowerBound(entitiesTable, "id", "")
- require.NoError(t, err)
- for item := iter.Next(); item != nil; item = iter.Next() {
- // We already added "type" prefixes to the IDs when creating them so just
- // append here.
- e := item.(*identity.Entity)
- loadedIDs = append(loadedIDs, e.ID)
- for _, a := range e.Aliases {
- loadedIDs = append(loadedIDs, a.ID)
- }
- }
- // This is a non-triviality check to make sure we actually loaded stuff and
- // are not just passing because of a bug in the test.
- numLoaded := len(loadedIDs)
- require.Greater(t, numLoaded, 300, "not enough entities and aliases loaded on attempt %d", i)
-
- // Groups
- iter, err = tx.LowerBound(groupsTable, "id", "")
- require.NoError(t, err)
- for item := iter.Next(); item != nil; item = iter.Next() {
- g := item.(*identity.Group)
- loadedIDs = append(loadedIDs, g.ID)
- if g.Alias != nil {
- loadedIDs = append(loadedIDs, g.Alias.ID)
- }
- }
- // This is a non-triviality check to make sure we actually loaded stuff and
- // are not just passing because of a bug in the test.
- groupsLoaded := len(loadedIDs) - numLoaded
- require.Greater(t, groupsLoaded, 140, "not enough groups and aliases loaded on attempt %d", i)
-
- entIdentityStoreDeterminismAssert(t, i, loadedIDs, lastIDs)
-
- if i > 0 {
- // Should be in the same order if we are deterministic since MemDB has strong ordering.
- require.Equal(t, lastIDs, loadedIDs, "different result on attempt %d", i)
- }
- lastIDs = loadedIDs
- }
-}
-
-func makeGroupWithIDAndAlias(t *testing.T, id, alias, bucketKey string, me *MountEntry) *identity.Group {
- g := &identity.Group{
- ID: id,
- Name: id,
- NamespaceID: namespace.RootNamespaceID,
- BucketKey: bucketKey,
- }
- if alias != "" {
- g.Alias = &identity.Alias{
- ID: id,
- Name: alias,
- CanonicalID: id,
- MountType: me.Type,
- MountAccessor: me.Accessor,
- }
- }
- return g
-}
-
-func makeLocalAliasWithID(t *testing.T, id, entityID string, bucketKey string, me *MountEntry) *identity.LocalAliases {
- return &identity.LocalAliases{
- Aliases: []*identity.Alias{
- {
- ID: id,
- Name: id,
- CanonicalID: entityID,
- MountType: me.Type,
- MountAccessor: me.Accessor,
- },
- },
- }
-}
diff --git a/vault/identity_store_test_stubs_oss.go b/vault/identity_store_test_stubs_oss.go
deleted file mode 100644
index e3a570388522..000000000000
--- a/vault/identity_store_test_stubs_oss.go
+++ /dev/null
@@ -1,21 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-//go:build !enterprise
-
-package vault
-
-import (
- "context"
- "testing"
-)
-
-//go:generate go run github.com/hashicorp/vault/tools/stubmaker
-
-func entIdentityStoreDeterminismTestSetup(t *testing.T, ctx context.Context, c *Core, upme, localme *MountEntry) {
- // no op
-}
-
-func entIdentityStoreDeterminismAssert(t *testing.T, i int, loadedIDs, lastIDs []string) {
- // no op
-}
diff --git a/vault/identity_store_util.go b/vault/identity_store_util.go
index 67d27aa82994..4e963e48e719 100644
--- a/vault/identity_store_util.go
+++ b/vault/identity_store_util.go
@@ -9,7 +9,6 @@ import (
"fmt"
"strings"
"sync"
- "testing"
"time"
metrics "github.com/armon/go-metrics"
@@ -219,20 +218,12 @@ func (i *IdentityStore) loadCachedEntitiesOfLocalAliases(ctx context.Context) er
i.logger.Debug("cached entities of local alias entries", "num_buckets", len(existing))
// Make the channels used for the worker pool
- broker := make(chan int)
+ broker := make(chan string)
quit := make(chan bool)
- // We want to process the buckets in deterministic order so that duplicate
- // merging is deterministic. We still want to load in parallel though so
- // create a slice of result channels, one for each bucket. We need each result
- // and err chan to be 1 buffered so we can leave a result there even if the
- // processing loop is blocking on an earlier bucket still.
- results := make([]chan *storagepacker.Bucket, len(existing))
- errs := make([]chan error, len(existing))
- for j := range existing {
- results[j] = make(chan *storagepacker.Bucket, 1)
- errs[j] = make(chan error, 1)
- }
+ // Buffer these channels to prevent deadlocks
+ errs := make(chan error, len(existing))
+ result := make(chan *storagepacker.Bucket, len(existing))
// Use a wait group
wg := &sync.WaitGroup{}
@@ -245,21 +236,20 @@ func (i *IdentityStore) loadCachedEntitiesOfLocalAliases(ctx context.Context) er
for {
select {
- case idx, ok := <-broker:
+ case key, ok := <-broker:
// broker has been closed, we are done
if !ok {
return
}
- key := existing[idx]
bucket, err := i.localAliasPacker.GetBucket(ctx, localAliasesBucketsPrefix+key)
if err != nil {
- errs[idx] <- err
+ errs <- err
continue
}
// Write results out to the result channel
- results[idx] <- bucket
+ result <- bucket
// quit early
case <-quit:
@@ -273,7 +263,7 @@ func (i *IdentityStore) loadCachedEntitiesOfLocalAliases(ctx context.Context) er
wg.Add(1)
go func() {
defer wg.Done()
- for j := range existing {
+ for j, key := range existing {
if j%500 == 0 {
i.logger.Debug("cached entities of local aliases loading", "progress", j)
}
@@ -283,7 +273,7 @@ func (i *IdentityStore) loadCachedEntitiesOfLocalAliases(ctx context.Context) er
return
default:
- broker <- j
+ broker <- key
}
}
@@ -298,16 +288,16 @@ func (i *IdentityStore) loadCachedEntitiesOfLocalAliases(ctx context.Context) er
i.logger.Info("cached entities of local aliases restored")
}()
- // Restore each key by pulling from the slice of result chans
- for j := range existing {
+ // Restore each key by pulling from the result chan
+ for j := 0; j < len(existing); j++ {
select {
- case err := <-errs[j]:
+ case err := <-errs:
// Close all go routines
close(quit)
return err
- case bucket := <-results[j]:
+ case bucket := <-result:
// If there is no entry, nothing to restore
if bucket == nil {
continue
@@ -348,24 +338,13 @@ func (i *IdentityStore) loadEntities(ctx context.Context) error {
i.logger.Debug("entities collected", "num_existing", len(existing))
duplicatedAccessors := make(map[string]struct{})
- // Make the channels used for the worker pool. We send the index into existing
- // so that we can keep results in the same order as inputs. Note that this is
- // goroutine safe as long as we never mutate existing again in this method
- // which we don't.
- broker := make(chan int)
+ // Make the channels used for the worker pool
+ broker := make(chan string)
quit := make(chan bool)
- // We want to process the buckets in deterministic order so that duplicate
- // merging is deterministic. We still want to load in parallel though so
- // create a slice of result channels, one for each bucket. We need each result
- // and err chan to be 1 buffered so we can leave a result there even if the
- // processing loop is blocking on an earlier bucket still.
- results := make([]chan *storagepacker.Bucket, len(existing))
- errs := make([]chan error, len(existing))
- for j := range existing {
- results[j] = make(chan *storagepacker.Bucket, 1)
- errs[j] = make(chan error, 1)
- }
+ // Buffer these channels to prevent deadlocks
+ errs := make(chan error, len(existing))
+ result := make(chan *storagepacker.Bucket, len(existing))
// Use a wait group
wg := &sync.WaitGroup{}
@@ -378,21 +357,20 @@ func (i *IdentityStore) loadEntities(ctx context.Context) error {
for {
select {
- case idx, ok := <-broker:
+ case key, ok := <-broker:
// broker has been closed, we are done
if !ok {
return
}
- key := existing[idx]
bucket, err := i.entityPacker.GetBucket(ctx, storagepacker.StoragePackerBucketsPrefix+key)
if err != nil {
- errs[idx] <- err
+ errs <- err
continue
}
// Write results out to the result channel
- results[idx] <- bucket
+ result <- bucket
// quit early
case <-quit:
@@ -406,13 +384,17 @@ func (i *IdentityStore) loadEntities(ctx context.Context) error {
wg.Add(1)
go func() {
defer wg.Done()
- for j := range existing {
+ for j, key := range existing {
+ if j%500 == 0 {
+ i.logger.Debug("entities loading", "progress", j)
+ }
+
select {
case <-quit:
return
default:
- broker <- j
+ broker <- key
}
}
@@ -422,14 +404,14 @@ func (i *IdentityStore) loadEntities(ctx context.Context) error {
// Restore each key by pulling from the result chan
LOOP:
- for j := range existing {
+ for j := 0; j < len(existing); j++ {
select {
- case err = <-errs[j]:
+ case err = <-errs:
// Close all go routines
close(quit)
break LOOP
- case bucket := <-results[j]:
+ case bucket := <-result:
// If there is no entry, nothing to restore
if bucket == nil {
continue
@@ -2629,25 +2611,3 @@ func (i *IdentityStore) countEntitiesByMountAccessor(ctx context.Context) (map[s
return byMountAccessor, nil
}
-
-func makeEntityForPacker(_t *testing.T, id string, p *storagepacker.StoragePacker) *identity.Entity {
- return &identity.Entity{
- ID: id,
- Name: id,
- NamespaceID: namespace.RootNamespaceID,
- BucketKey: p.BucketKey(id),
- }
-}
-
-func attachAlias(t *testing.T, e *identity.Entity, name string, me *MountEntry) *identity.Alias {
- t.Helper()
- a := &identity.Alias{
- ID: name,
- Name: name,
- CanonicalID: e.ID,
- MountType: me.Type,
- MountAccessor: me.Accessor,
- }
- e.UpsertAlias(a)
- return a
-}
diff --git a/vault/logical_system.go b/vault/logical_system.go
index 3e328fd1dc26..4b3b18b2ec4d 100644
--- a/vault/logical_system.go
+++ b/vault/logical_system.go
@@ -5503,25 +5503,24 @@ func (b *SystemBackend) pathInternalOpenAPI(ctx context.Context, req *logical.Re
}
type SealStatusResponse struct {
- Type string `json:"type"`
- Initialized bool `json:"initialized"`
- Sealed bool `json:"sealed"`
- T int `json:"t"`
- N int `json:"n"`
- Progress int `json:"progress"`
- Nonce string `json:"nonce"`
- Version string `json:"version"`
- BuildDate string `json:"build_date"`
- Migration bool `json:"migration"`
- ClusterName string `json:"cluster_name,omitempty"`
- ClusterID string `json:"cluster_id,omitempty"`
- RecoverySeal bool `json:"recovery_seal"`
- StorageType string `json:"storage_type,omitempty"`
- HCPLinkStatus string `json:"hcp_link_status,omitempty"`
- HCPLinkResourceID string `json:"hcp_link_resource_ID,omitempty"`
- Warnings []string `json:"warnings,omitempty"`
- RecoverySealType string `json:"recovery_seal_type,omitempty"`
- RemovedFromCluster *bool `json:"removed_from_cluster,omitempty"`
+ Type string `json:"type"`
+ Initialized bool `json:"initialized"`
+ Sealed bool `json:"sealed"`
+ T int `json:"t"`
+ N int `json:"n"`
+ Progress int `json:"progress"`
+ Nonce string `json:"nonce"`
+ Version string `json:"version"`
+ BuildDate string `json:"build_date"`
+ Migration bool `json:"migration"`
+ ClusterName string `json:"cluster_name,omitempty"`
+ ClusterID string `json:"cluster_id,omitempty"`
+ RecoverySeal bool `json:"recovery_seal"`
+ StorageType string `json:"storage_type,omitempty"`
+ HCPLinkStatus string `json:"hcp_link_status,omitempty"`
+ HCPLinkResourceID string `json:"hcp_link_resource_ID,omitempty"`
+ Warnings []string `json:"warnings,omitempty"`
+ RecoverySealType string `json:"recovery_seal_type,omitempty"`
}
type SealBackendStatus struct {
@@ -5558,22 +5557,16 @@ func (core *Core) GetSealStatus(ctx context.Context, lock bool) (*SealStatusResp
hcpLinkStatus, resourceIDonHCP := core.GetHCPLinkStatus()
redactVersion, _, redactClusterName, _ := logical.CtxRedactionSettingsValue(ctx)
- var removed *bool
- isRemoved, shouldInclude := core.IsRemovedFromCluster()
- if shouldInclude {
- removed = &isRemoved
- }
if sealConfig == nil {
s := &SealStatusResponse{
- Type: core.SealAccess().BarrierSealConfigType().String(),
- Initialized: initialized,
- Sealed: true,
- RecoverySeal: core.SealAccess().RecoveryKeySupported(),
- StorageType: core.StorageType(),
- Version: version.GetVersion().VersionNumber(),
- BuildDate: version.BuildDate,
- RemovedFromCluster: removed,
+ Type: core.SealAccess().BarrierSealConfigType().String(),
+ Initialized: initialized,
+ Sealed: true,
+ RecoverySeal: core.SealAccess().RecoveryKeySupported(),
+ StorageType: core.StorageType(),
+ Version: version.GetVersion().VersionNumber(),
+ BuildDate: version.BuildDate,
}
if redactVersion {
@@ -5615,22 +5608,21 @@ func (core *Core) GetSealStatus(ctx context.Context, lock bool) (*SealStatusResp
progress, nonce := core.SecretProgress(lock)
s := &SealStatusResponse{
- Type: sealType,
- Initialized: initialized,
- Sealed: sealed,
- T: sealConfig.SecretThreshold,
- N: sealConfig.SecretShares,
- Progress: progress,
- Nonce: nonce,
- Version: version.GetVersion().VersionNumber(),
- BuildDate: version.BuildDate,
- Migration: core.IsInSealMigrationMode(lock) && !core.IsSealMigrated(lock),
- RemovedFromCluster: removed,
- ClusterName: clusterName,
- ClusterID: clusterID,
- RecoverySeal: core.SealAccess().RecoveryKeySupported(),
- RecoverySealType: recoverySealType,
- StorageType: core.StorageType(),
+ Type: sealType,
+ Initialized: initialized,
+ Sealed: sealed,
+ T: sealConfig.SecretThreshold,
+ N: sealConfig.SecretShares,
+ Progress: progress,
+ Nonce: nonce,
+ Version: version.GetVersion().VersionNumber(),
+ BuildDate: version.BuildDate,
+ Migration: core.IsInSealMigrationMode(lock) && !core.IsSealMigrated(lock),
+ ClusterName: clusterName,
+ ClusterID: clusterID,
+ RecoverySeal: core.SealAccess().RecoveryKeySupported(),
+ RecoverySealType: recoverySealType,
+ StorageType: core.StorageType(),
}
if resourceIDonHCP != "" {
diff --git a/vault/logical_system_activity_write_testonly.go b/vault/logical_system_activity_write_testonly.go
index 3f6f4caa5663..de52e4de35a8 100644
--- a/vault/logical_system_activity_write_testonly.go
+++ b/vault/logical_system_activity_write_testonly.go
@@ -85,16 +85,14 @@ func (b *SystemBackend) handleActivityWriteData(ctx context.Context, request *lo
for _, opt := range input.Write {
opts[opt] = struct{}{}
}
- paths, localPaths, globalPaths, err := generated.write(ctx, opts, b.Core.activityLog, now)
+ paths, err := generated.write(ctx, opts, b.Core.activityLog, now)
if err != nil {
b.logger.Debug("failed to write activity log data", "error", err.Error())
return logical.ErrorResponse("failed to write data"), err
}
return &logical.Response{
Data: map[string]interface{}{
- "paths": paths,
- "local_paths": localPaths,
- "global_paths": globalPaths,
+ "paths": paths,
},
}, nil
}
@@ -103,19 +101,9 @@ func (b *SystemBackend) handleActivityWriteData(ctx context.Context, request *lo
type singleMonthActivityClients struct {
// clients are indexed by ID
clients []*activity.EntityRecord
- // globalClients are indexed by ID
- globalClients []*activity.EntityRecord
- // localClients are indexed by ID
- localClients []*activity.EntityRecord
// predefinedSegments map from the segment number to the client's index in
// the clients slice
predefinedSegments map[int][]int
- // predefinedGlobalSegments map from the segment number to the client's index in
- // the clients slice
- predefinedGlobalSegments map[int][]int
- // predefinedLocalSegments map from the segment number to the client's index in
- // the clients slice
- predefinedLocalSegments map[int][]int
// generationParameters holds the generation request
generationParameters *generation.Data
}
@@ -126,24 +114,11 @@ type multipleMonthsActivityClients struct {
months []*singleMonthActivityClients
}
-func (s *singleMonthActivityClients) addEntityRecord(core *Core, record *activity.EntityRecord, segmentIndex *int) {
+func (s *singleMonthActivityClients) addEntityRecord(record *activity.EntityRecord, segmentIndex *int) {
s.clients = append(s.clients, record)
- local, _ := core.activityLog.isClientLocal(record)
- if !local {
- s.globalClients = append(s.globalClients, record)
- } else {
- s.localClients = append(s.localClients, record)
- }
if segmentIndex != nil {
index := len(s.clients) - 1
s.predefinedSegments[*segmentIndex] = append(s.predefinedSegments[*segmentIndex], index)
- if !local {
- globalIndex := len(s.globalClients) - 1
- s.predefinedGlobalSegments[*segmentIndex] = append(s.predefinedGlobalSegments[*segmentIndex], globalIndex)
- } else {
- localIndex := len(s.localClients) - 1
- s.predefinedLocalSegments[*segmentIndex] = append(s.predefinedLocalSegments[*segmentIndex], localIndex)
- }
}
}
@@ -151,7 +126,7 @@ func (s *singleMonthActivityClients) addEntityRecord(core *Core, record *activit
// keys are the segment index, and the value are the clients that were seen in
// that index. If the value is an empty slice, then it's an empty index. If the
// value is nil, then it's a skipped index
-func (s *singleMonthActivityClients) populateSegments(predefinedSegments map[int][]int, clients []*activity.EntityRecord) (map[int][]*activity.EntityRecord, error) {
+func (s *singleMonthActivityClients) populateSegments() (map[int][]*activity.EntityRecord, error) {
segments := make(map[int][]*activity.EntityRecord)
ignoreIndexes := make(map[int]struct{})
skipIndexes := s.generationParameters.SkipSegmentIndexes
@@ -167,11 +142,11 @@ func (s *singleMonthActivityClients) populateSegments(predefinedSegments map[int
}
// if we have predefined segments, then we can construct the map using those
- if len(predefinedSegments) > 0 {
- for segment, clientIndexes := range predefinedSegments {
+ if len(s.predefinedSegments) > 0 {
+ for segment, clientIndexes := range s.predefinedSegments {
clientsInSegment := make([]*activity.EntityRecord, 0, len(clientIndexes))
for _, idx := range clientIndexes {
- clientsInSegment = append(clientsInSegment, clients[idx])
+ clientsInSegment = append(clientsInSegment, s.clients[idx])
}
segments[segment] = clientsInSegment
}
@@ -180,8 +155,8 @@ func (s *singleMonthActivityClients) populateSegments(predefinedSegments map[int
// determine how many segments are necessary to store the clients for this month
// using the default storage limits
- numNecessarySegments := len(clients) / ActivitySegmentClientCapacity
- if len(clients)%ActivitySegmentClientCapacity != 0 {
+ numNecessarySegments := len(s.clients) / ActivitySegmentClientCapacity
+ if len(s.clients)%ActivitySegmentClientCapacity != 0 {
numNecessarySegments++
}
totalSegmentCount := numNecessarySegments
@@ -198,8 +173,8 @@ func (s *singleMonthActivityClients) populateSegments(predefinedSegments map[int
}
// determine how many clients should be in each segment
- segmentSizes := len(clients) / usableSegmentCount
- if len(clients)%usableSegmentCount != 0 {
+ segmentSizes := len(s.clients) / usableSegmentCount
+ if len(s.clients)%usableSegmentCount != 0 {
segmentSizes++
}
@@ -209,14 +184,14 @@ func (s *singleMonthActivityClients) populateSegments(predefinedSegments map[int
clientIndex := 0
for i := 0; i < totalSegmentCount; i++ {
- if clientIndex >= len(clients) {
+ if clientIndex >= len(s.clients) {
break
}
if _, ok := ignoreIndexes[i]; ok {
continue
}
- for len(segments[i]) < segmentSizes && clientIndex < len(clients) {
- segments[i] = append(segments[i], clients[clientIndex])
+ for len(segments[i]) < segmentSizes && clientIndex < len(s.clients) {
+ segments[i] = append(segments[i], s.clients[clientIndex])
clientIndex++
}
}
@@ -225,7 +200,7 @@ func (s *singleMonthActivityClients) populateSegments(predefinedSegments map[int
// addNewClients generates clients according to the given parameters, and adds them to the month
// the client will always have the mountAccessor as its mount accessor
-func (s *singleMonthActivityClients) addNewClients(c *generation.Client, mountAccessor string, segmentIndex *int, monthsAgo int32, now time.Time, core *Core) error {
+func (s *singleMonthActivityClients) addNewClients(c *generation.Client, mountAccessor string, segmentIndex *int, monthsAgo int32, now time.Time) error {
count := 1
if c.Count > 1 {
count = int(c.Count)
@@ -249,8 +224,7 @@ func (s *singleMonthActivityClients) addNewClients(c *generation.Client, mountAc
return err
}
}
-
- s.addEntityRecord(core, record, segmentIndex)
+ s.addEntityRecord(record, segmentIndex)
}
return nil
}
@@ -319,7 +293,7 @@ func (m *multipleMonthsActivityClients) processMonth(ctx context.Context, core *
}
}
- err = m.addClientToMonth(month.GetMonthsAgo(), clients, mountAccessor, segmentIndex, now, core)
+ err = m.addClientToMonth(month.GetMonthsAgo(), clients, mountAccessor, segmentIndex, now)
if err != nil {
return err
}
@@ -345,14 +319,14 @@ func (m *multipleMonthsActivityClients) processMonth(ctx context.Context, core *
return nil
}
-func (m *multipleMonthsActivityClients) addClientToMonth(monthsAgo int32, c *generation.Client, mountAccessor string, segmentIndex *int, now time.Time, core *Core) error {
+func (m *multipleMonthsActivityClients) addClientToMonth(monthsAgo int32, c *generation.Client, mountAccessor string, segmentIndex *int, now time.Time) error {
if c.Repeated || c.RepeatedFromMonth > 0 {
- return m.addRepeatedClients(monthsAgo, c, mountAccessor, segmentIndex, core)
+ return m.addRepeatedClients(monthsAgo, c, mountAccessor, segmentIndex)
}
- return m.months[monthsAgo].addNewClients(c, mountAccessor, segmentIndex, monthsAgo, now, core)
+ return m.months[monthsAgo].addNewClients(c, mountAccessor, segmentIndex, monthsAgo, now)
}
-func (m *multipleMonthsActivityClients) addRepeatedClients(monthsAgo int32, c *generation.Client, mountAccessor string, segmentIndex *int, core *Core) error {
+func (m *multipleMonthsActivityClients) addRepeatedClients(monthsAgo int32, c *generation.Client, mountAccessor string, segmentIndex *int) error {
addingTo := m.months[monthsAgo]
repeatedFromMonth := monthsAgo + 1
if c.RepeatedFromMonth > 0 {
@@ -365,7 +339,7 @@ func (m *multipleMonthsActivityClients) addRepeatedClients(monthsAgo int32, c *g
}
for _, client := range repeatedFrom.clients {
if c.ClientType == client.ClientType && mountAccessor == client.MountAccessor && c.Namespace == client.NamespaceID {
- addingTo.addEntityRecord(core, client, segmentIndex)
+ addingTo.addEntityRecord(client, segmentIndex)
numClients--
if numClients == 0 {
break
@@ -395,10 +369,8 @@ func (m *multipleMonthsActivityClients) timestampForMonth(i int, now time.Time)
return now
}
-func (m *multipleMonthsActivityClients) write(ctx context.Context, opts map[generation.WriteOptions]struct{}, activityLog *ActivityLog, now time.Time) ([]string, []string, []string, error) {
+func (m *multipleMonthsActivityClients) write(ctx context.Context, opts map[generation.WriteOptions]struct{}, activityLog *ActivityLog, now time.Time) ([]string, error) {
paths := []string{}
- globalPaths := []string{}
- localPaths := []string{}
_, writePQ := opts[generation.WriteOptions_WRITE_PRECOMPUTED_QUERIES]
_, writeDistinctClients := opts[generation.WriteOptions_WRITE_DISTINCT_CLIENTS]
@@ -411,9 +383,9 @@ func (m *multipleMonthsActivityClients) write(ctx context.Context, opts map[gene
continue
}
timestamp := m.timestampForMonth(i, now)
- segments, err := month.populateSegments(month.predefinedSegments, month.clients)
+ segments, err := month.populateSegments()
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
for segmentIndex, segment := range segments {
if segment == nil {
@@ -425,56 +397,12 @@ func (m *multipleMonthsActivityClients) write(ctx context.Context, opts map[gene
currentClients: &activity.EntityActivityLog{Clients: segment},
clientSequenceNumber: uint64(segmentIndex),
tokenCount: &activity.TokenCount{},
- }, true, "")
+ }, true)
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
paths = append(paths, entityPath)
}
- if len(month.globalClients) > 0 {
- globalSegments, err := month.populateSegments(month.predefinedGlobalSegments, month.globalClients)
- if err != nil {
- return nil, nil, nil, err
- }
- for segmentIndex, segment := range globalSegments {
- if segment == nil {
- // skip the index
- continue
- }
- entityPath, err := activityLog.saveSegmentEntitiesInternal(ctx, segmentInfo{
- startTimestamp: timestamp.Unix(),
- currentClients: &activity.EntityActivityLog{Clients: segment},
- clientSequenceNumber: uint64(segmentIndex),
- tokenCount: &activity.TokenCount{},
- }, true, activityGlobalPathPrefix)
- if err != nil {
- return nil, nil, nil, err
- }
- globalPaths = append(globalPaths, entityPath)
- }
- }
- if len(month.localClients) > 0 {
- localSegments, err := month.populateSegments(month.predefinedLocalSegments, month.localClients)
- if err != nil {
- return nil, nil, nil, err
- }
- for segmentIndex, segment := range localSegments {
- if segment == nil {
- // skip the index
- continue
- }
- entityPath, err := activityLog.saveSegmentEntitiesInternal(ctx, segmentInfo{
- startTimestamp: timestamp.Unix(),
- currentClients: &activity.EntityActivityLog{Clients: segment},
- clientSequenceNumber: uint64(segmentIndex),
- tokenCount: &activity.TokenCount{},
- }, true, activityLocalPathPrefix)
- if err != nil {
- return nil, nil, nil, err
- }
- localPaths = append(localPaths, entityPath)
- }
- }
}
if writePQ || writeDistinctClients {
// start with the oldest month of data, and create precomputed queries
@@ -495,16 +423,16 @@ func (m *multipleMonthsActivityClients) write(ctx context.Context, opts map[gene
if writeIntentLog {
err := activityLog.writeIntentLog(ctx, m.latestTimestamp(now, false).Unix(), m.latestTimestamp(now, true).UTC())
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
}
wg := sync.WaitGroup{}
err := activityLog.refreshFromStoredLog(ctx, &wg, now)
if err != nil {
- return nil, nil, nil, err
+ return nil, err
}
wg.Wait()
- return paths, localPaths, globalPaths, nil
+ return paths, nil
}
func (m *multipleMonthsActivityClients) latestTimestamp(now time.Time, includeCurrentMonth bool) time.Time {
@@ -532,9 +460,7 @@ func newMultipleMonthsActivityClients(numberOfMonths int) *multipleMonthsActivit
}
for i := 0; i < numberOfMonths; i++ {
m.months[i] = &singleMonthActivityClients{
- predefinedSegments: make(map[int][]int),
- predefinedGlobalSegments: make(map[int][]int),
- predefinedLocalSegments: make(map[int][]int),
+ predefinedSegments: make(map[int][]int),
}
}
return m
@@ -558,28 +484,6 @@ type sliceSegmentReader struct {
i int
}
-// ReadGlobalEntity here is a dummy implementation.
-// Segment reader is never used when writing using the ClientCountUtil library
-func (p *sliceSegmentReader) ReadGlobalEntity(ctx context.Context) (*activity.EntityActivityLog, error) {
- if p.i == len(p.records) {
- return nil, io.EOF
- }
- record := p.records[p.i]
- p.i++
- return &activity.EntityActivityLog{Clients: record}, nil
-}
-
-// ReadLocalEntity here is a dummy implementation.
-// Segment reader is never used when writing using the ClientCountUtil library
-func (p *sliceSegmentReader) ReadLocalEntity(ctx context.Context) (*activity.EntityActivityLog, error) {
- if p.i == len(p.records) {
- return nil, io.EOF
- }
- record := p.records[p.i]
- p.i++
- return &activity.EntityActivityLog{Clients: record}, nil
-}
-
func (p *sliceSegmentReader) ReadToken(ctx context.Context) (*activity.TokenCount, error) {
return nil, io.EOF
}
diff --git a/vault/logical_system_activity_write_testonly_test.go b/vault/logical_system_activity_write_testonly_test.go
index 4df992172d2b..5254c0aaed6b 100644
--- a/vault/logical_system_activity_write_testonly_test.go
+++ b/vault/logical_system_activity_write_testonly_test.go
@@ -167,12 +167,10 @@ func Test_singleMonthActivityClients_addNewClients(t *testing.T) {
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
- core, _, _ := TestCoreUnsealed(t)
m := &singleMonthActivityClients{
- predefinedSegments: make(map[int][]int),
- predefinedGlobalSegments: make(map[int][]int),
+ predefinedSegments: make(map[int][]int),
}
- err := m.addNewClients(tt.clients, tt.mount, tt.segmentIndex, 0, time.Now().UTC(), core)
+ err := m.addNewClients(tt.clients, tt.mount, tt.segmentIndex, 0, time.Now().UTC())
require.NoError(t, err)
numNew := tt.clients.Count
if numNew == 0 {
@@ -341,42 +339,41 @@ func Test_multipleMonthsActivityClients_processMonth_segmented(t *testing.T) {
// from 1 month ago and 2 months ago, and verifies that the correct clients are
// added based on namespace, mount, and non-entity attributes
func Test_multipleMonthsActivityClients_addRepeatedClients(t *testing.T) {
- core, _, _ := TestCoreUnsealed(t)
now := time.Now().UTC()
m := newMultipleMonthsActivityClients(3)
defaultMount := "default"
- require.NoError(t, m.addClientToMonth(2, &generation.Client{Count: 2}, "identity", nil, now, core))
- require.NoError(t, m.addClientToMonth(2, &generation.Client{Count: 2, Namespace: "other_ns"}, defaultMount, nil, now, core))
- require.NoError(t, m.addClientToMonth(1, &generation.Client{Count: 2}, defaultMount, nil, now, core))
- require.NoError(t, m.addClientToMonth(1, &generation.Client{Count: 2, ClientType: "non-entity"}, defaultMount, nil, now, core))
+ require.NoError(t, m.addClientToMonth(2, &generation.Client{Count: 2}, "identity", nil, now))
+ require.NoError(t, m.addClientToMonth(2, &generation.Client{Count: 2, Namespace: "other_ns"}, defaultMount, nil, now))
+ require.NoError(t, m.addClientToMonth(1, &generation.Client{Count: 2}, defaultMount, nil, now))
+ require.NoError(t, m.addClientToMonth(1, &generation.Client{Count: 2, ClientType: "non-entity"}, defaultMount, nil, now))
month2Clients := m.months[2].clients
month1Clients := m.months[1].clients
thisMonth := m.months[0]
// this will match the first client in month 1
- require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, Repeated: true}, defaultMount, nil, core))
+ require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, Repeated: true}, defaultMount, nil))
require.Contains(t, month1Clients, thisMonth.clients[0])
// this will match the 3rd client in month 1
- require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, Repeated: true, ClientType: "non-entity"}, defaultMount, nil, core))
+ require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, Repeated: true, ClientType: "non-entity"}, defaultMount, nil))
require.Equal(t, month1Clients[2], thisMonth.clients[1])
// this will match the first two clients in month 1
- require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 2, Repeated: true}, defaultMount, nil, core))
+ require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 2, Repeated: true}, defaultMount, nil))
require.Equal(t, month1Clients[0:2], thisMonth.clients[2:4])
// this will match the first client in month 2
- require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, RepeatedFromMonth: 2}, "identity", nil, core))
+ require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, RepeatedFromMonth: 2}, "identity", nil))
require.Equal(t, month2Clients[0], thisMonth.clients[4])
// this will match the 3rd client in month 2
- require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, RepeatedFromMonth: 2, Namespace: "other_ns"}, defaultMount, nil, core))
+ require.NoError(t, m.addRepeatedClients(0, &generation.Client{Count: 1, RepeatedFromMonth: 2, Namespace: "other_ns"}, defaultMount, nil))
require.Equal(t, month2Clients[2], thisMonth.clients[5])
- require.Error(t, m.addRepeatedClients(0, &generation.Client{Count: 1, RepeatedFromMonth: 2, Namespace: "other_ns"}, "other_mount", nil, core))
+ require.Error(t, m.addRepeatedClients(0, &generation.Client{Count: 1, RepeatedFromMonth: 2, Namespace: "other_ns"}, "other_mount", nil))
}
// Test_singleMonthActivityClients_populateSegments calls populateSegments for a
@@ -459,7 +456,7 @@ func Test_singleMonthActivityClients_populateSegments(t *testing.T) {
for _, tc := range cases {
t.Run(tc.name, func(t *testing.T) {
s := singleMonthActivityClients{predefinedSegments: tc.segments, clients: clients, generationParameters: &generation.Data{EmptySegmentIndexes: tc.emptyIndexes, SkipSegmentIndexes: tc.skipIndexes, NumSegments: int32(tc.numSegments)}}
- gotSegments, err := s.populateSegments(s.predefinedSegments, s.clients)
+ gotSegments, err := s.populateSegments()
require.NoError(t, err)
require.Equal(t, tc.wantSegments, gotSegments)
})
diff --git a/vault/logical_system_test.go b/vault/logical_system_test.go
index 66dfbbe34f87..47c71c1fb528 100644
--- a/vault/logical_system_test.go
+++ b/vault/logical_system_test.go
@@ -7349,48 +7349,3 @@ func TestFuzz_sanitizePath(t *testing.T) {
require.True(t, valid(path, newPath), `"%s" not sanitized correctly, got "%s"`, path, newPath)
}
}
-
-// TestSealStatus_Removed checks if the seal-status endpoint returns the
-// correct value for RemovedFromCluster when provided with different backends
-func TestSealStatus_Removed(t *testing.T) {
- removedCore, err := TestCoreWithMockRemovableNodeHABackend(t, true)
- require.NoError(t, err)
- notRemovedCore, err := TestCoreWithMockRemovableNodeHABackend(t, false)
- require.NoError(t, err)
- testCases := []struct {
- name string
- core *Core
- wantField bool
- wantTrue bool
- }{
- {
- name: "removed",
- core: removedCore,
- wantField: true,
- wantTrue: true,
- },
- {
- name: "not removed",
- core: notRemovedCore,
- wantField: true,
- wantTrue: false,
- },
- {
- name: "different backend",
- core: TestCore(t),
- wantField: false,
- },
- }
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- status, err := tc.core.GetSealStatus(context.Background(), true)
- require.NoError(t, err)
- if tc.wantField {
- require.NotNil(t, status.RemovedFromCluster)
- require.Equal(t, tc.wantTrue, *status.RemovedFromCluster)
- } else {
- require.Nil(t, status.RemovedFromCluster)
- }
- })
- }
-}
diff --git a/vault/raft.go b/vault/raft.go
index 416334868f9b..746cbbde0038 100644
--- a/vault/raft.go
+++ b/vault/raft.go
@@ -765,12 +765,7 @@ func (c *Core) raftCreateTLSKeyring(ctx context.Context) (*raft.TLSKeyring, erro
}
if raftTLSEntry != nil {
- // For Raft storage, the keyring should already be there, but
- // for situations with non-Raft storage and Raft HA, we can ignore this,
- // as it will need to be remade.
- if _, usingRaftStorage := c.underlyingPhysical.(*raft.RaftBackend); usingRaftStorage {
- return nil, fmt.Errorf("TLS keyring already present")
- }
+ return nil, fmt.Errorf("TLS keyring already present")
}
raftTLS, err := raft.GenerateTLSKey(c.secureRandomReader)
diff --git a/vault/request_forwarding.go b/vault/request_forwarding.go
index 51dfa3a9093f..619222c344aa 100644
--- a/vault/request_forwarding.go
+++ b/vault/request_forwarding.go
@@ -22,116 +22,13 @@ import (
"github.com/hashicorp/vault/helper/forwarding"
"github.com/hashicorp/vault/sdk/helper/consts"
"github.com/hashicorp/vault/sdk/logical"
- "github.com/hashicorp/vault/sdk/physical"
"github.com/hashicorp/vault/vault/cluster"
"github.com/hashicorp/vault/vault/replication"
"golang.org/x/net/http2"
"google.golang.org/grpc"
- "google.golang.org/grpc/codes"
"google.golang.org/grpc/keepalive"
- "google.golang.org/grpc/metadata"
- "google.golang.org/grpc/status"
)
-var (
- NotHAMember = "node is not in HA cluster membership"
- StatusNotHAMember = status.Errorf(codes.FailedPrecondition, NotHAMember)
-)
-
-const haNodeIDKey = "ha_node_id"
-
-func haIDFromContext(ctx context.Context) (string, bool) {
- md, ok := metadata.FromIncomingContext(ctx)
- if !ok {
- return "", false
- }
- res := md.Get(haNodeIDKey)
- if len(res) == 0 {
- return "", false
- }
- return res[0], true
-}
-
-// haMembershipServerCheck extracts the client's HA node ID from the context
-// and checks if this client has been removed. The function returns
-// StatusNotHAMember if the client has been removed
-func haMembershipServerCheck(ctx context.Context, c *Core, haBackend physical.RemovableNodeHABackend) error {
- if haBackend == nil {
- return nil
- }
- nodeID, ok := haIDFromContext(ctx)
- if !ok {
- return nil
- }
- removed, err := haBackend.IsNodeRemoved(ctx, nodeID)
- if err != nil {
- c.logger.Error("failed to check if node is removed", "error", err)
- return err
- }
- if removed {
- return StatusNotHAMember
- }
- return nil
-}
-
-func haMembershipUnaryServerInterceptor(c *Core, haBackend physical.RemovableNodeHABackend) grpc.UnaryServerInterceptor {
- return func(ctx context.Context, req any, info *grpc.UnaryServerInfo, handler grpc.UnaryHandler) (resp any, err error) {
- err = haMembershipServerCheck(ctx, c, haBackend)
- if err != nil {
- return nil, err
- }
- return handler(ctx, req)
- }
-}
-
-func haMembershipStreamServerInterceptor(c *Core, haBackend physical.RemovableNodeHABackend) grpc.StreamServerInterceptor {
- return func(srv any, ss grpc.ServerStream, info *grpc.StreamServerInfo, handler grpc.StreamHandler) error {
- err := haMembershipServerCheck(ss.Context(), c, haBackend)
- if err != nil {
- return err
- }
- return handler(srv, ss)
- }
-}
-
-// haMembershipClientCheck checks if the given error from the server
-// is StatusNotHAMember. If so, the client will mark itself as removed
-// and shutdown
-func haMembershipClientCheck(err error, c *Core, haBackend physical.RemovableNodeHABackend) {
- if !errors.Is(err, StatusNotHAMember) {
- return
- }
- removeErr := haBackend.RemoveSelf()
- if removeErr != nil {
- c.logger.Debug("failed to remove self", "error", removeErr)
- }
- go c.ShutdownCoreError(errors.New("node removed from HA configuration"))
-}
-
-func haMembershipUnaryClientInterceptor(c *Core, haBackend physical.RemovableNodeHABackend) grpc.UnaryClientInterceptor {
- return func(ctx context.Context, method string, req, reply any, cc *grpc.ClientConn, invoker grpc.UnaryInvoker, opts ...grpc.CallOption) error {
- if haBackend == nil {
- return invoker(ctx, method, req, reply, cc, opts...)
- }
- ctx = metadata.AppendToOutgoingContext(ctx, haNodeIDKey, haBackend.NodeID())
- err := invoker(ctx, method, req, reply, cc, opts...)
- haMembershipClientCheck(err, c, haBackend)
- return err
- }
-}
-
-func haMembershipStreamClientInterceptor(c *Core, haBackend physical.RemovableNodeHABackend) grpc.StreamClientInterceptor {
- return func(ctx context.Context, desc *grpc.StreamDesc, cc *grpc.ClientConn, method string, streamer grpc.Streamer, opts ...grpc.CallOption) (grpc.ClientStream, error) {
- if haBackend == nil {
- return streamer(ctx, desc, cc, method, opts...)
- }
- ctx = metadata.AppendToOutgoingContext(ctx, haNodeIDKey, haBackend.NodeID())
- stream, err := streamer(ctx, desc, cc, method, opts...)
- haMembershipClientCheck(err, c, haBackend)
- return stream, err
- }
-}
-
type requestForwardingHandler struct {
fws *http2.Server
fwRPCServer *grpc.Server
@@ -150,7 +47,6 @@ type requestForwardingClusterClient struct {
func NewRequestForwardingHandler(c *Core, fws *http2.Server, perfStandbySlots chan struct{}, perfStandbyRepCluster *replication.Cluster) (*requestForwardingHandler, error) {
// Resolve locally to avoid races
ha := c.ha != nil
- removableHABackend := c.getRemovableHABackend()
fwRPCServer := grpc.NewServer(
grpc.KeepaliveParams(keepalive.ServerParameters{
@@ -158,8 +54,6 @@ func NewRequestForwardingHandler(c *Core, fws *http2.Server, perfStandbySlots ch
}),
grpc.MaxRecvMsgSize(math.MaxInt32),
grpc.MaxSendMsgSize(math.MaxInt32),
- grpc.StreamInterceptor(haMembershipStreamServerInterceptor(c, removableHABackend)),
- grpc.UnaryInterceptor(haMembershipUnaryServerInterceptor(c, removableHABackend)),
)
if ha && c.clusterHandler != nil {
@@ -380,8 +274,6 @@ func (c *Core) refreshRequestForwardingConnection(ctx context.Context, clusterAd
core: c,
})
- removableHABackend := c.getRemovableHABackend()
-
// Set up grpc forwarding handling
// It's not really insecure, but we have to dial manually to get the
// ALPN header right. It's just "insecure" because GRPC isn't managing
@@ -393,8 +285,6 @@ func (c *Core) refreshRequestForwardingConnection(ctx context.Context, clusterAd
grpc.WithKeepaliveParams(keepalive.ClientParameters{
Time: 2 * c.clusterHeartbeatInterval,
}),
- grpc.WithStreamInterceptor(haMembershipStreamClientInterceptor(c, removableHABackend)),
- grpc.WithUnaryInterceptor(haMembershipUnaryClientInterceptor(c, removableHABackend)),
grpc.WithDefaultCallOptions(
grpc.MaxCallRecvMsgSize(math.MaxInt32),
grpc.MaxCallSendMsgSize(math.MaxInt32),
@@ -484,10 +374,6 @@ func (c *Core) ForwardRequest(req *http.Request) (int, http.Header, []byte, erro
if err != nil {
metrics.IncrCounter([]string{"ha", "rpc", "client", "forward", "errors"}, 1)
c.logger.Error("error during forwarded RPC request", "error", err)
-
- if errors.Is(err, StatusNotHAMember) {
- return 0, nil, nil, fmt.Errorf("error during forwarding RPC request: %w", err)
- }
return 0, nil, nil, fmt.Errorf("error during forwarding RPC request")
}
diff --git a/vault/request_forwarding_service.pb.go b/vault/request_forwarding_service.pb.go
index f414f016ad90..d787d831b1db 100644
--- a/vault/request_forwarding_service.pb.go
+++ b/vault/request_forwarding_service.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: vault/request_forwarding_service.proto
@@ -56,9 +56,11 @@ type EchoRequest struct {
func (x *EchoRequest) Reset() {
*x = EchoRequest{}
- mi := &file_vault_request_forwarding_service_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_request_forwarding_service_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EchoRequest) String() string {
@@ -69,7 +71,7 @@ func (*EchoRequest) ProtoMessage() {}
func (x *EchoRequest) ProtoReflect() protoreflect.Message {
mi := &file_vault_request_forwarding_service_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -206,9 +208,11 @@ type EchoReply struct {
func (x *EchoReply) Reset() {
*x = EchoReply{}
- mi := &file_vault_request_forwarding_service_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_request_forwarding_service_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *EchoReply) String() string {
@@ -219,7 +223,7 @@ func (*EchoReply) ProtoMessage() {}
func (x *EchoReply) ProtoReflect() protoreflect.Message {
mi := &file_vault_request_forwarding_service_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -298,9 +302,11 @@ type NodeInformation struct {
func (x *NodeInformation) Reset() {
*x = NodeInformation{}
- mi := &file_vault_request_forwarding_service_proto_msgTypes[2]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_request_forwarding_service_proto_msgTypes[2]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *NodeInformation) String() string {
@@ -311,7 +317,7 @@ func (*NodeInformation) ProtoMessage() {}
func (x *NodeInformation) ProtoReflect() protoreflect.Message {
mi := &file_vault_request_forwarding_service_proto_msgTypes[2]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -381,9 +387,11 @@ type ClientKey struct {
func (x *ClientKey) Reset() {
*x = ClientKey{}
- mi := &file_vault_request_forwarding_service_proto_msgTypes[3]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_request_forwarding_service_proto_msgTypes[3]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *ClientKey) String() string {
@@ -394,7 +402,7 @@ func (*ClientKey) ProtoMessage() {}
func (x *ClientKey) ProtoReflect() protoreflect.Message {
mi := &file_vault_request_forwarding_service_proto_msgTypes[3]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -445,9 +453,11 @@ type PerfStandbyElectionInput struct {
func (x *PerfStandbyElectionInput) Reset() {
*x = PerfStandbyElectionInput{}
- mi := &file_vault_request_forwarding_service_proto_msgTypes[4]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_request_forwarding_service_proto_msgTypes[4]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PerfStandbyElectionInput) String() string {
@@ -458,7 +468,7 @@ func (*PerfStandbyElectionInput) ProtoMessage() {}
func (x *PerfStandbyElectionInput) ProtoReflect() protoreflect.Message {
mi := &file_vault_request_forwarding_service_proto_msgTypes[4]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -488,9 +498,11 @@ type PerfStandbyElectionResponse struct {
func (x *PerfStandbyElectionResponse) Reset() {
*x = PerfStandbyElectionResponse{}
- mi := &file_vault_request_forwarding_service_proto_msgTypes[5]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_request_forwarding_service_proto_msgTypes[5]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *PerfStandbyElectionResponse) String() string {
@@ -501,7 +513,7 @@ func (*PerfStandbyElectionResponse) ProtoMessage() {}
func (x *PerfStandbyElectionResponse) ProtoReflect() protoreflect.Message {
mi := &file_vault_request_forwarding_service_proto_msgTypes[5]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -737,6 +749,80 @@ func file_vault_request_forwarding_service_proto_init() {
if File_vault_request_forwarding_service_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_vault_request_forwarding_service_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*EchoRequest); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_request_forwarding_service_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*EchoReply); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_request_forwarding_service_proto_msgTypes[2].Exporter = func(v any, i int) any {
+ switch v := v.(*NodeInformation); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_request_forwarding_service_proto_msgTypes[3].Exporter = func(v any, i int) any {
+ switch v := v.(*ClientKey); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_request_forwarding_service_proto_msgTypes[4].Exporter = func(v any, i int) any {
+ switch v := v.(*PerfStandbyElectionInput); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_request_forwarding_service_proto_msgTypes[5].Exporter = func(v any, i int) any {
+ switch v := v.(*PerfStandbyElectionResponse); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/vault/request_forwarding_service_grpc.pb.go b/vault/request_forwarding_service_grpc.pb.go
index 38b053b2b7b4..ff082568ea14 100644
--- a/vault/request_forwarding_service_grpc.pb.go
+++ b/vault/request_forwarding_service_grpc.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go-grpc. DO NOT EDIT.
// versions:
-// - protoc-gen-go-grpc v1.5.1
+// - protoc-gen-go-grpc v1.4.0
// - protoc (unknown)
// source: vault/request_forwarding_service.proto
@@ -19,8 +19,8 @@ import (
// This is a compile-time assertion to ensure that this generated file
// is compatible with the grpc package it is being compiled against.
-// Requires gRPC-Go v1.64.0 or later.
-const _ = grpc.SupportPackageIsVersion9
+// Requires gRPC-Go v1.62.0 or later.
+const _ = grpc.SupportPackageIsVersion8
const (
RequestForwarding_ForwardRequest_FullMethodName = "/vault.RequestForwarding/ForwardRequest"
@@ -34,7 +34,7 @@ const (
type RequestForwardingClient interface {
ForwardRequest(ctx context.Context, in *forwarding.Request, opts ...grpc.CallOption) (*forwarding.Response, error)
Echo(ctx context.Context, in *EchoRequest, opts ...grpc.CallOption) (*EchoReply, error)
- PerformanceStandbyElectionRequest(ctx context.Context, in *PerfStandbyElectionInput, opts ...grpc.CallOption) (grpc.ServerStreamingClient[PerfStandbyElectionResponse], error)
+ PerformanceStandbyElectionRequest(ctx context.Context, in *PerfStandbyElectionInput, opts ...grpc.CallOption) (RequestForwarding_PerformanceStandbyElectionRequestClient, error)
}
type requestForwardingClient struct {
@@ -65,13 +65,13 @@ func (c *requestForwardingClient) Echo(ctx context.Context, in *EchoRequest, opt
return out, nil
}
-func (c *requestForwardingClient) PerformanceStandbyElectionRequest(ctx context.Context, in *PerfStandbyElectionInput, opts ...grpc.CallOption) (grpc.ServerStreamingClient[PerfStandbyElectionResponse], error) {
+func (c *requestForwardingClient) PerformanceStandbyElectionRequest(ctx context.Context, in *PerfStandbyElectionInput, opts ...grpc.CallOption) (RequestForwarding_PerformanceStandbyElectionRequestClient, error) {
cOpts := append([]grpc.CallOption{grpc.StaticMethod()}, opts...)
stream, err := c.cc.NewStream(ctx, &RequestForwarding_ServiceDesc.Streams[0], RequestForwarding_PerformanceStandbyElectionRequest_FullMethodName, cOpts...)
if err != nil {
return nil, err
}
- x := &grpc.GenericClientStream[PerfStandbyElectionInput, PerfStandbyElectionResponse]{ClientStream: stream}
+ x := &requestForwardingPerformanceStandbyElectionRequestClient{ClientStream: stream}
if err := x.ClientStream.SendMsg(in); err != nil {
return nil, err
}
@@ -81,25 +81,36 @@ func (c *requestForwardingClient) PerformanceStandbyElectionRequest(ctx context.
return x, nil
}
-// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name.
-type RequestForwarding_PerformanceStandbyElectionRequestClient = grpc.ServerStreamingClient[PerfStandbyElectionResponse]
+type RequestForwarding_PerformanceStandbyElectionRequestClient interface {
+ Recv() (*PerfStandbyElectionResponse, error)
+ grpc.ClientStream
+}
+
+type requestForwardingPerformanceStandbyElectionRequestClient struct {
+ grpc.ClientStream
+}
+
+func (x *requestForwardingPerformanceStandbyElectionRequestClient) Recv() (*PerfStandbyElectionResponse, error) {
+ m := new(PerfStandbyElectionResponse)
+ if err := x.ClientStream.RecvMsg(m); err != nil {
+ return nil, err
+ }
+ return m, nil
+}
// RequestForwardingServer is the server API for RequestForwarding service.
// All implementations must embed UnimplementedRequestForwardingServer
-// for forward compatibility.
+// for forward compatibility
type RequestForwardingServer interface {
ForwardRequest(context.Context, *forwarding.Request) (*forwarding.Response, error)
Echo(context.Context, *EchoRequest) (*EchoReply, error)
- PerformanceStandbyElectionRequest(*PerfStandbyElectionInput, grpc.ServerStreamingServer[PerfStandbyElectionResponse]) error
+ PerformanceStandbyElectionRequest(*PerfStandbyElectionInput, RequestForwarding_PerformanceStandbyElectionRequestServer) error
mustEmbedUnimplementedRequestForwardingServer()
}
-// UnimplementedRequestForwardingServer must be embedded to have
-// forward compatible implementations.
-//
-// NOTE: this should be embedded by value instead of pointer to avoid a nil
-// pointer dereference when methods are called.
-type UnimplementedRequestForwardingServer struct{}
+// UnimplementedRequestForwardingServer must be embedded to have forward compatible implementations.
+type UnimplementedRequestForwardingServer struct {
+}
func (UnimplementedRequestForwardingServer) ForwardRequest(context.Context, *forwarding.Request) (*forwarding.Response, error) {
return nil, status.Errorf(codes.Unimplemented, "method ForwardRequest not implemented")
@@ -107,11 +118,10 @@ func (UnimplementedRequestForwardingServer) ForwardRequest(context.Context, *for
func (UnimplementedRequestForwardingServer) Echo(context.Context, *EchoRequest) (*EchoReply, error) {
return nil, status.Errorf(codes.Unimplemented, "method Echo not implemented")
}
-func (UnimplementedRequestForwardingServer) PerformanceStandbyElectionRequest(*PerfStandbyElectionInput, grpc.ServerStreamingServer[PerfStandbyElectionResponse]) error {
+func (UnimplementedRequestForwardingServer) PerformanceStandbyElectionRequest(*PerfStandbyElectionInput, RequestForwarding_PerformanceStandbyElectionRequestServer) error {
return status.Errorf(codes.Unimplemented, "method PerformanceStandbyElectionRequest not implemented")
}
func (UnimplementedRequestForwardingServer) mustEmbedUnimplementedRequestForwardingServer() {}
-func (UnimplementedRequestForwardingServer) testEmbeddedByValue() {}
// UnsafeRequestForwardingServer may be embedded to opt out of forward compatibility for this service.
// Use of this interface is not recommended, as added methods to RequestForwardingServer will
@@ -121,13 +131,6 @@ type UnsafeRequestForwardingServer interface {
}
func RegisterRequestForwardingServer(s grpc.ServiceRegistrar, srv RequestForwardingServer) {
- // If the following call pancis, it indicates UnimplementedRequestForwardingServer was
- // embedded by pointer and is nil. This will cause panics if an
- // unimplemented method is ever invoked, so we test this at initialization
- // time to prevent it from happening at runtime later due to I/O.
- if t, ok := srv.(interface{ testEmbeddedByValue() }); ok {
- t.testEmbeddedByValue()
- }
s.RegisterService(&RequestForwarding_ServiceDesc, srv)
}
@@ -172,11 +175,21 @@ func _RequestForwarding_PerformanceStandbyElectionRequest_Handler(srv interface{
if err := stream.RecvMsg(m); err != nil {
return err
}
- return srv.(RequestForwardingServer).PerformanceStandbyElectionRequest(m, &grpc.GenericServerStream[PerfStandbyElectionInput, PerfStandbyElectionResponse]{ServerStream: stream})
+ return srv.(RequestForwardingServer).PerformanceStandbyElectionRequest(m, &requestForwardingPerformanceStandbyElectionRequestServer{ServerStream: stream})
+}
+
+type RequestForwarding_PerformanceStandbyElectionRequestServer interface {
+ Send(*PerfStandbyElectionResponse) error
+ grpc.ServerStream
}
-// This type alias is provided for backwards compatibility with existing code that references the prior non-generic stream type by name.
-type RequestForwarding_PerformanceStandbyElectionRequestServer = grpc.ServerStreamingServer[PerfStandbyElectionResponse]
+type requestForwardingPerformanceStandbyElectionRequestServer struct {
+ grpc.ServerStream
+}
+
+func (x *requestForwardingPerformanceStandbyElectionRequestServer) Send(m *PerfStandbyElectionResponse) error {
+ return x.ServerStream.SendMsg(m)
+}
// RequestForwarding_ServiceDesc is the grpc.ServiceDesc for RequestForwarding service.
// It's only intended for direct use with grpc.RegisterService,
diff --git a/vault/request_forwarding_test.go b/vault/request_forwarding_test.go
deleted file mode 100644
index 9df49b5a99fa..000000000000
--- a/vault/request_forwarding_test.go
+++ /dev/null
@@ -1,131 +0,0 @@
-// Copyright (c) HashiCorp, Inc.
-// SPDX-License-Identifier: BUSL-1.1
-
-package vault
-
-import (
- "context"
- "errors"
- "testing"
-
- "github.com/hashicorp/go-hclog"
- "github.com/hashicorp/vault/sdk/physical"
- "github.com/stretchr/testify/require"
- "google.golang.org/grpc/metadata"
-)
-
-// Test_haIDFromContext verifies that the HA node ID gets correctly extracted
-// from a gRPC context
-func Test_haIDFromContext(t *testing.T) {
- testCases := []struct {
- name string
- md metadata.MD
- wantID string
- wantOk bool
- }{
- {
- name: "no ID",
- md: metadata.MD{},
- wantID: "",
- wantOk: false,
- },
- {
- name: "with ID",
- md: metadata.MD{haNodeIDKey: {"node_id"}},
- wantID: "node_id",
- wantOk: true,
- },
- {
- name: "with empty string ID",
- md: metadata.MD{haNodeIDKey: {""}},
- wantID: "",
- wantOk: true,
- },
- {
- name: "with empty ID",
- md: metadata.MD{haNodeIDKey: {}},
- wantID: "",
- wantOk: false,
- },
-
- {
- name: "with multiple IDs",
- md: metadata.MD{haNodeIDKey: {"1", "2"}},
- wantID: "1",
- wantOk: true,
- },
- }
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- ctx := metadata.NewIncomingContext(context.Background(), tc.md)
- id, ok := haIDFromContext(ctx)
- require.Equal(t, tc.wantID, id)
- require.Equal(t, tc.wantOk, ok)
- })
- }
-}
-
-type mockHARemovableNodeBackend struct {
- physical.RemovableNodeHABackend
- isRemoved func(context.Context, string) (bool, error)
-}
-
-func (m *mockHARemovableNodeBackend) IsNodeRemoved(ctx context.Context, nodeID string) (bool, error) {
- return m.isRemoved(ctx, nodeID)
-}
-
-func newMockHARemovableNodeBackend(isRemoved func(context.Context, string) (bool, error)) physical.RemovableNodeHABackend {
- return &mockHARemovableNodeBackend{isRemoved: isRemoved}
-}
-
-// Test_haMembershipServerCheck verifies that the correct error is returned
-// when the context contains a removed node ID
-func Test_haMembershipServerCheck(t *testing.T) {
- nodeIDCtx := metadata.NewIncomingContext(context.Background(), metadata.MD{haNodeIDKey: {"node_id"}})
- otherErr := errors.New("error checking")
- testCases := []struct {
- name string
- nodeIDCtx context.Context
- haBackend physical.RemovableNodeHABackend
- wantError error
- }{
- {
- name: "nil backend",
- haBackend: nil,
- nodeIDCtx: nodeIDCtx,
- }, {
- name: "no node ID context",
- haBackend: newMockHARemovableNodeBackend(func(ctx context.Context, s string) (bool, error) {
- return false, nil
- }),
- nodeIDCtx: context.Background(),
- }, {
- name: "node removed",
- haBackend: newMockHARemovableNodeBackend(func(ctx context.Context, s string) (bool, error) {
- return true, nil
- }),
- nodeIDCtx: nodeIDCtx,
- wantError: StatusNotHAMember,
- }, {
- name: "node removed err",
- haBackend: newMockHARemovableNodeBackend(func(ctx context.Context, s string) (bool, error) {
- return false, otherErr
- }),
- nodeIDCtx: nodeIDCtx,
- wantError: otherErr,
- },
- }
- for _, tc := range testCases {
- t.Run(tc.name, func(t *testing.T) {
- c := &Core{
- logger: hclog.NewNullLogger(),
- }
- err := haMembershipServerCheck(tc.nodeIDCtx, c, tc.haBackend)
- if tc.wantError != nil {
- require.EqualError(t, err, tc.wantError.Error())
- } else {
- require.NoError(t, err)
- }
- })
- }
-}
diff --git a/vault/seal/multi_wrap_value.pb.go b/vault/seal/multi_wrap_value.pb.go
index 9a2d943e6ef7..d831f4f5ebde 100644
--- a/vault/seal/multi_wrap_value.pb.go
+++ b/vault/seal/multi_wrap_value.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: vault/seal/multi_wrap_value.proto
@@ -38,9 +38,11 @@ type MultiWrapValue struct {
func (x *MultiWrapValue) Reset() {
*x = MultiWrapValue{}
- mi := &file_vault_seal_multi_wrap_value_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_seal_multi_wrap_value_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *MultiWrapValue) String() string {
@@ -51,7 +53,7 @@ func (*MultiWrapValue) ProtoMessage() {}
func (x *MultiWrapValue) ProtoReflect() protoreflect.Message {
mi := &file_vault_seal_multi_wrap_value_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -133,6 +135,20 @@ func file_vault_seal_multi_wrap_value_proto_init() {
if File_vault_seal_multi_wrap_value_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_vault_seal_multi_wrap_value_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*MultiWrapValue); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/vault/testing.go b/vault/testing.go
index e6f1f26fcf31..036087032383 100644
--- a/vault/testing.go
+++ b/vault/testing.go
@@ -31,6 +31,7 @@ import (
"time"
"github.com/armon/go-metrics"
+ "github.com/golang/protobuf/ptypes"
"github.com/hashicorp/go-cleanhttp"
log "github.com/hashicorp/go-hclog"
"github.com/hashicorp/go-secure-stdlib/reloadutil"
@@ -59,7 +60,6 @@ import (
"github.com/mitchellh/copystructure"
"golang.org/x/crypto/ed25519"
"golang.org/x/net/http2"
- protoreflect "google.golang.org/protobuf/reflect/protoreflect"
"google.golang.org/protobuf/types/known/anypb"
)
@@ -2215,29 +2215,17 @@ var (
_ testcluster.VaultClusterNode = &TestClusterCore{}
)
-func TestUserpassMount(c *Core, local bool) (*MountEntry, error) {
- name := "userpass"
- if local {
- name += "-local"
- }
+// TestCreateDuplicateEntityAliasesInStorage creates n entities with a duplicate alias in storage
+// This should only be used in testing
+func TestCreateDuplicateEntityAliasesInStorage(ctx context.Context, c *Core, n int) ([]string, error) {
userpassMe := &MountEntry{
Table: credentialTableType,
- Path: name + "/",
+ Path: "userpass/",
Type: "userpass",
- Description: name,
- Accessor: name,
- Local: local,
- }
- if err := c.enableCredential(namespace.RootContext(nil), userpassMe); err != nil {
- return nil, err
+ Description: "userpass",
+ Accessor: "userpass1",
}
- return userpassMe, nil
-}
-
-// TestCreateDuplicateEntityAliasesInStorage creates n entities with a duplicate alias in storage
-// This should only be used in testing
-func TestCreateDuplicateEntityAliasesInStorage(ctx context.Context, c *Core, n int) ([]string, error) {
- userpassMe, err := TestUserpassMount(c, false)
+ err := c.enableCredential(namespace.RootContext(nil), userpassMe)
if err != nil {
return nil, err
}
@@ -2262,7 +2250,16 @@ func TestCreateDuplicateEntityAliasesInStorage(ctx context.Context, c *Core, n i
NamespaceID: namespace.RootNamespaceID,
BucketKey: c.identityStore.entityPacker.BucketKey(entityID),
}
- if err := TestHelperWriteToStoragePacker(ctx, c.identityStore.entityPacker, e.ID, e); err != nil {
+
+ entity, err := ptypes.MarshalAny(e)
+ if err != nil {
+ return nil, err
+ }
+ item := &storagepacker.Item{
+ ID: e.ID,
+ Message: entity,
+ }
+ if err = c.identityStore.entityPacker.PutItem(ctx, item); err != nil {
return nil, err
}
}
@@ -2270,22 +2267,6 @@ func TestCreateDuplicateEntityAliasesInStorage(ctx context.Context, c *Core, n i
return entityIDs, nil
}
-// TestHelperWriteToStoragePacker takes care of boiler place to insert into a
-// storage packer. Just provide the raw protobuf object e.g. &identity.Entity{}
-// and it is wrapped and inserted for you. You still need to populate BucketKey
-// in the object if applicable before passing it.
-func TestHelperWriteToStoragePacker(ctx context.Context, p *storagepacker.StoragePacker, id string, m protoreflect.ProtoMessage) error {
- a, err := anypb.New(m)
- if err != nil {
- return err
- }
- i := &storagepacker.Item{
- ID: id,
- Message: a,
- }
- return p.PutItem(context.Background(), i)
-}
-
// TestCreateStorageGroup creates a group in storage only to bypass checks that the entities exist in memdb
// Should only be used in testing
func TestCreateStorageGroup(ctx context.Context, c *Core, entityIDs []string) error {
@@ -2315,44 +2296,3 @@ func TestCreateStorageGroup(ctx context.Context, c *Core, entityIDs []string) er
}
return nil
}
-
-// Mock HABackend is a non-functional HABackend for testing purposes
-type MockHABackend struct {
- physical.HABackend
- physical.Backend
-}
-
-func (m *MockHABackend) HAEnabled() bool {
- return true
-}
-
-// MockRemovableNodeHABackend is a barely functional RemovableNodeHABackend for testing purposes.
-// It has a functional IsRemoved method and an exported Removed field so that the desired state can be easily set.
-type MockRemovableNodeHABackend struct {
- physical.RemovableNodeHABackend
- physical.Backend
- Removed bool
-}
-
-func (m *MockRemovableNodeHABackend) HAEnabled() bool {
- return true
-}
-
-func (m *MockRemovableNodeHABackend) IsRemoved() bool {
- return m.Removed
-}
-
-func TestCoreWithMockRemovableNodeHABackend(t *testing.T, removed bool) (*Core, error) {
- t.Helper()
- logger := corehelpers.NewTestLogger(t)
- inmha, err := physInmem.NewInmemHA(nil, logger)
- if err != nil {
- t.Fatal(err)
- }
- conf := testCoreConfig(t, inmha, logger)
- mockHABackend := &MockRemovableNodeHABackend{Removed: removed}
- conf.HAPhysical = mockHABackend
- conf.RedirectAddr = "http://127.0.0.1:8200"
-
- return NewCore(conf)
-}
diff --git a/vault/tokens/token.pb.go b/vault/tokens/token.pb.go
index 95b13f57c3bd..30308caf4c90 100644
--- a/vault/tokens/token.pb.go
+++ b/vault/tokens/token.pb.go
@@ -3,7 +3,7 @@
// Code generated by protoc-gen-go. DO NOT EDIT.
// versions:
-// protoc-gen-go v1.35.2
+// protoc-gen-go v1.34.2
// protoc (unknown)
// source: vault/tokens/token.proto
@@ -36,9 +36,11 @@ type SignedToken struct {
func (x *SignedToken) Reset() {
*x = SignedToken{}
- mi := &file_vault_tokens_token_proto_msgTypes[0]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_tokens_token_proto_msgTypes[0]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *SignedToken) String() string {
@@ -49,7 +51,7 @@ func (*SignedToken) ProtoMessage() {}
func (x *SignedToken) ProtoReflect() protoreflect.Message {
mi := &file_vault_tokens_token_proto_msgTypes[0]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -97,9 +99,11 @@ type Token struct {
func (x *Token) Reset() {
*x = Token{}
- mi := &file_vault_tokens_token_proto_msgTypes[1]
- ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
- ms.StoreMessageInfo(mi)
+ if protoimpl.UnsafeEnabled {
+ mi := &file_vault_tokens_token_proto_msgTypes[1]
+ ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
+ ms.StoreMessageInfo(mi)
+ }
}
func (x *Token) String() string {
@@ -110,7 +114,7 @@ func (*Token) ProtoMessage() {}
func (x *Token) ProtoReflect() protoreflect.Message {
mi := &file_vault_tokens_token_proto_msgTypes[1]
- if x != nil {
+ if protoimpl.UnsafeEnabled && x != nil {
ms := protoimpl.X.MessageStateOf(protoimpl.Pointer(x))
if ms.LoadMessageInfo() == nil {
ms.StoreMessageInfo(mi)
@@ -199,6 +203,32 @@ func file_vault_tokens_token_proto_init() {
if File_vault_tokens_token_proto != nil {
return
}
+ if !protoimpl.UnsafeEnabled {
+ file_vault_tokens_token_proto_msgTypes[0].Exporter = func(v any, i int) any {
+ switch v := v.(*SignedToken); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ file_vault_tokens_token_proto_msgTypes[1].Exporter = func(v any, i int) any {
+ switch v := v.(*Token); i {
+ case 0:
+ return &v.state
+ case 1:
+ return &v.sizeCache
+ case 2:
+ return &v.unknownFields
+ default:
+ return nil
+ }
+ }
+ }
type x struct{}
out := protoimpl.TypeBuilder{
File: protoimpl.DescBuilder{
diff --git a/version/VERSION b/version/VERSION
index a90d62be3124..753029c3a5ec 100644
--- a/version/VERSION
+++ b/version/VERSION
@@ -1 +1 @@
-1.19.0-beta1
+1.18.2
\ No newline at end of file
diff --git a/website/content/api-docs/auth/cert.mdx b/website/content/api-docs/auth/cert.mdx
index 25deca159e15..a838e27e63e5 100644
--- a/website/content/api-docs/auth/cert.mdx
+++ b/website/content/api-docs/auth/cert.mdx
@@ -69,7 +69,7 @@ Sets a CA cert and associated parameters in a role name.
by a dash (-) instead of a dot (.) to allow usage in ACL templates.
- `ocsp_enabled` `(bool: false)` - If enabled, validate certificates' revocation
status using OCSP.
-- `ocsp_ca_certificates` `(string: "")` Any additional OCSP responder certificates needed to
+- `ocsp_ca_certificates` `(string: "")` Any additional CA certificates needed to
verify OCSP responses. Provided as base64 encoded PEM data.
- `ocsp_servers_override` `(array: [])`: A comma-separated list of OCSP server
addresses. If unset, the OCSP server is determined from the AuthorityInformationAccess
diff --git a/website/content/api-docs/secret/pki/index.mdx b/website/content/api-docs/secret/pki/index.mdx
index 198c4eda0ab0..76ca0bec63fd 100644
--- a/website/content/api-docs/secret/pki/index.mdx
+++ b/website/content/api-docs/secret/pki/index.mdx
@@ -87,9 +87,6 @@ update your API calls accordingly.
- [Tidy Status](#tidy-status)
- [Cancel Tidy](#cancel-tidy)
- [Certificate Issuance Protocols](/vault/api-docs/secret/pki/issuance)
- - [ACME - Automatic Certificate Management Environment](/vault/api-docs/secret/pki/issuance#acme-certificate-issuance)
- - [EST - Enrollment over Secure Transport ](/vault/api-docs/secret/pki/issuance#est-certificate-issuance)
- - [CMPv2 - Certificate Management Protocol (v2) ](/vault/api-docs/secret/pki/issuance#cmpv2-certificate-issuance)
- [Cluster Scalability](#cluster-scalability)
- [Managed Key](#managed-keys) (Enterprise Only)
- [Vault CLI with DER/PEM responses](#vault-cli-with-der-pem-responses)
@@ -3937,8 +3934,7 @@ $ curl \
"delta_rebuild_interval": "15m",
"cross_cluster_revocation": true,
"unified_crl": true,
- "unified_crl_on_existing_paths": true,
- "max_crl_entries": 100000
+ "unified_crl_on_existing_paths": true
},
"auth": null
}
@@ -4047,11 +4043,6 @@ the CRL.
without having to re-issue certificates or update scripts pulling
a single CRL.
-- `max_crl_entries` `(int: 100000)` -
- The maximum number of entries a CRL can contain. This option exists to
- prevent accidental runaway issuance/revocation from overloading Vault.
- If set to -1, the limit is disabled.
-
#### Sample payload
```json
@@ -4067,7 +4058,6 @@ the CRL.
"cross_cluster_revocation": true,
"unified_crl": true,
"unified_crl_on_existing_paths": true,
- "max_crl_entries": 100000
}
```
diff --git a/website/content/api-docs/secret/transit.mdx b/website/content/api-docs/secret/transit.mdx
index 140cad0d72c6..dbcafe5bd92c 100644
--- a/website/content/api-docs/secret/transit.mdx
+++ b/website/content/api-docs/secret/transit.mdx
@@ -797,16 +797,6 @@ will be returned.
data (also known as additional data or AAD) to also be authenticated with
AEAD ciphers (`aes128-gcm96`, `aes256-gcm`, and `chacha20-poly1305`).
-- `padding_scheme` `(string: "oaep")` – Specifies the RSA encryption padding
- scheme for RSA keys. Must be one of the following supported signature types:
-
- - `oaep`
- - `pkcs1v15`
-
- ~> **Warning:** `pkcs1v15` is a legacy padding scheme with security weaknesses.
- It is recommended that the default of OAEP be used unless specific backwards
- compatibility is required.
-
- `context` `(string: "")` – Specifies the **base64 encoded** context for key
derivation. This is required if key derivation is enabled for this key.
@@ -932,12 +922,6 @@ This endpoint decrypts the provided ciphertext using the named key.
data (also known as additional data or AAD) to also be authenticated with
AEAD ciphers (`aes128-gcm96`, `aes256-gcm`, and `chacha20-poly1305`).
-- `padding_scheme` `(string: "oaep")` – Specifies the RSA decryption padding
- scheme for RSA keys. Must be one of the following supported signature types:
-
- - `oaep`
- - `pkcs1v15`
-
- `context` `(string: "")` – Specifies the **base64 encoded** context for key
derivation. This is required if key derivation is enabled.
@@ -1024,22 +1008,6 @@ functionality to untrusted users or scripts.
- `ciphertext` `(string: )` – Specifies the ciphertext to re-encrypt.
-- `decrypt_padding_scheme` `(string: "oaep")` – Specifies the RSA padding
- scheme for RSA keys for the decrypt step. Must be one of the following supported signature types:
-
- - `oaep`
- - `pkcs1v15`
-
-- `encrypt_padding_scheme` `(string: "oaep")` – Specifies the RSA padding
- scheme for RSA keys for the encrypt step. Must be one of the following supported signature types:
-
- - `oaep`
- - `pkcs1v15`
-
- ~> **Warning:** `pkcs1v15` is a legacy padding scheme with security weaknesses.
- It is recommended that the default of OAEP be used unless specific backwards
- compatibility is required.
-
- `context` `(string: "")` – Specifies the **base64 encoded** context for key
derivation. This is required if key derivation is enabled.
@@ -1141,16 +1109,6 @@ then made available to trusted users.
- `bits` `(int: 256)` – Specifies the number of bits in the desired key. Can be
128, 256, or 512.
-- `padding_scheme` `(string: "oaep")` – Specifies the RSA encryption padding
- scheme for RSA keys. Must be one of the following supported signature types:
-
- - `oaep`
- - `pkcs1v15`
-
- ~> **Warning:** `pkcs1v15` is a legacy padding scheme with security weaknesses.
- It is recommended that the default of OAEP be used unless specific backwards
- compatibility is required.
-
### Sample payload
```json
@@ -1480,9 +1438,6 @@ supports signing.
signature rather than the `PKCSv1_5_DERnull` signature type usually
created. See [RFC 3447 Section 9.2](https://www.rfc-editor.org/rfc/rfc3447#section-9.2).
- ~> **Note**: using `hash_algorithm=sha2-512` requires setting `prehashed=true`
- for Ed25519 backed keys which enabled Ed25519ph signature support on Enterprise.
-
- `input` `(string: "")` – Specifies the **base64 encoded** input data. One of
`input` or `batch_input` must be supplied.
@@ -1519,9 +1474,6 @@ supports signing.
Required if key derivation is enabled; currently only available with ed25519
keys.
-- `signature_context` `(string: "")` - Base64
- encoded context for Ed25519ctx and Ed25519ph signatures.
-
- `prehashed` `(bool: false)` - Set to `true` when the input is already hashed.
If the key type is `rsa-2048`, `rsa-3072` or `rsa-4096`, then the algorithm used to hash
the input should be indicated by the `hash_algorithm` parameter. Just as the
@@ -1529,9 +1481,6 @@ supports signing.
data you want signed, when set, `input` is expected to be base64-encoded
binary hashed data, not hex-formatted. (As an example, on the command line,
you could generate a suitable input via `openssl dgst -sha256 -binary | base64`.)
- On Enterprise , enabling this along with
- hash_algorithm being set to `sha2-512` will activate Ed25519ph signatures for
- Ed25519 keys
- `signature_algorithm` `(string: "pss")` – When using a RSA key, specifies the RSA
signature algorithm to use for signing. Supported signature types are:
@@ -1672,9 +1621,6 @@ or [generate CMAC](#generate-cmac) API calls.
signature rather than the `PKCSv1_5_DERnull` signature type usually
verified. See [RFC 3447 Section 9.2](https://www.rfc-editor.org/rfc/rfc3447#section-9.2).
- ~> **Note**: using `hash_algorithm=sha2-512` requires setting `prehashed=true`
- for Ed25519 backed keys which enabled Ed25519ph signature support on Enterprise.
-
- `input` `(string: "")` – Specifies the **base64 encoded** input data. One of
`input` or `batch_input` must be supplied.
@@ -1726,19 +1672,9 @@ or [generate CMAC](#generate-cmac) API calls.
Required if key derivation is enabled; currently only available with ed25519
keys.
-- `signature_context` `(string: "")` - Base64
- encoded context for Ed25519ctx and Ed25519ph signatures.
-
-- `prehashed` `(bool: false)` - Set to `true` when the input is already hashed.
- If the key type is `rsa-2048`, `rsa-3072` or `rsa-4096`, then the algorithm used to hash
- the input should be indicated by the `hash_algorithm` parameter. Just as the
- value to sign should be the base64-encoded representation of the exact binary
- data you want signed, when set, `input` is expected to be base64-encoded
- binary hashed data, not hex-formatted. (As an example, on the command line,
- you could generate a suitable input via `openssl dgst -sha256 -binary | base64`.)
- On Enterprise , enabling this along with
- hash_algorithm being set to `sha2-512` will activate Ed25519ph signatures for
- Ed25519 keys
+- `prehashed` `(bool: false)` - Set to `true` when the input is already
+ hashed. If the key type is `rsa-2048`, `rsa-3072` or `rsa-4096`, then the algorithm used
+ to hash the input should be indicated by the `hash_algorithm` parameter.
- `signature_algorithm` `(string: "pss")` – When using a RSA key, specifies the RSA
signature algorithm to use for signature verification. Supported signature types
diff --git a/website/content/api-docs/system/seal-status.mdx b/website/content/api-docs/system/seal-status.mdx
index 2db53955a97f..feb69e68e3cf 100644
--- a/website/content/api-docs/system/seal-status.mdx
+++ b/website/content/api-docs/system/seal-status.mdx
@@ -30,19 +30,18 @@ The "t" parameter is the threshold, and "n" is the number of shares.
```json
{
- "build_date": "2024-11-15T14:17:42Z",
+ "type": "shamir",
"initialized": true,
- "migration": false,
- "n": 3,
+ "sealed": true,
+ "t": 3,
+ "n": 5,
+ "progress": 2,
"nonce": "",
- "progress": 1,
+ "version": "1.11.0",
+ "build_date": "2022-05-03T08:34:11Z",
+ "migration": false,
"recovery_seal": false,
- "removed_from_cluster": false,
- "sealed": true,
- "storage_type": "raft",
- "t": 2,
- "type": "shamir",
- "version": "1.19.0-beta1"
+ "storage_type": "file"
}
```
@@ -50,20 +49,19 @@ Sample response when Vault is unsealed.
```json
{
- "build_date": "2024-11-14T18:11:15Z",
- "cluster_id": "ebdd80fb-0c7f-bce9-f9b9-a0fa86aa3249",
- "cluster_name": "vault-cluster-f090409a",
+ "type": "shamir",
"initialized": true,
- "migration": false,
- "n": 3,
- "nonce": "",
+ "sealed": false,
+ "t": 3,
+ "n": 5,
"progress": 0,
+ "nonce": "",
+ "version": "1.11.0",
+ "build_date": "2022-05-03T08:34:11Z",
+ "migration": false,
+ "cluster_name": "vault-cluster-336172e1",
+ "cluster_id": "f94053ad-d80e-4270-2006-2efd67d0910a",
"recovery_seal": false,
- "removed_from_cluster": false,
- "sealed": false,
- "storage_type": "raft",
- "t": 2,
- "type": "shamir",
- "version": "1.19.0-beta1"
+ "storage_type": "file"
}
```
diff --git a/website/content/api-docs/system/storage/raftautopilot.mdx b/website/content/api-docs/system/storage/raftautopilot.mdx
index ea02f25539cb..19b453b8144d 100644
--- a/website/content/api-docs/system/storage/raftautopilot.mdx
+++ b/website/content/api-docs/system/storage/raftautopilot.mdx
@@ -35,69 +35,54 @@ $ curl \
```json
{
- "failure_tolerance": 1,
"healthy": true,
- "leader": "vault_1",
+ "failure_tolerance": 1,
"servers": {
- "vault_1": {
+ "raft1": {
+ "id": "raft1",
+ "name": "raft1",
"address": "127.0.0.1:8201",
- "healthy": true,
- "id": "vault_1",
+ "node_status": "alive",
"last_contact": "0s",
- "last_index": 63,
"last_term": 3,
- "name": "vault_1",
- "node_status": "alive",
- "node_type": "voter",
- "stable_since": "2024-08-29T16:02:45.639829+02:00",
+ "last_index": 459,
+ "healthy": true,
+ "stable_since": "2021-03-19T20:14:11.831678-04:00",
"status": "leader",
- "version": "1.17.3"
+ "meta": null
},
- "vault_2": {
- "address": "127.0.0.1:8203",
- "healthy": true,
- "id": "vault_2",
- "last_contact": "678.62575ms",
- "last_index": 63,
- "last_term": 3,
- "name": "vault_2",
+ "raft2": {
+ "id": "raft2",
+ "name": "raft2",
+ "address": "127.0.0.2:8201",
"node_status": "alive",
- "node_type": "voter",
- "stable_since": "2024-08-29T16:02:47.640976+02:00",
+ "last_contact": "516.49595ms",
+ "last_term": 3,
+ "last_index": 459,
+ "healthy": true,
+ "stable_since": "2021-03-19T20:14:19.831931-04:00",
"status": "voter",
- "version": "1.17.3"
+ "meta": null
},
- "vault_3": {
- "address": "127.0.0.1:8205",
- "healthy": true,
- "id": "vault_3",
- "last_contact": "3.969159375s",
- "last_index": 63,
- "last_term": 3,
- "name": "vault_3",
+ "raft3": {
+ "id": "raft3",
+ "name": "raft3",
+ "address": "127.0.0.3:8201",
"node_status": "alive",
- "node_type": "voter",
- "stable_since": "2024-08-29T16:02:49.640905+02:00",
+ "last_contact": "196.706591ms",
+ "last_term": 3,
+ "last_index": 459,
+ "healthy": true,
+ "stable_since": "2021-03-19T20:14:25.83565-04:00",
"status": "voter",
- "version": "1.17.3"
+ "meta": null
}
},
- "voters": [
- "vault_1",
- "vault_2",
- "vault_3"
- ]
+ "leader": "raft1",
+ "voters": ["raft1", "raft2", "raft3"],
+ "non_voters": null
}
```
-The `failure_tolerance` of a cluster is the number of nodes in the cluster that could
-fail gradually without causing an outage.
-
-When verifying the health of your cluster, check the following fields of each server:
-- `healthy`: whether Autopilot considers this node healthy or not
-- `status`: the voting status of the node. This will be `voter`, `leader`, or [`non-voter`](/vault/docs/concepts/integrated-storage#non-voting-nodes-enterprise-only)")
-- `last_index`: the index of the last applied Raft log. This should be close to the `last_index` value of the leader.
-- `version`: the version of Vault running on the server
-- `node_type`: the type of node. On CE, this will always be `voter`. See below for an explanation of Enterprise node types.
### Enterprise only
Vault Enterprise will include additional output in its API response to indicate the current state of redundancy zones,
@@ -164,7 +149,7 @@ automated upgrade progress (if any), and optimistic failure tolerance.
}
},
"status": "await-new-voters",
- "target_version": "1.17.5",
+ "target_version": "1.12.0",
"target_version_non_voters": [
"vault_5"
]
@@ -176,11 +161,6 @@ automated upgrade progress (if any), and optimistic failure tolerance.
}
```
-`optimistic_failure_tolerance` describes the number of healthy active and
-back-up voting servers that can fail gradually without causing an outage.
-
-@include 'autopilot/node-types.mdx'
-
## Get configuration
This endpoint is used to get the configuration of the autopilot subsystem of Integrated Storage.
@@ -223,7 +203,31 @@ This endpoint is used to modify the configuration of the autopilot subsystem of
### Parameters
-@include 'autopilot/config.mdx'
+- `cleanup_dead_servers` `(bool: false)` - Controls whether to remove dead servers from
+ the Raft peer list periodically or when a new server joins. This requires that
+ `min_quorum` is also set.
+
+- `last_contact_threshold` `(string: "10s")` - Limit on the amount of time a server can
+ go without leader contact before being considered unhealthy.
+
+- `dead_server_last_contact_threshold` `(string: "24h")` - Limit on the amount of time
+ a server can go without leader contact before being considered failed. This
+ takes effect only when `cleanup_dead_servers` is `true`. This can not be set to a value
+ smaller than 1m. **We strongly recommend that this is kept at a high duration, such as a day,
+ as it being too low could result in removal of nodes that aren't actually dead.**
+
+- `max_trailing_logs` `(int: 1000)` - Amount of entries in the Raft Log that a server
+ can be behind before being considered unhealthy.
+
+- `min_quorum` `(int: 3)` - Minimum number of servers allowed in a cluster before
+ autopilot can prune dead servers. This should at least be 3. Applicable only for
+ voting nodes.
+
+- `server_stabilization_time` `(string: "10s")` - Minimum amount of time a server must
+ be in a stable, healthy state before it can be added to the cluster.
+
+- `disable_upgrade_migration` `(bool: false)` - Disables automatically upgrading Vault using
+ autopilot. (Enterprise-only)
### Sample request
diff --git a/website/content/docs/agent-and-proxy/autoauth/index.mdx b/website/content/docs/agent-and-proxy/autoauth/index.mdx
index 442f140176a7..3bee8a43df31 100644
--- a/website/content/docs/agent-and-proxy/autoauth/index.mdx
+++ b/website/content/docs/agent-and-proxy/autoauth/index.mdx
@@ -105,10 +105,6 @@ The top level `auto_auth` block has two configuration entries:
- `sinks` `(array of objects: optional)` - Configuration for the sinks
-- `enable_reauth_on_new_credentials` `(bool: false)` - If enabled, Auto-auth will
- handle new credential events from supported auth methods (AliCloud/AWS/Cert/JWT/LDAP/OCI)
- and re-authenticate with the new credential.
-
### Configuration (Method)
~> Auto-auth does not support using tokens with a limited number of uses. Auto-auth
diff --git a/website/content/docs/agent-and-proxy/autoauth/methods/cert.mdx b/website/content/docs/agent-and-proxy/autoauth/methods/cert.mdx
index 41740cd544aa..8a04bac8b5ff 100644
--- a/website/content/docs/agent-and-proxy/autoauth/methods/cert.mdx
+++ b/website/content/docs/agent-and-proxy/autoauth/methods/cert.mdx
@@ -31,14 +31,5 @@ config stanza, Agent and Proxy will fall back to using TLS settings from their r
- `client_key` `(string: optional)` - Path on the local disk to a single
PEM-encoded private key matching the client certificate from client_cert.
-- `reload` `(bool: optional, default: false)` - If true, causes the local x509
- key-pair to be reloaded from disk on each authentication attempt. This is useful
- in situations where client certificates are short-lived and automatically renewed.
- Note that `enable_reauth_on_new_credentials` for auto-auth will need to be additionally
- enabled for immediate re-auth on a new certificate.
- See [Auto-Auth Configuration](/vault/docs/agent-and-proxy/autoauth#configuration).
-
-- `reload_period` `(duration: "1m", optional)` - The duration after which auto-auth
- will check if there are any changes for the files that are configured through
- `ca_cert`/`client_cert`/`client_key`. Defaults to `1m`.
- Uses [duration format strings](/vault/docs/concepts/duration-format).
+- `reload` `(bool: optional, default: false)` - If true, causes the local x509 key-pair to be reloaded from disk on each authentication attempt.
+ This is useful in situations where client certificates are short-lived and automatically renewed.
diff --git a/website/content/docs/audit/index.mdx b/website/content/docs/audit/index.mdx
index a3abf8ffd74a..3908ac40e7eb 100644
--- a/website/content/docs/audit/index.mdx
+++ b/website/content/docs/audit/index.mdx
@@ -121,10 +121,6 @@ curl \
--data '{ "hmac": true }'
```
-Another way to identify the source of a request is through the User-Agent request header.
-Vault will automatically record this value as `user-agent` within the `headers` of a
-request entry within the audit log.
-
## Enabling/Disabling audit devices
diff --git a/website/content/docs/commands/auth/move.mdx b/website/content/docs/commands/auth/move.mdx
index 6961e03e05ac..cb5e3a5000d1 100644
--- a/website/content/docs/commands/auth/move.mdx
+++ b/website/content/docs/commands/auth/move.mdx
@@ -28,21 +28,7 @@ Move the existing auth method at ns1/approle/ to ns2/new-approle/:
$ vault auth move ns1/auth/approle/ ns2/auth/new-approle/
```
-Move the existing auth method `auth/userpass` to the `education/certification/approle` namespace.
-
-```shell-session
-$ vault auth move auth/userpass education/certification/auth/userpass
-```
-
## Usage
There are no flags beyond the [standard set of flags](/vault/docs/commands)
included on all commands.
-
-## Post-move considerations
-
-Each namespace has its own policies, auth methods, secrets engines, tokens,
-identity entities and groups. You must consider the following after moving a mount across namespaces:
-
-- Necessary policies exist in the target namespace
-- Entities and groups might need updates after an auth method move
diff --git a/website/content/docs/commands/operator/raft.mdx b/website/content/docs/commands/operator/raft.mdx
index 59fa0ead31e6..9b7f91da94ef 100644
--- a/website/content/docs/commands/operator/raft.mdx
+++ b/website/content/docs/commands/operator/raft.mdx
@@ -128,13 +128,6 @@ Usage: vault operator raft list-peers
}
```
-Use the output of `list-peers` to ensure that your cluster is in an expected state.
-If you've removed a server using `remove-peer`, the server should no longer be
-listed in the `list-peers` output. If you've added a server using `add-peer` or
-through `retry_join`, check the `list-peers` output to see that it has been added
-to the cluster and (if the node has not been added as a non-voter)
-it has been promoted to a voter.
-
## remove-peer
This command is used to remove a node from being a peer to the Raft cluster. In
@@ -236,9 +229,14 @@ Subcommands:
### autopilot state
Displays the state of the raft cluster under integrated storage as seen by
-autopilot. It shows whether autopilot thinks the cluster is healthy or not.
+autopilot. It shows whether autopilot thinks the cluster is healthy or not,
+and how many nodes could fail before the cluster becomes unhealthy ("Failure Tolerance").
+
+State includes a list of all servers by nodeID and IP address. Last Index
+indicates how close the state on each node is to the leader's.
-State includes a list of all servers by nodeID and IP address.
+A node can have a status of "leader", "voter", or
+"[non-voter](/vault/docs/concepts/integrated-storage#non-voting-nodes-enterprise-only)".
```text
Usage: vault operator raft autopilot state
@@ -251,60 +249,34 @@ Usage: vault operator raft autopilot state
#### Example output
```text
-Healthy: true
-Failure Tolerance: 1
-Leader: vault_1
+Healthy: true
+Failure Tolerance: 1
+Leader: raft1
Voters:
- vault_1
- vault_2
- vault_3
+ raft1
+ raft2
+ raft3
Servers:
- vault_1
- Name: vault_1
- Address: 127.0.0.1:8201
- Status: leader
- Node Status: alive
- Healthy: true
- Last Contact: 0s
- Last Term: 3
- Last Index: 61
- Version: 1.17.3
- Node Type: voter
- vault_2
- Name: vault_2
- Address: 127.0.0.1:8203
- Status: voter
- Node Status: alive
- Healthy: true
- Last Contact: 564.765375ms
- Last Term: 3
- Last Index: 61
- Version: 1.17.3
- Node Type: voter
- vault_3
- Name: vault_3
- Address: 127.0.0.1:8205
- Status: voter
- Node Status: alive
- Healthy: true
- Last Contact: 3.814017875s
- Last Term: 3
- Last Index: 61
- Version: 1.17.3
- Node Type: voter
+ raft1
+ Name: raft1
+ Address: 127.0.0.1:8201
+ Status: leader
+ Node Status: alive
+ Healthy: true
+ Last Contact: 0s
+ Last Term: 3
+ Last Index: 38
+ raft2
+ Name: raft2
+ Address: 127.0.0.2:8201
+ Status: voter
+ Node Status: alive
+ Healthy: true
+ Last Contact: 2.514176729s
+ Last Term: 3
+ Last Index: 38
```
-
-The "Failure Tolerance" of a cluster is the number of nodes in the cluster that could
-fail gradually without causing an outage.
-
-When verifying the health of your cluster, check the following fields of each server:
-- Healthy: whether Autopilot considers this node healthy or not
-- Status: the voting status of the node. This will be `voter`, `leader`, or [`non-voter`](/vault/docs/concepts/integrated-storage#non-voting-nodes-enterprise-only).
-- Last Index: the index of the last applied Raft log. This should be close to the "Last Index" value of the leader.
-- Version: the version of Vault running on the server
-- Node Type: the type of node. On CE, this will always be `voter`. See below for an explanation of Enterprise node types.
-
-Vault Enterprise will include additional output related to automated upgrades, optimistic failure tolerance, and redundancy zones.
+Vault Enterprise will include additional output related to automated upgrades and redundancy zones.
#### Example Vault enterprise output
@@ -320,7 +292,7 @@ Redundancy Zones:
Failure Tolerance: 1
Upgrade Info:
Status: await-new-voters
- Target Version: 1.17.5
+ Target Version: 1.12.0
Target Version Voters:
Target Version Non-Voters: vault_5
Other Version Voters: vault_1, vault_3
@@ -338,11 +310,6 @@ Upgrade Info:
Other Version Non-Voters: vault_4
```
-"Optimistic Failure Tolerance" describes the number of healthy active and
-back-up voting servers that can fail gradually without causing an outage.
-
-@include 'autopilot/node-types.mdx'
-
### autopilot get-config
Returns the configuration of the autopilot subsystem under integrated storage.
@@ -370,49 +337,29 @@ Usage: vault operator raft autopilot set-config [options]
Flags applicable to this command are the following:
-- `cleanup-dead-servers` `(bool: false)` - Controls whether to remove dead servers from
+- `cleanup-dead-servers` `(bool)` - Controls whether to remove dead servers from
the Raft peer list periodically or when a new server joins. This requires that
- `min-quorum` is also set.
-
-- `last-contact-threshold` `(string: "10s")` - Limit on the amount of time a server can
- go without leader contact before being considered unhealthy.
-
-- `dead-server-last-contact-threshold` `(string: "24h")` - Limit on the amount of time
-a server can go without leader contact before being considered failed. This
-takes effect only when `cleanup_dead_servers` is set. When adding new nodes
-to your cluster, the `dead_server_last_contact_threshold` needs to be larger
-than the amount of time that it takes to load a Raft snapshot, otherwise the
-newly added nodes will be removed from your cluster before they have finished
-loading the snapshot and starting up. If you are using an [HSM](/vault/docs/enterprise/hsm), your
-`dead_server_last_contact_threshold` needs to be larger than the response
-time of the HSM.
-
-
-
- We strongly recommend keeping `dead_server_last_contact_threshold` at a high
- duration, such as a day, as it being too low could result in removal of nodes
- that aren't actually dead
-
-
-
-- `max-trailing-logs` `(int: 1000)` - Amount of entries in the Raft Log that a server
- can be behind before being considered unhealthy. If this value is too low,
- it can cause the cluster to lose quorum if a follower falls behind. This
- value only needs to be increased from the default if you have a very high
- write load on Vault and you see that it takes a long time to promote new
- servers to becoming voters. This is an unlikely scenario and most users
- should not modify this value.
-
-- `min-quorum` `(int)` - The minimum number of servers that should always be
-present in a cluster. Autopilot will not prune servers below this number.
-**There is no default for this value** and it should be set to the expected
-number of voters in your cluster when `cleanup_dead_servers` is set as `true`.
-Use the [quorum size guidance](/vault/docs/internals/integrated-storage#quorum-size-and-failure-tolerance)
-to determine the proper minimum quorum size for your cluster.
-
-- `server-stabilization-time` `(string: "10s")` - Minimum amount of time a server must be in a healthy state before it
+ `min-quorum` is also set. Defaults to `false`.
+
+- `last-contact-threshold` `(string)` - Limit on the amount of time a server can
+ go without leader contact before being considered unhealthy. Defaults to `10s`.
+
+- `dead-server-last-contact-threshold` `(string)` - Limit on the amount of time
+ a server can go without leader contact before being considered failed.
+ This takes effect only when `cleanup_dead_servers` is set as `true`. Defaults to `24h`.
+
+ -> **Note:** A failed server that autopilot has removed from the raft configuration cannot rejoin the cluster without being reinitialized.
+
+- `max-trailing-logs` `(int)` - Amount of entries in the Raft Log that a server
+ can be behind before being considered unhealthy. Defaults to `1000`.
+
+- `min-quorum` `(int)` - Minimum number of servers that should always be present in a cluster.
+ Autopilot will not prune servers below this number. This should be set to the expected number
+ of voters in your cluster. There is no default.
+
+- `server-stabilization-time` `(string)` - Minimum amount of time a server must be in a healthy state before it
can become a voter. Until that happens, it will be visible as a peer in the cluster, but as a non-voter, meaning it
- won't contribute to quorum.
+ won't contribute to quorum. Defaults to `10s`.
-- `disable-upgrade-migration` `(bool: false)` - Controls whether to disable automated
- upgrade migrations, an Enterprise-only feature.
+- `disable-upgrade-migration` `(bool)` - Controls whether to disable automated
+ upgrade migrations, an Enterprise-only feature. Defaults to `false`.
diff --git a/website/content/docs/commands/secrets/move.mdx b/website/content/docs/commands/secrets/move.mdx
index 6e8868942eca..e8956cb3afca 100644
--- a/website/content/docs/commands/secrets/move.mdx
+++ b/website/content/docs/commands/secrets/move.mdx
@@ -28,22 +28,7 @@ Move the existing secrets engine at ns1/secret/ to ns2/kv/:
$ vault secrets move ns1/secret/ ns2/kv/
```
-Move the existing secrets in `team-vault` to the `vault-edu/` namespace.
-
-```shell-session
-$ vault secrets move team-vault \
- vault-edu/team-vault
-```
-
## Usage
There are no flags beyond the [standard set of flags](/vault/docs/commands)
included on all commands.
-
-## Post-move considerations
-
-Each namespace has its own policies, auth methods, secrets engines, tokens,
-identity entities and groups. You must consider the following after moving a mount across namespaces:
-
-- Necessary policies exist in the target namespace
-- Entities and groups might need updating after an auth mount migration
diff --git a/website/content/docs/commands/server.mdx b/website/content/docs/commands/server.mdx
index de61d671ef75..ce0b987534ad 100644
--- a/website/content/docs/commands/server.mdx
+++ b/website/content/docs/commands/server.mdx
@@ -92,11 +92,6 @@ flags](/vault/docs/commands) included on all commands.
`VAULT_EXPERIMENTS` environment variable as a comma-separated list, or via the
[`experiments`](/vault/docs/configuration#experiments) config key.
-- `-pprof-dump-dir` `(string: "")` - Directory where the generated profiles are
- created. Vault does not generate profiles when `pprof-dump-dir` is unset.
- Use `pprof-dump-dir` temporarily during debugging sessions. Do not use
- `pprof-dump-dir` in regular production processes.
-
- `VAULT_ALLOW_PENDING_REMOVAL_MOUNTS` `(bool: false)` - (environment variable)
Allow Vault to be started with builtin engines which have the `Pending Removal`
deprecation state. This is a temporary stopgap in place in order to perform an
diff --git a/website/content/docs/commands/status.mdx b/website/content/docs/commands/status.mdx
index 4034def61a75..ab7d41530111 100644
--- a/website/content/docs/commands/status.mdx
+++ b/website/content/docs/commands/status.mdx
@@ -54,7 +54,7 @@ By default, the output is displayed in "table" format.
#### Output fields
-1. The field for total shares is displayed as `"n"` instead of `n` in yaml outputs.
+1. The field for total shares is displayed as `"n"` instead of `n` in yaml outputs.
2. The following fields in "table" format are displayed only when relevant:
- "Unseal Progress" and "Unseal Nonce" are displayed when vault is sealed.
- "HCP Link Status" and "HCP Link Resource ID" are displayed when HCP link is configured.
@@ -67,4 +67,3 @@ By default, the output is displayed in "table" format.
- "HA Mode".
- "Active Since" is displayed if the node is active and has a valid active time.
- "Performance Standby" Node and "Performance Standby Last Remote WAL" are displayed for performance standby nodes.
-- The "Removed From Cluster" field is only displayed when the storage or HA backend is raft.
diff --git a/website/content/docs/concepts/events.mdx b/website/content/docs/concepts/events.mdx
index fd410dff04ad..e8c5d7e6ce3c 100644
--- a/website/content/docs/concepts/events.mdx
+++ b/website/content/docs/concepts/events.mdx
@@ -131,14 +131,6 @@ Here is an example event notification in JSON format:
## Subscribing to event notifications
-
-
- Vault deployments with performance replication must subscribe to events on the
- primary performance cluster. Vault ignores subscriptions made from secondary
- clusters.
-
-
-
Vault has an API endpoint, `/v1/sys/events/subscribe/{eventType}`, that allows users to subscribe to event notifications via a
WebSocket stream.
This endpoint supports the standard authentication and authorization workflows used by other Vault endpoints.
diff --git a/website/content/docs/concepts/integrated-storage/autopilot.mdx b/website/content/docs/concepts/integrated-storage/autopilot.mdx
index 497d336fd790..327c87d47bcf 100644
--- a/website/content/docs/concepts/integrated-storage/autopilot.mdx
+++ b/website/content/docs/concepts/integrated-storage/autopilot.mdx
@@ -17,7 +17,7 @@ These two features were introduced in Vault 1.11.
Server stabilization helps to retain the stability of the Raft cluster by safely
joining new voting nodes to the cluster. When a new voter node is joined to an
existing cluster, autopilot adds it as a non-voter instead, and waits for a
-pre-configured amount of time to monitor its health. If the node remains
+pre-configured amount of time to monitor its health. If the node remains
healthy for the entire duration of stabilization, then that node will be
promoted as a voter. The server stabilization period can be tuned using
`server_stabilization_time` (see below).
@@ -31,7 +31,7 @@ and `min_quorum` (see below).
## State API
-The [State API](/vault/api-docs/system/storage/raftautopilot#get-cluster-state) provides detailed information about all the nodes in the Raft cluster
+State API provides detailed information about all the nodes in the Raft cluster
in a single call. This API can be used for monitoring for cluster health.
### Follower health
@@ -50,7 +50,40 @@ although dead server cleanup is not enabled by default. Upgrade of
Raft clusters deployed with older versions of Vault will also transition to use
Autopilot automatically.
-@include 'autopilot/config.mdx'
+Autopilot exposes a [configuration
+API](/vault/api-docs/system/storage/raftautopilot#set-configuration) to manage its
+behavior. Autopilot is initialized with the following default values. If these defaults do not match your expected autopilot behavior, be sure to set them to your desired values.
+
+- `cleanup_dead_servers` - `false`
+ - This controls whether to remove dead servers from
+ the Raft peer list periodically or when a new server joins. This requires that
+ `min-quorum` is also set.
+
+- `dead_server_last_contact_threshold` - `24h`
+ - Limit on the amount of time
+ a server can go without leader contact before being considered failed. This
+ takes effect only when `cleanup_dead_servers` is set. **We strongly recommend
+ that this is kept at a high duration, such as a day, as it being too low could
+ result in removal of nodes that aren't actually dead.**
+
+- `min_quorum` - This doesn't default to anything and should be set to the expected
+ number of voters in your cluster when `cleanup_dead_servers` is set as `true`.
+ - Minimum number of servers that should always be present in a cluster.
+ Autopilot will not prune servers below this number.
+
+- `max_trailing_logs` - `1000`
+ - Amount of entries in the Raft Log that a server
+ can be behind before being considered unhealthy.
+
+- `last_contact_threshold` - `10s`
+ - Limit on the amount of time a server can go without leader contact before being considered unhealthy.
+
+- `server_stabilization_time` - `10s`
+ - Minimum amount of time a server must be in a healthy state before it can become a voter. Until that happens,
+ it will be visible as a peer in the cluster, but as a non-voter, meaning it won't contribute to quorum.
+
+- `disable_upgrade_migration` - `false`
+ - Controls whether to disable automated upgrade migrations, an Enterprise-only feature.
~> **Note**: Autopilot in Vault does similar things to what autopilot does in
[Consul](https://www.consul.io/). However, the configuration in these 2 systems
@@ -61,7 +94,7 @@ provide the autopilot functionality.
## Automated upgrades
-[Automated Upgrades](/vault/docs/enterprise/automated-upgrades) lets you automatically upgrade a cluster of Vault nodes to a new version as
+Automated Upgrades lets you automatically upgrade a cluster of Vault nodes to a new version as
updated server nodes join the cluster. Once the number of nodes on the new version is
equal to or greater than the number of nodes on the old version, Autopilot will promote
the newer versioned nodes to voters, demote the older versioned nodes to non-voters,
@@ -71,7 +104,7 @@ nodes can be removed from the cluster.
## Redundancy zones
-[Redundancy Zones](/vault/docs/enterprise/redundancy-zones) provide both scaling and resiliency benefits by deploying non-voting
+Redundancy Zones provide both scaling and resiliency benefits by deploying non-voting
nodes alongside voting nodes on a per availability zone basis. When using redundancy zones,
each zone will have exactly one voting node and as many additional non-voting nodes as desired.
If the voting node in a zone fails, a non-voting node will be automatically promoted to
diff --git a/website/content/docs/concepts/integrated-storage/index.mdx b/website/content/docs/concepts/integrated-storage/index.mdx
index 5fe51e63aca0..6e1d8c79ca0b 100644
--- a/website/content/docs/concepts/integrated-storage/index.mdx
+++ b/website/content/docs/concepts/integrated-storage/index.mdx
@@ -60,11 +60,6 @@ API (both methods described below). When joining a node, the API address of the
recommend setting the [`api_addr`](/vault/docs/concepts/ha#direct-access) configuration
option on all nodes to make joining simpler.
-Always join nodes to a cluster one at a time and wait for the node to become
-healthy and (if applicable) a voter before continuing to add more nodes. The
-status of a node can be verified by performing a [`list-peers`](/vault/docs/commands/operator/raft#list-peers)
-command or by checking the [`autopilot state`](/vault/docs/commands/operator/raft#autopilot-state).
-
#### `retry_join` configuration
This method enables setting one, or more, target leader nodes in the config file.
@@ -100,10 +95,9 @@ provided, Vault will use [go-discover](https://github.com/hashicorp/go-discover)
to automatically attempt to discover and resolve potential Raft leader
addresses.
-Check the go-discover
+See the go-discover
[README](https://github.com/hashicorp/go-discover/blob/master/README.md) for
-details on the format of the [`auto_join`](/vault/docs/configuration/storage/raft#auto_join)
-value per cloud provider.
+details on the format of the [`auto_join`](/vault/docs/configuration/storage/raft#auto_join) value.
```hcl
storage "raft" {
@@ -173,14 +167,6 @@ $ vault operator raft remove-peer node1
Peer removed successfully!
```
-#### Re-joining after removal
-
-If you have used `remove-peer` to remove a node from the Raft cluster, but you
-later want to have this same node re-join the cluster, you will need to delete
-any existing Raft data on the removed node before adding it back to the cluster.
-This will involve stopping the Vault process, deleting the data directory containing
-Raft data, and then restarting the Vault process.
-
### Listing peers
To see the current peer set for the cluster you can issue a
diff --git a/website/content/docs/enterprise/entropy-augmentation.mdx b/website/content/docs/enterprise/entropy-augmentation.mdx
index e3ae7a6d20fc..f0c0bcb00cb7 100644
--- a/website/content/docs/enterprise/entropy-augmentation.mdx
+++ b/website/content/docs/enterprise/entropy-augmentation.mdx
@@ -21,34 +21,6 @@ interface. While the system entropy used by Vault is more than capable of
operating in most threat models, there are some situations where additional
entropy from hardware-based random number generators is desirable.
-With Entropy Augmentation enabled, the following keys and tokens leverage the
-configured external entropy source.
-
-| Operation | Description |
-| ------------------------ | ------------------------------------------------------------------------------------ |
-| Root Key | AES key that is encrypted by the seal mechanism. This encrypts the key ring. |
-| Key Ring Encryption Keys | The keys embedded in Vault's keyring which encrypt all of Vault's storage. |
-| Recovery Key | With auto-unseal, use the recovery keys to regenerate root token, key rotation, etc. |
-| TLS Private Keys | For HA leader, Raft and Enterprise Replications. |
-| MFA TOTP Keys | The keys used for TOTP in Vault Enterprise MFA |
-| JWT Signing Keys | The keys used to sign wrapping token JWTs. |
-| Root Tokens | Superuser tokens granting access to all operations in Vault. |
-| DR Operation Tokens | Token that allows certain actions to be performed on a DR secondary. |
-
-The [transit secrets engine](/vault/docs/secrets/transit) manages a number of
-different key types and leverages the
-[`keysutil`](https://godoc.org/github.com/hashicorp/vault/sdk/helper/keysutil)
-package to generate keys. It will use the external entropy source for key
-generation.
-
-
-
-When you enable the external entropy source, Vault requires connectivity to the
-HSM. If the HSM becomes unreachable for any reason, the transit secrets engine
-can't generate new keys or rotate existing keys.
-
-
-
To use this feature, you must have an active or trial license for Vault
Enterprise. To start a trial, contact [HashiCorp sales](mailto:sales@hashicorp.com).
@@ -66,7 +38,7 @@ These CSPs have been selected from our previous work in [evaluating Vault for co
FIPS 140-2 guidelines for key storage and key transport](https://www.datocms-assets.com/2885/1510600487-vault_compliance_letter_fips_140-2.pdf)
and include (but not limited to) the following:
-- Vault's root key
+- Vault's root key
- Keyring encryption keys
- Auto Unseal recovery keys
- TLS private keys for inter-node and inter cluster communication (HA leader, raft, and replication)
@@ -90,3 +62,8 @@ Entropy augmentation is disabled by default. To enable entropy augmentation Vaul
for a supported seal type.
[configuration]: /vault/docs/configuration
+
+## Tutorial
+
+Refer to the [HSM Integration - Entropy Augmentation](/vault/tutorials/enterprise/hsm-entropy) tutorial
+to learn how to use the Entropy Augmentation function to leverage an external Hardware Security Module to augment system entropy.
diff --git a/website/content/docs/enterprise/lts.mdx b/website/content/docs/enterprise/lts.mdx
index 1940abb883df..33d4bae8b0d6 100644
--- a/website/content/docs/enterprise/lts.mdx
+++ b/website/content/docs/enterprise/lts.mdx
@@ -91,7 +91,7 @@ upgrade frequently, quickly, or easily.
Vault upgrades are challenging, especially for sensitive or critical workflows,
extensive integrations, and large-scale deployments. Strict upgrade policies
also require significant planning, testing, and employee hours to execute
-successfully.
+successfully.
Customers who need assurances that their current installation will receive
critical bug fixes and security patches with minimal service disruptions should
@@ -111,8 +111,8 @@ the current version ("N") and the two previous versions ("N−2").
Vault versions typically update 3 times per calendar year (CY), which means that
**standard maintenance** for a given Vault version lasts approximately 1 year.
After the first year, LTS Vault versions move from standard maintenance to
-**extended maintenance** for three additional major version releases (approximately one additional year)
-with patches for bugs that may cause outages and critical vulnerabilities and exposures (CVEs).
+**extended maintenance** for an additional year with patches for bugs that
+may cause outages and critical vulnerabilities and exposures (CVEs).
Maintenance updates | Standard maintenance | Extended maintenance
--------------------------------- | -------------------- | --------------------
@@ -152,10 +152,10 @@ The goal is to establish a predictable upgrade path with a longer timeline
rather than extending the lifetime for every Vault version.
Long-term support ensures your Vault Enterprise version continues to receive
-critical patches for an additional three major version releases (approximately one additional year).
-If you upgrade to a non-LTS version,you are moving your Vault instance to a version
-that lacks extended support. Non-LTS versions stop receiving updates once they leave
-the standard maintenance window.
+critical patches for an additional year. If you upgrade to a non-LTS version,
+you are moving your Vault instance to a version that lacks extended support.
+Non-LTS versions stop receiving updates once they leave the standard maintenance
+window.
@include 'assets/lts-upgrade-path.mdx'
@@ -164,7 +164,7 @@ Version | Expected release | Standard maintenance ends | Extended maintenance e
1.19 | CY25 Q1 | CY26 Q1 (1.22 release) | CY27 Q1 (1.25 release)
1.18 | CY24 Q3 | CY25 Q3 (1.21 release) | Not provided
1.17 | CY24 Q2 | CY25 Q2 (1.20 release) | Not provided
-1.16 | CY24 Q1 | CY25 Q1 (1.19 release) | CY26 Q1 (1.22 release)
+1.16 | CY24 Q1 | CY25 Q1 (1.19 release) | CY26 Q1 (1.22 release)
If a newer version of Vault Enterprise includes features you want to take
advantage of, you have two options:
@@ -180,4 +180,4 @@ advantage of, you have two options:
You should follow your existing upgrade process for major version upgrades but
allow additional time. Upgrading from version LTS to LTS+1 translates to jumping
3 major Vault Enterprise versions, which **may** require transitional upgrades
-to move through the intermediate Vault versions.
+to move through the intermediate Vault versions.
\ No newline at end of file
diff --git a/website/content/docs/enterprise/redundancy-zones.mdx b/website/content/docs/enterprise/redundancy-zones.mdx
index c4cb9f903a8e..f905ff20b524 100644
--- a/website/content/docs/enterprise/redundancy-zones.mdx
+++ b/website/content/docs/enterprise/redundancy-zones.mdx
@@ -36,7 +36,3 @@ wait to begin leadership transfer until it can ensure that there will be as much
new Vault version as there was on the old Vault version.
The status of redundancy zones can be monitored by consulting the [Autopilot state API endpoint](/vault/api-docs/system/storage/raftautopilot#get-cluster-state).
-
-## Optimistic Failure Tolerance
-
-@include 'autopilot/redundancy-zones.mdx'
diff --git a/website/content/docs/enterprise/sealwrap.mdx b/website/content/docs/enterprise/sealwrap.mdx
index 99993f51a118..da7664017e79 100644
--- a/website/content/docs/enterprise/sealwrap.mdx
+++ b/website/content/docs/enterprise/sealwrap.mdx
@@ -19,19 +19,6 @@ To use this feature, you must have an active or trial license for Vault
Enterprise Plus (HSMs). To start a trial, contact [HashiCorp
sales](mailto:sales@hashicorp.com).
-## Seal Wrap benefits
-
-Your Vault deployments can gain the following benefits by enabling seal wrapping:
-
-- Conformance with FIPS 140-2 directives on Key Storage and Key Transport as [certified by Leidos](/vault/docs/enterprise/sealwrap#fips-140-2-compliance)
-- Supports FIPS level of security equal to HSM
- - For example, if you use Level 3 hardware encryption on an HSM, Vault will be
- using FIPS 140-2 Level 3 cryptography
-- Enables Vault deployments in high security [GRC](https://en.wikipedia.org/wiki/Governance,_risk_management,_and_compliance)
- environments (e.g. PCI-DSS, HIPAA) where FIPS guidelines important for external audits
-- Pathway to use Vault for managing Department of Defense (DOD) or North
- Atlantic Treaty Organization (NATO) military secrets
-
## Enabling/Disabling
Seal Wrap is enabled by default on supporting seals. This implies that the seal
@@ -40,12 +27,6 @@ quite reliable, but, for instance, if using an HSM in a non-HA setup a
connection interruption to the HSM will result in issues with Vault
functionality.
-
-
-Having Vault generate its own key is the easiest way to get up and running, but for security, Vault marks the key as non-exportable. If your HSM key backup strategy requires the key to be exportable, you should generate the key yourself. Refer to the [key generation attributes](/vault/docs/configuration/seal/pkcs11#vault-key-generation-attributes).
-
-
-
To disable seal wrapping, set `disable_sealwrap = true` in Vault's
[configuration file][configuration]. This will not affect auto-unsealing functionality; Vault's
root key will still be protected by the seal wrapping mechanism. It will
diff --git a/website/content/docs/internals/integrated-storage.mdx b/website/content/docs/internals/integrated-storage.mdx
index 3de858a12011..67292949b405 100644
--- a/website/content/docs/internals/integrated-storage.mdx
+++ b/website/content/docs/internals/integrated-storage.mdx
@@ -302,28 +302,6 @@ For example, if you start with a 5-node cluster:
You should always maintain quorum to limit the impact on failure tolerance when
changing or scaling your Vault instance.
-### Redundancy Zones
-
-If you are using autopilot with [redundancy zones](/vault/docs/enterprise/redundancy-zones),
-the total number of servers will be different from the above, and is dependent
-on how many redundancy zones and servers per redundancy zone that you choose.
-
-@include 'autopilot/redundancy-zones.mdx'
-
-
-
- If you choose to use redundancy zones, we **strongly recommend** using at least 3
- zones to ensure failure tolerance.
-
-
-
-Redundancy zones | Servers per zone | Quorum size | Failure tolerance | Optimistic failure tolerance
-:--------------: | :--------------: | :---------: | :---------------: | :--------------------------:
-2 | 2 | 2 | 0 | 2
-3 | 2 | 2 | 1 | 3
-3 | 3 | 2 | 1 | 5
-5 | 2 | 3 | 2 | 6
-
[consensus protocol]: https://en.wikipedia.org/wiki/Consensus_(computer_science)
[consistency]: https://en.wikipedia.org/wiki/CAP_theorem
["Raft: In search of an Understandable Consensus Algorithm"]: https://raft.github.io/raft.pdf
diff --git a/website/content/docs/platform/aws/lambda-extension.mdx b/website/content/docs/platform/aws/lambda-extension.mdx
index 1df42ecf0cee..61e9718de65b 100644
--- a/website/content/docs/platform/aws/lambda-extension.mdx
+++ b/website/content/docs/platform/aws/lambda-extension.mdx
@@ -284,8 +284,7 @@ processing with returned secrets such as automatic lease renewal. The proxy serv
own Vault auth token is the only thing that gets automatically refreshed. It will
synchronously refresh its own token before proxying requests if the token is
expired (including a grace window), and it will attempt to renew its token if the
-token is nearly expired but renewable. The proxy will also immediately refresh its token
-if the incoming request header `X-Vault-Token-Options: revoke` is present.
+token is nearly expired but renewable.
diff --git a/website/content/docs/platform/k8s/helm/index.mdx b/website/content/docs/platform/k8s/helm/index.mdx
index f84dee667b96..53f410103b93 100644
--- a/website/content/docs/platform/k8s/helm/index.mdx
+++ b/website/content/docs/platform/k8s/helm/index.mdx
@@ -22,13 +22,13 @@ properly installed and configured with your Kubernetes cluster.
@include 'kubernetes-supported-versions.mdx'
-## Using the Helm chart
+## Using the helm chart
Helm must be installed and configured on your machine. Please refer to the [Helm
documentation](https://helm.sh/) or the [Vault Installation to Minikube via
Helm](/vault/tutorials/kubernetes/kubernetes-minikube-consul) tutorial.
-To use the Helm chart, add the Hashicorp Helm repository and check that you have
+To use the Helm chart, add the HashiCorp Helm repository and check that you have
access to the chart:
@include 'helm/repo.mdx'
@@ -59,15 +59,6 @@ cluster](https://kubernetes.io/docs/tasks/administer-cluster/securing-a-cluster/
options](/vault/docs/platform/k8s/helm/configuration), and read the [production deployment
checklist](/vault/docs/platform/k8s/helm/run#architecture).
-
-
-
- If you use AWS features (e.g, AWS PrivateLink) that require a network load
- balancer (NLB), you must provision your NLB **before** your application load
- balancer (ALB).
-
-
-
## Tutorial
Refer to the [Kubernetes](/vault/tutorials/kubernetes)
diff --git a/website/content/docs/platform/k8s/helm/run.mdx b/website/content/docs/platform/k8s/helm/run.mdx
index f52a2c2603b8..0c12570802b0 100644
--- a/website/content/docs/platform/k8s/helm/run.mdx
+++ b/website/content/docs/platform/k8s/helm/run.mdx
@@ -436,25 +436,7 @@ running:
$ kubectl delete pod
```
-
-If you deployed Vault in high availability (`ha`) mode, you must upgrade your
-standby pods before upgrading the active pod:
-
-1. Before deleting the standby pod, remove the associated node from the raft
- with `vault operator raft remove-peer `.
-1. Confirm Vault removed the node successfully from Raft with
- `vault operator raft list-peers`.
-1. Once you confirm the removal, delete the pod.
-
-
-
-Removing a pod without first deleting the node from its cluster means that
-Raft will not be aware of the correct number of nodes in the cluster. Not knowing
-the correct number of nodes can trigger a leader election, which can potentially
-cause unneeded downtime.
-
-
-
+If Vault is deployed using `ha` mode, the standby pods must be upgraded first.
Vault has K8s service discovery built in (when enabled in the server configuration) and
will automatically change the labels of the pod with its current leader status. These labels
can be used to filter the pods.
diff --git a/website/content/docs/secrets/aws.mdx b/website/content/docs/secrets/aws.mdx
index 6e0b74957d06..2f5ea0125d3e 100644
--- a/website/content/docs/secrets/aws.mdx
+++ b/website/content/docs/secrets/aws.mdx
@@ -14,7 +14,7 @@ involve clicking in the web UI. Additionally, the process is codified and mapped
to internal auth methods (such as LDAP). The AWS IAM credentials are time-based
and are automatically revoked when the Vault lease expires.
-Vault supports four different types of credentials to retrieve from AWS:
+Vault supports three different types of credentials to retrieve from AWS:
1. `iam_user`: Vault will create an IAM user for each lease, attach the managed
and inline IAM policies as specified in the role to the user, and if a
@@ -266,7 +266,6 @@ user, you can use a policy like:
"iam:CreateAccessKey",
"iam:DeleteAccessKey",
"iam:DeleteUser",
- "iam:GetUser",
"iam:ListAccessKeys",
"iam:ListAttachedUserPolicies",
"iam:ListGroupsForUser",
diff --git a/website/content/docs/secrets/databases/mongodbatlas.mdx b/website/content/docs/secrets/databases/mongodbatlas.mdx
index 62313bb859c3..25741aff20d0 100644
--- a/website/content/docs/secrets/databases/mongodbatlas.mdx
+++ b/website/content/docs/secrets/databases/mongodbatlas.mdx
@@ -19,8 +19,8 @@ more information about setting up the database secrets engine.
The information below relates to the MongoDB Altas database plugin for the Vault database secrets engine.
- Refer to the MongoDB Atlas secrets engine for
- information about using the MongoDB Atlas secrets engine for the Vault.
+ Refer to the MongoDB Atlas secrets engine
+ for information about using the MongoDB Atlas secrets engine for the Vault.
## Capabilities
diff --git a/website/content/docs/secrets/kv/kv-v2/cookbook/patch-data.mdx b/website/content/docs/secrets/kv/kv-v2/cookbook/patch-data.mdx
index 733453a5ca3e..1e746acbe588 100644
--- a/website/content/docs/secrets/kv/kv-v2/cookbook/patch-data.mdx
+++ b/website/content/docs/secrets/kv/kv-v2/cookbook/patch-data.mdx
@@ -106,11 +106,10 @@ push the update to the plugin.
-@include 'alerts/enterprise-only.mdx'
+@include 'gui-page-instructions/select-kv-mount.mdx'
-@include 'gui-page-instructions/select-kv-data.mdx'
-
-- Click **Patch latest version +** on the key/value page.
+- Click through the path segments to select the relevant secret path.
+- Click **Create new version +** on the key/value page.
- Edit the values you want to update.
- Click **Save**.
diff --git a/website/content/docs/secrets/transform/index.mdx b/website/content/docs/secrets/transform/index.mdx
index 282d48889173..3bbe633d67fb 100644
--- a/website/content/docs/secrets/transform/index.mdx
+++ b/website/content/docs/secrets/transform/index.mdx
@@ -152,8 +152,7 @@ data transformation.
Format Preserving Encryption (FPE) performs cryptographically secure
transformation via FF3-1 to encode input values while maintaining its data
-format and length. FF3-1 is a construction that uses AES-256 for
-encryption.
+format and length.
#### Tweak and tweak source
diff --git a/website/content/docs/secrets/transform/tokenization.mdx b/website/content/docs/secrets/transform/tokenization.mdx
index abd2f5b70ee9..89417c2f2cbb 100644
--- a/website/content/docs/secrets/transform/tokenization.mdx
+++ b/website/content/docs/secrets/transform/tokenization.mdx
@@ -23,9 +23,7 @@ endpoint that lets one query whether a plaintext exists in the system.
Depending on the mapping mode, the plaintext may be decoded only with possession
of the distributed token, or may be recoverable in the export operation. See
-[Security Considerations](#security-considerations) for more.
-Tokenization's cryptosystem uses AES256-GCM96 for encryption of its token
-store, with keys derived from the token and a tokenization root key.
+[Security Considerations](#security-considerations) for more.
### Convergence
diff --git a/website/content/docs/secrets/transit/index.mdx b/website/content/docs/secrets/transit/index.mdx
index dd97f120321d..14266cfafc55 100644
--- a/website/content/docs/secrets/transit/index.mdx
+++ b/website/content/docs/secrets/transit/index.mdx
@@ -88,7 +88,7 @@ types also generate separate HMAC keys):
- `hmac`: HMAC; supporting HMAC generation and verification.
- `managed_key`: Managed key; supports a variety of operations depending on the
backing key management solution. See [Managed Keys](/vault/docs/enterprise/managed-keys)
- for more information.
+ for more information.
- `aes128-cmac`: CMAC with a 128-bit AES key; supporting CMAC generation and verification.
- `aes256-cmac`: CMAC with a 256-bit AES key; supporting CMAC generation and verification.
@@ -262,7 +262,7 @@ have Transit generate and manage a key within Vault.
### Via the Command Line
The Vault command line tool [includes a helper](/vault/docs/commands/transit/) to perform the steps described
-in Manual below.
+in Manual below.
### Via the API
diff --git a/website/content/docs/upgrading/upgrade-to-1.16.x.mdx b/website/content/docs/upgrading/upgrade-to-1.16.x.mdx
index 7d399f06c804..9646a9858009 100644
--- a/website/content/docs/upgrading/upgrade-to-1.16.x.mdx
+++ b/website/content/docs/upgrading/upgrade-to-1.16.x.mdx
@@ -189,7 +189,7 @@ kubectl exec -ti -- wget https://github.com/moparisthebest/static-curl/re
### Product usage reporting
As of 1.16.13, Vault will collect anonymous product usage metrics for HashiCorp. This information will be collected
-alongside client activity data, and will be sent automatically if automated reporting is configured, or added to manual
+alongside activity information, and will be sent automatically if automated reporting is configured, or added to manual
reports if manual reporting is preferred.
See the main page for [Vault product usage metrics reporting](/vault/docs/enterprise/license/product-usage-reporting) for
diff --git a/website/content/docs/upgrading/upgrade-to-1.17.x.mdx b/website/content/docs/upgrading/upgrade-to-1.17.x.mdx
index 4e9486cab6cf..60b0ba1d3e3d 100644
--- a/website/content/docs/upgrading/upgrade-to-1.17.x.mdx
+++ b/website/content/docs/upgrading/upgrade-to-1.17.x.mdx
@@ -176,7 +176,7 @@ kubectl exec -ti -- wget https://github.com/moparisthebest/static-curl/re
### Product usage reporting
As of 1.17.9, Vault will collect anonymous product usage metrics for HashiCorp. This information will be collected
-alongside client activity data, and will be sent automatically if automated reporting is configured, or added to manual
+alongside activity information, and will be sent automatically if automated reporting is configured, or added to manual
reports if manual reporting is preferred.
See the main page for [Vault product usage metrics reporting](/vault/docs/enterprise/license/product-usage-reporting) for
@@ -199,7 +199,3 @@ more details, and information about opt-out.
@include 'known-issues/duplicate-identity-groups.mdx'
@include 'known-issues/manual-entity-merge-does-not-persist.mdx'
-
-@include 'known-issues/aws-auth-external-id.mdx'
-
-@include 'known-issues/sync-activation-flags-cache-not-updated.mdx'
diff --git a/website/content/docs/upgrading/upgrade-to-1.19.x.mdx b/website/content/docs/upgrading/upgrade-to-1.19.x.mdx
deleted file mode 100644
index 7873ee4e7708..000000000000
--- a/website/content/docs/upgrading/upgrade-to-1.19.x.mdx
+++ /dev/null
@@ -1,44 +0,0 @@
----
-layout: docs
-page_title: Upgrade to Vault 1.19.x - Guides
-description: |-
- Deprecations, important or breaking changes, and remediation recommendations
- for anyone upgrading to 1.19.x from Vault 1.18.x.
----
-
-# Overview
-
-The Vault 1.19.x upgrade guide contains information on deprecations, important
-or breaking changes, and remediation recommendations for anyone upgrading from
-Vault 1.18. **Please read carefully**.
-
-## Important changes
-
-### Transit support for Ed25519ph and Ed25519ctx signatures
-
-**NOTE**: This only applies to Transit Ed25519 keys.
-
-On prior versions of Vault, when the sign and verify API endpoints backed by an Ed25519
-key received the prehashed=true or the hash_algorithm=sha2-512 parameters they were ignored,
-returning back or verifying a Pure Ed25519 signature. As of 1.19.x, setting these values
-on Enterprise editions of Vault will now return an Ed25519ph signature and assume the
-input has been hashed using the SHA-512 algorithm.
-
-If neither prehashed nor hash_algorithm values are provided, the existing default of using
-Pure Ed25519 signatures remains unchanged for both Enterprise and CE Vault editions. The change
-is if those values had been overridden they were previously ignored but now will be enforced
-based on the table below.
-
-| Vault Edition | prehashed | hash_algorithm | 1.19.x Signature | Previous Vault Versions Signature |
-|:--------------|:----------|:------------------------------|:-------------------------------------------|:----------------------------------|
-| Enterprise | not set | not set | Pure Ed25519 | Pure Ed25519 |
-| Enterprise | false | any value other than sha2-512 | Pure Ed25519 | Pure Ed25519 |
-| Enterprise | false | sha2-512 | An error is returned | Pure Ed25519 |
-| Enterprise | true | any value other than sha2-512 | An error is returned | Pure Ed25519 |
-| Enterprise | true | sha2-512 | Ed25519ph | Pure Ed25519 |
-| CE | not set | not set | Pure Ed25519 | Pure Ed25519 |
-| CE | false | any value other than sha2-512 | Pure Ed25519 | Pure Ed25519 |
-| CE | false | sha2-512 | An error is returned | Pure Ed25519 |
-| CE | true | any value other than sha2-512 | An error is returned | Pure Ed25519 |
-| CE | true | sha2-512 | An error is returned (not supported on CE) | Pure Ed25519 |
-
diff --git a/website/content/partials/autopilot/config.mdx b/website/content/partials/autopilot/config.mdx
deleted file mode 100644
index cede26434b8d..000000000000
--- a/website/content/partials/autopilot/config.mdx
+++ /dev/null
@@ -1,53 +0,0 @@
-Autopilot exposes a [configuration
-API](/vault/api-docs/system/storage/raftautopilot#set-configuration) to manage its
-behavior. These items cannot be set in Vault server configuration files.
-Autopilot gets initialized with the following default values. If these default
-values do not meet your expected autopilot behavior, don't forget to set them to your desired values.
-
-- `cleanup_dead_servers` `(bool: false)` - This controls whether to remove dead servers from
-the Raft peer list periodically or when a new server joins. This requires that
-`min-quorum` is also set.
-
-- `dead_server_last_contact_threshold` `(string: "24h")` - Limit on the amount of time
-a server can go without leader contact before being considered failed. This
-takes effect only when `cleanup_dead_servers` is set. When adding new nodes
-to your cluster, the `dead_server_last_contact_threshold` needs to be larger
-than the amount of time that it takes to load a Raft snapshot, otherwise the
-newly added nodes will be removed from your cluster before they have finished
-loading the snapshot and starting up. If you are using an [HSM](/vault/docs/enterprise/hsm), your
-`dead_server_last_contact_threshold` needs to be larger than the response
-time of the HSM.
-
-
-
- We strongly recommend keeping `dead_server_last_contact_threshold` at a high
- duration, such as a day, as it being too low could result in removal of nodes
- that aren't actually dead
-
-
-
-- `min_quorum` `(int)` - The minimum number of servers that should always be
-present in a cluster. Autopilot will not prune servers below this number.
-**There is no default for this value** and it should be set to the expected
-number of voters in your cluster when `cleanup_dead_servers` is set as `true`.
-Use the [quorum size guidance](/vault/docs/internals/integrated-storage#quorum-size-and-failure-tolerance)
-to determine the proper minimum quorum size for your cluster.
-
-- `max_trailing_logs` `(int: 1000)` - Amount of entries in the Raft Log that a
-server can be behind before being considered unhealthy. If this value is too low,
-it can cause the cluster to lose quorum if a follower falls behind. This
-value only needs to be increased from the default if you have a very high
-write load on Vault and you see that it takes a long time to promote new
-servers to becoming voters. This is an unlikely scenario and most users
-should not modify this value.
-
-- `last_contact_threshold` `(string "10s")` - Limit on the amount of time a
-server can go without leader contact before being considered unhealthy.
-
-- `server_stabilization_time` `(string "10s")` - Minimum amount of time a server
-must be in a healthy state before it can become a voter. Until that happens,
-it will be visible as a peer in the cluster, but as a non-voter, meaning it
-won't contribute to quorum.
-
-- `disable_upgrade_migration` `(bool: false)` - Disables automatically upgrading
-Vault using autopilot (Enterprise-only)
diff --git a/website/content/partials/autopilot/node-types.mdx b/website/content/partials/autopilot/node-types.mdx
deleted file mode 100644
index 8ee7c5b9f498..000000000000
--- a/website/content/partials/autopilot/node-types.mdx
+++ /dev/null
@@ -1,6 +0,0 @@
-#### Enterprise Node Types
-- `voter`: The server is a Raft voter and contributing to quorum.
-- `read-replica`: The server is not a Raft voter, but receives a replica of all data.
-- `zone-voter`: The main Raft voter in a redundancy zone.
-- `zone-extra-voter`: An additional Raft voter in a redundancy zone.
-- `zone-standby`: A non-voter in a redundancy zone that can be promoted to a voter, if needed.
diff --git a/website/content/partials/autopilot/redundancy-zones.mdx b/website/content/partials/autopilot/redundancy-zones.mdx
deleted file mode 100644
index e681b1e26baa..000000000000
--- a/website/content/partials/autopilot/redundancy-zones.mdx
+++ /dev/null
@@ -1,25 +0,0 @@
-The majority of the voting servers in a cluster need to be available to agree on
-changes in configuration. If a voting node becomes unavailable and that causes
-the cluster to have fewer voting nodes than the quorum size, then Autopilot will not
-be able to promote a non-voter to become a voter. This is the **failure tolerance** of
-the cluster. Redundancy zones are not able to improve the failure tolerance of a
-cluster.
-
-Say that you have a cluster configured to have 2 redundancy zones and each zone
-has 2 servers within it (for total of 4 nodes in the cluster). The quorum size
-is 2. If the zone voter in either of the redundancy zones becomes unavailable,
-the cluster does not have quorum and is not able to agree on the configuration
-change needed to promote the non-voter in the zone into a voter.
-
-Redundancy zones do improve the **optimistic failure tolerance** of a cluster.
-The optimistic failure tolerance is the number of healthy active and back-up
-voting servers that can fail gradually without causing an outage. If the Vault
-cluster is able to maintain a quorum of voting nodes, then the cluster has the
-capability to lose nodes gradually and promote the standby redundancy zone nodes
-to take the place of voters.
-
-For example, consider a cluster that is configured to have 3 redundancy zones
-with 2 nodes in each zone. If a voting node becomes unreachable, the zone standby
-in that zone is promoted. The cluster then maintains 3 voting nodes with 2 remaining
-standbys. The cluster can handle an additional 2 gradual failures before it loses
-quorum.
diff --git a/website/content/partials/cli/README.md b/website/content/partials/cli/README.md
index 6cd61bf7b995..74329cd15886 100644
--- a/website/content/partials/cli/README.md
+++ b/website/content/partials/cli/README.md
@@ -1,108 +1,22 @@
-## Terminology
-
-non-runnable a command with no side-effects other than printing help text.
-command tree a hierarchical graph of CLI commands where internal nodes are
- non-runnable and leaf nodes represent runnable CLI commands.
-root command the root node in a tree or subtree of CLI commands. For example,
- `plugin` is the root node for all plugin commands and
- `plugin runtime` is the root node for runtime commands.
-command family - the top-most root command for a collection of CLI commands.
- For example `audit` or `plugin`.
-
-### Exceptions :(
-
-The `agent` family of commands is malformed. Rather than having a root node
-(`agent`) with two subcommands (`agent start` and `agent generate-config`), the
-root command is runnable.
-
-
-## Why partials?
We document CLI command arguments, options, and flags as partials:
- as a first step toward templatizing and autogenerating the CLI command pages.
-- to make it easier to include and maintain parameters shared across commands in
+- to make it easier to include and maintain elements shared across commands in
the same family.
-- to make it easier to include and maintain parameters shared across command
+- to make it easier to include and maintain elements shared across command
families.
- to make it easier to include information about standard flags on the command
pages.
-## Directory structure
-
-partials/cli/ partials specific to a command family
-partials/cli//args command-family arguments
-partials/cli//flags command-family flags
-partials/cli//options command-family options
-
-partials/cli/shared partials for parameters shared across some, but not all, command families
-partials/cli/shared/args shared arguments (does not exist yet)
-partials/cli/shared/flags shared flags
-partials/cli/shared/options shared options (does not exist yet)
-
-partials/global-settings partials for standard/global parameters
-partials/global-settings/flags global flags (e.g., `-header`)
-partials/global-settings/env global environment variables (e.g., `VAULT_LICENSE`)
-partials/global-settings/both parameters that exits as flags and variables
-
-## Partial templates
-
-- Use the parameter name as the file name and "NAME" in the anchor definition,
- even if the use of dashes or underscores is inconsistent with other parameters
- or partial names. For example, if the flag is `-my_weird_flag`, make the
- partial filename `my_weird_flag.mdx` and the anchor ID
- `COMMAND-flag-my_weird_flag`.
-- If the parameter is shared across command families, but not applicable to **all**
- command families, it belongs under `partials/cli/shared`
-- If the parameter is a flag with a cooresponding environment variable but
- **does not** apply to all commands, talk with a technical writer before
- creating your partials.
-- If the parameter is required, use `` for the default entry.
-- Include `-` as part of the name for flag names **except for anchor IDs**.
-- Use `=` in example text for options
-- Omit `=` in example text for flags
-
-### Template 1 - command-specific parameters
-
-Use the following template for parameters that exist as command-exclusively
-arguments, flags, or options. "ANCHOR_ID" is the ID defined in the ``
-HTML tag.
-
--- Template (start) --
-
-
-
+Partial template for CLI elements (required elements use `<required>` in place of
+a default value):
-
+
-**`NAME (TYPE : DEFAULT)`**
-
-
+**`-NAME (TYPE : DEFAULT)`**
DESCRIPTION
**Example**: `EXAMPLE_OF_VALID_USE`
-
--- Template (end) --
-
-
-
-### Template 2 - shared parameters
-
-Use the following template for parameters that exist as arguments, flags, or
-options that are not global but are shared across more than one command family.
-"ANCHOR_ID" is the ID defined in the `` HTML tag.
-
-
-
-
-
-
-**`NAME (TYPE : DEFAULT)`**
-
-
-
-DESCRIPTION
-
-**Example**: `EXAMPLE_OF_VALID_USE`
\ No newline at end of file
diff --git a/website/content/partials/cli/agent/args/file_path.mdx b/website/content/partials/cli/agent/args/file_path.mdx
index bb0048aec7ea..bfd554bcdf0f 100644
--- a/website/content/partials/cli/agent/args/file_path.mdx
+++ b/website/content/partials/cli/agent/args/file_path.mdx
@@ -1,11 +1,7 @@
-
-
**`file_path (string : "./agent.hcl")`**
-
-
The path where Vault should save the generated configuration file.
**Example**: `"./agent/custom-config.hcl"`
\ No newline at end of file
diff --git a/website/content/partials/cli/agent/flags/config.mdx b/website/content/partials/cli/agent/flags/config.mdx
index fee77d50aee3..f339548d3b30 100644
--- a/website/content/partials/cli/agent/flags/config.mdx
+++ b/website/content/partials/cli/agent/flags/config.mdx
@@ -1,12 +1,7 @@
-
-
**`-config (string : )`**
-
-
-
Path to a single
[Vault Agent configuration file](/vault/docs/agent-and-proxy/agent#configuration-file-options)
or directory of configuration files with agent directives. Repeat the `-config`
diff --git a/website/content/partials/cli/agent/flags/exec.mdx b/website/content/partials/cli/agent/flags/exec.mdx
index b14c9d0a1618..ba0e9664ccb5 100644
--- a/website/content/partials/cli/agent/flags/exec.mdx
+++ b/website/content/partials/cli/agent/flags/exec.mdx
@@ -1,11 +1,7 @@
-
-
**`-exec (string : "")`**
-
-
Path to the command for child processes with optional arguments. Relative paths
start from the current working directory when executed. Corresponds to
`exec.command` in the Vault Agent configuration file.
diff --git a/website/content/partials/cli/agent/flags/exit-after-auth.mdx b/website/content/partials/cli/agent/flags/exit-after-auth.mdx
index c026ba5ec9a3..4f3c1a2a3689 100644
--- a/website/content/partials/cli/agent/flags/exit-after-auth.mdx
+++ b/website/content/partials/cli/agent/flags/exit-after-auth.mdx
@@ -1,11 +1,7 @@
-
-
**`-exit-after-auth (bool : false)`**
-
-
Exit with code `0` after a single successful auth. Success indicates successful
token retrieval and write to sink.
diff --git a/website/content/partials/cli/agent/flags/path.mdx b/website/content/partials/cli/agent/flags/path.mdx
index 051602f69c14..bd1a3debd48e 100644
--- a/website/content/partials/cli/agent/flags/path.mdx
+++ b/website/content/partials/cli/agent/flags/path.mdx
@@ -1,11 +1,7 @@
-
-
**`-path (string : "")`**
-
-
Path to one or more `kv` secrets store. Paths that end with a wildcard (`*`)
include all secrets under that path.
diff --git a/website/content/partials/cli/agent/flags/type.mdx b/website/content/partials/cli/agent/flags/type.mdx
index 8bc947549e21..216a84c92ebc 100644
--- a/website/content/partials/cli/agent/flags/type.mdx
+++ b/website/content/partials/cli/agent/flags/type.mdx
@@ -1,11 +1,7 @@
-
-
**`-type (enum : )`**
-
-
The configuration file entry to create.
Enum | Description
diff --git a/website/content/partials/cli/audit/args/device_path.mdx b/website/content/partials/cli/audit/args/device_path.mdx
index ee0ebb70277a..6d2160586f3a 100644
--- a/website/content/partials/cli/audit/args/device_path.mdx
+++ b/website/content/partials/cli/audit/args/device_path.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`device_path (string : )`**
-
-
The internal path where Vault accesses the audit device. Audit device paths are
unique across all audit devices.
diff --git a/website/content/partials/cli/audit/args/device_type.mdx b/website/content/partials/cli/audit/args/device_type.mdx
index eabfab07f98b..3a8a75cbeb9e 100644
--- a/website/content/partials/cli/audit/args/device_type.mdx
+++ b/website/content/partials/cli/audit/args/device_type.mdx
@@ -1,10 +1,6 @@
-
+
-
-
-**`audit-arg-device_type (enum : )`**
-
-
+**`device_type (enum : )`**
The audit device type to create.
diff --git a/website/content/partials/cli/audit/args/file/file_path.mdx b/website/content/partials/cli/audit/args/file/file_path.mdx
index 4f3a3eac595a..bd0f45a4c0e7 100644
--- a/website/content/partials/cli/audit/args/file/file_path.mdx
+++ b/website/content/partials/cli/audit/args/file/file_path.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`file_path (string : )`**
-
-
Location on the audit log on the Vault server. Must be one of the following:
Value | Description
diff --git a/website/content/partials/cli/audit/args/file/mode.mdx b/website/content/partials/cli/audit/args/file/mode.mdx
index e416b915b5e7..fbd575bb166e 100644
--- a/website/content/partials/cli/audit/args/file/mode.mdx
+++ b/website/content/partials/cli/audit/args/file/mode.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`mode (string : "0600")`**
-
-
The `chmod`-style octal permissions for the audit file. Set `mode` to "0000" to
prevent Vault from modifying the file mode.
diff --git a/website/content/partials/cli/audit/args/socket/address.mdx b/website/content/partials/cli/audit/args/socket/address.mdx
index 38cd2cc80d5c..ef764ddf227e 100644
--- a/website/content/partials/cli/audit/args/socket/address.mdx
+++ b/website/content/partials/cli/audit/args/socket/address.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`address (string : "")`**
-
-
Location of the socket as a server IP and port or a local path.
**Example**: `address="/tmp/audit.sock"`
diff --git a/website/content/partials/cli/audit/args/socket/socket_type.mdx b/website/content/partials/cli/audit/args/socket/socket_type.mdx
index a564c6c0e420..ee2e8a3e6ddf 100644
--- a/website/content/partials/cli/audit/args/socket/socket_type.mdx
+++ b/website/content/partials/cli/audit/args/socket/socket_type.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`socket_type (string : "tcp")`**
-
-
Communication protocol expected by the socket. Vault can write to any
[net.Dialer](https://pkg.go.dev/net#Dialer)-compatible socket. If a TCP socket
stops responding, Vault may become unresponsive due to a
diff --git a/website/content/partials/cli/audit/args/socket/write_timeout.mdx b/website/content/partials/cli/audit/args/socket/write_timeout.mdx
index e5ea6950852b..205fe62bbcc0 100644
--- a/website/content/partials/cli/audit/args/socket/write_timeout.mdx
+++ b/website/content/partials/cli/audit/args/socket/write_timeout.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`write_timeout (string : "2s")`**
-
-
Duration in seconds that Vault will wait for a write to complete over the
socket. Setting `write_timeout` to `0` disables time outs and forces Vault to
always wait until the write completes.
diff --git a/website/content/partials/cli/audit/args/syslog/facility.mdx b/website/content/partials/cli/audit/args/syslog/facility.mdx
index 9de92fb008ff..18e22048441b 100644
--- a/website/content/partials/cli/audit/args/syslog/facility.mdx
+++ b/website/content/partials/cli/audit/args/syslog/facility.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`facility (string : "AUTH")`**
-
-
The process that generated the syslog entry (the syslog facility).
**Example**: `facility="AUTO-AUTH"`
diff --git a/website/content/partials/cli/audit/args/syslog/tag.mdx b/website/content/partials/cli/audit/args/syslog/tag.mdx
index 7f1bd2e666f4..66024e304244 100644
--- a/website/content/partials/cli/audit/args/syslog/tag.mdx
+++ b/website/content/partials/cli/audit/args/syslog/tag.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`tag (string : "vault")`**
-
-
The program that generated the syslog entry.
**Example**: `tag="vault-agent"`
diff --git a/website/content/partials/cli/audit/flags/description.mdx b/website/content/partials/cli/audit/flags/description.mdx
index dad60a6a20a1..d106c4c16ce1 100644
--- a/website/content/partials/cli/audit/flags/description.mdx
+++ b/website/content/partials/cli/audit/flags/description.mdx
@@ -1,11 +1,7 @@
-
-
**`-description (string : "")`**
-
-
A human-friendly string that explains the purpose of the audit device.
**Example**: `-description "KV request auditing"`
diff --git a/website/content/partials/cli/audit/flags/detailed.mdx b/website/content/partials/cli/audit/flags/detailed.mdx
index 27a3db3de683..267458dff44e 100644
--- a/website/content/partials/cli/audit/flags/detailed.mdx
+++ b/website/content/partials/cli/audit/flags/detailed.mdx
@@ -1,11 +1,7 @@
-
-
**`-detailed (bool : false)`**
-
-
Print detailed information such as options and replication status about each
audit device.
diff --git a/website/content/partials/cli/audit/flags/local.mdx b/website/content/partials/cli/audit/flags/local.mdx
index f9667ac3f39e..9c8cbe4a2ba2 100644
--- a/website/content/partials/cli/audit/flags/local.mdx
+++ b/website/content/partials/cli/audit/flags/local.mdx
@@ -1,11 +1,7 @@
-
-
**`-local (bool : false)`**
-
-
Indicates that the audit device is local to the Vault server and ignored by
replication.
diff --git a/website/content/partials/cli/audit/flags/path.mdx b/website/content/partials/cli/audit/flags/path.mdx
index 4af182cd8996..2f355410bb97 100644
--- a/website/content/partials/cli/audit/flags/path.mdx
+++ b/website/content/partials/cli/audit/flags/path.mdx
@@ -1,11 +1,7 @@
-
-
**`-path (string : "/")`**
-
-
The internal path where Vault will access the audit device. Audit device paths
must be unique across all audit devices.
diff --git a/website/content/partials/cli/audit/options/elide_list_responses.mdx b/website/content/partials/cli/audit/options/elide_list_responses.mdx
index 382e36a98c40..0aeec756a4a4 100644
--- a/website/content/partials/cli/audit/options/elide_list_responses.mdx
+++ b/website/content/partials/cli/audit/options/elide_list_responses.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`elide_list_responses (bool : false)`**
-
-
Replace the details for `response.data.keys` and `response.data.key_info` with
the number of entries to reduce the size of audit records. See
[Eliding list response bodies](/vault/docs/audit#eliding-list-response-bodies)
diff --git a/website/content/partials/cli/audit/options/exclude.mdx b/website/content/partials/cli/audit/options/exclude.mdx
index 3ac7861a0070..6e1ee3910f49 100644
--- a/website/content/partials/cli/audit/options/exclude.mdx
+++ b/website/content/partials/cli/audit/options/exclude.mdx
@@ -1,11 +1,6 @@
-
-
-
-
-**`exclude (string : "")`**
-
-
+
+**`exclude (string : "")`**
Remove any fields matching the provided
[exclusion filtering rules](/vault/docs/enterprise/audit/exclusion) from the
@@ -19,9 +14,8 @@ audit entry before writing to the audit device.
definitions to `exclude`:
```
- $ vault audit enable \
- exclude=@rules.json \
- file file_path="/var/logs/vault/audit.log"'
+ $ vault audit enable exclude=@rules.json file file_path="/var/logs/vault/audit.log"
```
+
diff --git a/website/content/partials/cli/audit/options/fallback.mdx b/website/content/partials/cli/audit/options/fallback.mdx
index d00c6699ffe9..6cec7108bab7 100644
--- a/website/content/partials/cli/audit/options/fallback.mdx
+++ b/website/content/partials/cli/audit/options/fallback.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`fallback (bool : false)`**
-
-
The audit device is the fallback for filtering purposes.
**Vault only supports one fallback audit device at a time**.
diff --git a/website/content/partials/cli/audit/options/filter.mdx b/website/content/partials/cli/audit/options/filter.mdx
index 42c2bc6a2d5d..431c3e648263 100644
--- a/website/content/partials/cli/audit/options/filter.mdx
+++ b/website/content/partials/cli/audit/options/filter.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`filter (string : "")`**
-
-
Only write audit log entries matching the provided
[filtering expression](/vault/docs/enterprise/audit/filtering) to the audit
device.
diff --git a/website/content/partials/cli/audit/options/format.mdx b/website/content/partials/cli/audit/options/format.mdx
index 3dfac1137c3f..1218ec0191f2 100644
--- a/website/content/partials/cli/audit/options/format.mdx
+++ b/website/content/partials/cli/audit/options/format.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`format (enum : json)`**
-
-
Write audit log entries in the provided format.
Enum | Description
diff --git a/website/content/partials/cli/audit/options/hmac_accessor.mdx b/website/content/partials/cli/audit/options/hmac_accessor.mdx
index 79f4968bd599..fe4a88421f57 100644
--- a/website/content/partials/cli/audit/options/hmac_accessor.mdx
+++ b/website/content/partials/cli/audit/options/hmac_accessor.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`hmac_accessor (bool : true)`**
-
-
Hash all token accessor data before writing to the audit device.
**Example**: `hmac_accessor=false`
diff --git a/website/content/partials/cli/audit/options/log_raw.mdx b/website/content/partials/cli/audit/options/log_raw.mdx
index d4a2eb3cd8d4..69870db3228d 100644
--- a/website/content/partials/cli/audit/options/log_raw.mdx
+++ b/website/content/partials/cli/audit/options/log_raw.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`log_raw (bool : false)`**
-
-
Hash all sensitive security information before writing to the audit device.
**Example**: `log_raw=true`
diff --git a/website/content/partials/cli/audit/options/prefix.mdx b/website/content/partials/cli/audit/options/prefix.mdx
index b4b2c2f08edf..bb6d812116ca 100644
--- a/website/content/partials/cli/audit/options/prefix.mdx
+++ b/website/content/partials/cli/audit/options/prefix.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`prefix (string : "")`**
-
-
Prepend the provided string to each log entry when writing to the audit device.
**Example**: `prefix="KV Request :: "`
diff --git a/website/content/partials/cli/shared/flags/log-file.mdx b/website/content/partials/cli/shared/flags/log-file.mdx
index cddc393b5ee5..7e75bfd6e87f 100644
--- a/website/content/partials/cli/shared/flags/log-file.mdx
+++ b/website/content/partials/cli/shared/flags/log-file.mdx
@@ -1,11 +1,7 @@
-
-
**`-log-file (string : "./.log")`**
-
-
Absolute path where Vault Agent saves logging data.
- Paths ending with `/` use the default file name `.log`. For example,
diff --git a/website/content/partials/cli/shared/flags/log-rotate-bytes.mdx b/website/content/partials/cli/shared/flags/log-rotate-bytes.mdx
index bc476e701706..37b5a1d5de2f 100644
--- a/website/content/partials/cli/shared/flags/log-rotate-bytes.mdx
+++ b/website/content/partials/cli/shared/flags/log-rotate-bytes.mdx
@@ -1,11 +1,7 @@
-
-
**`-log-rotate-bytes (int : )`**
-
-
File size, in bytes, after which log files must rotate. Leave `log-rotate-bytes`
unset if you prefer not to limit log file size.
diff --git a/website/content/partials/cli/shared/flags/log-rotate-duration.mdx b/website/content/partials/cli/shared/flags/log-rotate-duration.mdx
index f153382a9b2d..487d3e771878 100644
--- a/website/content/partials/cli/shared/flags/log-rotate-duration.mdx
+++ b/website/content/partials/cli/shared/flags/log-rotate-duration.mdx
@@ -1,11 +1,7 @@
-
-
**`-log-rotate-duration (string : "24h")`**
-
-
Amount of time, in `[s|m|h|d]` format, after which log files must
rotate.
diff --git a/website/content/partials/cli/shared/flags/log-rotate-max-files.mdx b/website/content/partials/cli/shared/flags/log-rotate-max-files.mdx
index ad80b5d08606..1e3b6cc122fe 100644
--- a/website/content/partials/cli/shared/flags/log-rotate-max-files.mdx
+++ b/website/content/partials/cli/shared/flags/log-rotate-max-files.mdx
@@ -1,11 +1,7 @@
-
-
-
+
**`-log-rotate-max-files (int : 0)`**
-
-
The number of log file archives to preserve over time:
- *`n`* - Preserve up to `n` archived logs.
diff --git a/website/content/partials/global-settings/both/address.mdx b/website/content/partials/global-settings/both/address.mdx
index c59ca02ca87e..d8417909b634 100644
--- a/website/content/partials/global-settings/both/address.mdx
+++ b/website/content/partials/global-settings/both/address.mdx
@@ -1,11 +1,7 @@
-
-
**`[-address | VAULT_ADDR] (string : 'https://127.0.0.1:8200')`**
-
-
Address of the Vault server.
**Examples**:
diff --git a/website/content/partials/global-settings/both/agent-address.mdx b/website/content/partials/global-settings/both/agent-address.mdx
index 23b4d108f4dc..7e27fedb28d9 100644
--- a/website/content/partials/global-settings/both/agent-address.mdx
+++ b/website/content/partials/global-settings/both/agent-address.mdx
@@ -1,11 +1,7 @@
-
-
**`[-agent-address | VAULT_AGENT_ADDR] (string : "")`**
-
-
Address of the Vault Agent, if used.
**Examples**:
diff --git a/website/content/partials/global-settings/both/ca-cert.mdx b/website/content/partials/global-settings/both/ca-cert.mdx
index 1448c7fdc513..b98ecfca4347 100644
--- a/website/content/partials/global-settings/both/ca-cert.mdx
+++ b/website/content/partials/global-settings/both/ca-cert.mdx
@@ -1,11 +1,7 @@
-
-
**`[-ca-cert | VAULT_CACERT] (string : "")`**
-
-
Path to a PEM-encoded CA certificate file on the local disk. Used to verify SSL
certificates for the server. **Takes precedence over `-ca_path`**.
diff --git a/website/content/partials/global-settings/both/ca-path.mdx b/website/content/partials/global-settings/both/ca-path.mdx
index 43d41cb77d12..cb8d1dfada99 100644
--- a/website/content/partials/global-settings/both/ca-path.mdx
+++ b/website/content/partials/global-settings/both/ca-path.mdx
@@ -1,11 +1,7 @@
-
-
**`[-ca-path | VAULT_CAPATH] (string : "")`**
-
-
Path to a directory with PEM-encoded CA certificate files on the local disk.
Used to verify SSL certificates for the server.
diff --git a/website/content/partials/global-settings/both/client-cert.mdx b/website/content/partials/global-settings/both/client-cert.mdx
index fa7c0b3ec5c1..dbde772483bc 100644
--- a/website/content/partials/global-settings/both/client-cert.mdx
+++ b/website/content/partials/global-settings/both/client-cert.mdx
@@ -1,11 +1,7 @@
-
-
**`[-client-cert | VAULT_CLIENT_CERT] (string : "")`**
-
-
Path to a PEM-encoded CA certificate file on the local disk. Used for TLS
communication with the server. **The specified certificate must match to the
private key specified with `-client-cert`**.
diff --git a/website/content/partials/global-settings/both/client-key.mdx b/website/content/partials/global-settings/both/client-key.mdx
index 16bde7074b2c..12b574fb641c 100644
--- a/website/content/partials/global-settings/both/client-key.mdx
+++ b/website/content/partials/global-settings/both/client-key.mdx
@@ -1,11 +1,7 @@
-
-
**`[-client-key | VAULT_CLIENT_KEY] (string : "")`**
-
-
Path to a PEM-encoded private key that matches the client certificate set with
`-client-cert`.
diff --git a/website/content/partials/global-settings/both/disable-redirects.mdx b/website/content/partials/global-settings/both/disable-redirects.mdx
index 9eb2676be020..815de5818cae 100644
--- a/website/content/partials/global-settings/both/disable-redirects.mdx
+++ b/website/content/partials/global-settings/both/disable-redirects.mdx
@@ -1,11 +1,7 @@
-
-
**`[-disable-redirects | VAULT_DISABLE_REDIRECTS] (bool : false)`**
-
-
Disable the default CLI redirect behavior so the CLI honors the first redirect
response from the underlying API instead of following the full HTTP redirect
chain.
diff --git a/website/content/partials/global-settings/both/format.mdx b/website/content/partials/global-settings/both/format.mdx
index ad3e5a245b87..c0d437e482ec 100644
--- a/website/content/partials/global-settings/both/format.mdx
+++ b/website/content/partials/global-settings/both/format.mdx
@@ -1,10 +1,6 @@
-
-
-**`[-format | VAULT_FORMAT] (enum: table)`**
-
-
+**`[-format | VAULT_FORMAT] (enum: table)`**
Set the CLI output format.
@@ -17,6 +13,6 @@ Value | Description
**Examples**:
-- CLI flag: `-format json`
-- Environment variable: `export VAULT_FORMAT=json`
+- CLI flag: `-format json`
+- Environment variable: `export VAULT_FORMAT=json`
diff --git a/website/content/partials/global-settings/both/log-format.mdx b/website/content/partials/global-settings/both/log-format.mdx
index de3564b7ca0d..ab7c74f03ecd 100644
--- a/website/content/partials/global-settings/both/log-format.mdx
+++ b/website/content/partials/global-settings/both/log-format.mdx
@@ -1,11 +1,7 @@
-
-
**`[-log-format | VAULT_LOG_FORMAT] (enum : standard)`**
-
-
Format of log data:
- **`standard`** - Write log data as basic text.
diff --git a/website/content/partials/global-settings/both/log-level.mdx b/website/content/partials/global-settings/both/log-level.mdx
index 206e149d8247..c21109153977 100644
--- a/website/content/partials/global-settings/both/log-level.mdx
+++ b/website/content/partials/global-settings/both/log-level.mdx
@@ -1,11 +1,7 @@
-
-
**`[-log-level | VAULT_LOG_LEVEL] (enum : info)`**
-
-
Default logging level for the Vault server.
Enum | Logging behavior
diff --git a/website/content/partials/global-settings/both/mfa.mdx b/website/content/partials/global-settings/both/mfa.mdx
index 1bd495e1d644..a3eb933d8f14 100644
--- a/website/content/partials/global-settings/both/mfa.mdx
+++ b/website/content/partials/global-settings/both/mfa.mdx
@@ -1,11 +1,7 @@
-
-
**`[-mfa | VAULT_MFA] (string : "")`**
-
-
A multi-factor authentication (MFA) credential, in the format
`mfa_method_name[:key[=value]]`, that the CLI should use to authenticate to
Vault. The CLI adds MFA credentials to the `X-Vault-MFA` header when calling the
diff --git a/website/content/partials/global-settings/both/namespace.mdx b/website/content/partials/global-settings/both/namespace.mdx
index dcf81d6eac0e..05c87c15480e 100644
--- a/website/content/partials/global-settings/both/namespace.mdx
+++ b/website/content/partials/global-settings/both/namespace.mdx
@@ -1,11 +1,7 @@
-
-
**`[-namespace | -ns | VAULT_NAMESPACE] (string : )`**
-
-
Root namespace for the CLI command. Setting a default namespace allow relative
mount paths.
diff --git a/website/content/partials/global-settings/both/tls-server-name.mdx b/website/content/partials/global-settings/both/tls-server-name.mdx
index 68f6c49bea00..5adc392b3701 100644
--- a/website/content/partials/global-settings/both/tls-server-name.mdx
+++ b/website/content/partials/global-settings/both/tls-server-name.mdx
@@ -1,11 +1,7 @@
-
-
**`[-tls-server-name | VAULT_TLS_SERVER_NAME] (string : "")`**
-
-
Name of the SNI host for TLS handshake resolution for TLS connections to Vault.
**Examples**:
diff --git a/website/content/partials/global-settings/both/tls-skip-verify.mdx b/website/content/partials/global-settings/both/tls-skip-verify.mdx
index 884849a5745e..c2a1f510f35f 100644
--- a/website/content/partials/global-settings/both/tls-skip-verify.mdx
+++ b/website/content/partials/global-settings/both/tls-skip-verify.mdx
@@ -1,11 +1,7 @@
-
-
**`[-tls-skip-verify | VAULT_SKIP_VERIFY] (bool : false)`**
-
-
Disable verification for all TLS certificates. **Use with caution**. Disabling
TLS certificate verification decreases the security of data transmissions to and
from the Vault server.
diff --git a/website/content/partials/global-settings/both/wrap-ttl.mdx b/website/content/partials/global-settings/both/wrap-ttl.mdx
index 878fb8a288f7..d90b32455b17 100644
--- a/website/content/partials/global-settings/both/wrap-ttl.mdx
+++ b/website/content/partials/global-settings/both/wrap-ttl.mdx
@@ -1,11 +1,7 @@
-
-
**`[-wrap-ttl | VAULT_WRAP_TTL] (string : "")`**
-
-
Default time-to-live in `[s|m|h|d]` format for the Cubbyhole token used
to wrap CLI responses. You must use `vault unwrap` to view response data before
the duration expires. Leave `wrap_ttl` unset to leave CLI responses unwrapped.
diff --git a/website/content/partials/global-settings/env/cli_no_color.mdx b/website/content/partials/global-settings/env/cli_no_color.mdx
index ea1535049049..c2b934925ac9 100644
--- a/website/content/partials/global-settings/env/cli_no_color.mdx
+++ b/website/content/partials/global-settings/env/cli_no_color.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_CLI_NO_COLOR (bool : true)`**
-
-
Exclude ANSI color escape sequence characters from the CLI output.
**Example**: `export VAULT_CLI_NO_COLOR=0`
diff --git a/website/content/partials/global-settings/env/client_timeout.mdx b/website/content/partials/global-settings/env/client_timeout.mdx
index 29533482aba0..de3443260594 100644
--- a/website/content/partials/global-settings/env/client_timeout.mdx
+++ b/website/content/partials/global-settings/env/client_timeout.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_CLIENT_TIMEOUT (string : "60s")`**
-
-
Amount of time, in `[s|m|h|d]` format, the CLI should wait on a response
from Vault.
diff --git a/website/content/partials/global-settings/env/cluster_addr.mdx b/website/content/partials/global-settings/env/cluster_addr.mdx
index fea888813f1b..239cdd77c979 100644
--- a/website/content/partials/global-settings/env/cluster_addr.mdx
+++ b/website/content/partials/global-settings/env/cluster_addr.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_CLUSTER_ADDR (string : "")`**
-
-
Address of the local Vault node. Vault uses cluster addresses for
cluster-to-cluster communication when running in high-availability mode.
diff --git a/website/content/partials/global-settings/env/http_proxy.mdx b/website/content/partials/global-settings/env/http_proxy.mdx
index 2d7f0d0ebb86..8617d0c0923f 100644
--- a/website/content/partials/global-settings/env/http_proxy.mdx
+++ b/website/content/partials/global-settings/env/http_proxy.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_HTTP_PROXY (string : "")`**
-
-
Legacy alias for `VAULT_PROXY_ADDR`.
diff --git a/website/content/partials/global-settings/env/license.mdx b/website/content/partials/global-settings/env/license.mdx
index 704e0a3ca971..8e8477d178d9 100644
--- a/website/content/partials/global-settings/env/license.mdx
+++ b/website/content/partials/global-settings/env/license.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_LICENSE (string : "")`**
-
-
Vault Enterprise license string for the local server or node. `VAULT_LICENSE`
takes precedence over `VAULT_LICENSE_PATH` **and** the `license_path` parameter
in the Vault configuration file.
diff --git a/website/content/partials/global-settings/env/license_path.mdx b/website/content/partials/global-settings/env/license_path.mdx
index cd84d53f7d77..ae4d61faaa8e 100644
--- a/website/content/partials/global-settings/env/license_path.mdx
+++ b/website/content/partials/global-settings/env/license_path.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_LICENSE_PATH (string : "")`**
-
-
Local path to a file containing a valid Vault Enterprise license for the server
or node. `VAULT_LICENSE_PATH` takes precedence over the `license_path` parameter
in the Vault configuration file.
diff --git a/website/content/partials/global-settings/env/max_retries.mdx b/website/content/partials/global-settings/env/max_retries.mdx
index 5a48277d22a0..111126a1537b 100644
--- a/website/content/partials/global-settings/env/max_retries.mdx
+++ b/website/content/partials/global-settings/env/max_retries.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_MAX_RETRIES (integer : 2)`**
-
-
The number of times the CLI should retry a request to the Vault server when the
CLI receives one of the following recoverable error codes:
diff --git a/website/content/partials/global-settings/env/proxy_addr.mdx b/website/content/partials/global-settings/env/proxy_addr.mdx
index 060ffa321d94..eb862b0b1d54 100644
--- a/website/content/partials/global-settings/env/proxy_addr.mdx
+++ b/website/content/partials/global-settings/env/proxy_addr.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_PROXY_ADDR (string : "")`**
-
-
The HTTPS or HTTP address, including server and port, where clients can access
Vault. Setting `VAULT_HTTP_PROXY` overrides the default proxy resolution
behavior and tells Vault to ignore the following proxy-related environment
diff --git a/website/content/partials/global-settings/env/rate_limit.mdx b/website/content/partials/global-settings/env/rate_limit.mdx
index 25b8d60ffc07..1fde5909356d 100644
--- a/website/content/partials/global-settings/env/rate_limit.mdx
+++ b/website/content/partials/global-settings/env/rate_limit.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_RATE_LIMIT (string : unset)`**
-
-
The number of operations per second, in `rate[:burst]` format, used to throttle
requests between the CLI and the server. The `burst` value is optional and
defaults to `rate` when not specified.
diff --git a/website/content/partials/global-settings/env/redirect_addr.mdx b/website/content/partials/global-settings/env/redirect_addr.mdx
index 02b14449f6fb..7c400efb6d8d 100644
--- a/website/content/partials/global-settings/env/redirect_addr.mdx
+++ b/website/content/partials/global-settings/env/redirect_addr.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_REDIRECT_ADDR (string : "")`**
-
-
Local node address that listens for redirected client communication when Vault
runs in high-availability mode.
diff --git a/website/content/partials/global-settings/env/skip_verify.mdx b/website/content/partials/global-settings/env/skip_verify.mdx
index 61075467deab..fb79ae2ebcad 100644
--- a/website/content/partials/global-settings/env/skip_verify.mdx
+++ b/website/content/partials/global-settings/env/skip_verify.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_SKIP_VERIFY (bool : false)`**
-
-
Allow communication between the CLI and Vault server before verifying the
authentication certificate presented by Vault.
diff --git a/website/content/partials/global-settings/env/srv_lookup.mdx b/website/content/partials/global-settings/env/srv_lookup.mdx
index c16ee16b0bb2..195ce532d524 100644
--- a/website/content/partials/global-settings/env/srv_lookup.mdx
+++ b/website/content/partials/global-settings/env/srv_lookup.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_SRV_LOOKUP (bool : false)`**
-
-
Use SRV records instead of standard DNS to look up hostnames as described in
the Network Working Group draft
["Use of SRV records in conjunction with HTTP and URIs"](https://datatracker.ietf.org/doc/html/draft-andrews-http-srv-02).
diff --git a/website/content/partials/global-settings/env/token.mdx b/website/content/partials/global-settings/env/token.mdx
index 56eb99eda3a4..11e116189918 100644
--- a/website/content/partials/global-settings/env/token.mdx
+++ b/website/content/partials/global-settings/env/token.mdx
@@ -1,11 +1,7 @@
-
-
**`VAULT_TOKEN (string : "")`**
-
-
A Vault-issued service token that authenticates the CLI user to Vault.
See the [tokens concepts page](/vault/docs/concepts/tokens) for more information
on token types.
diff --git a/website/content/partials/global-settings/flags/header.mdx b/website/content/partials/global-settings/flags/header.mdx
index c7c04454d036..3699d394476e 100644
--- a/website/content/partials/global-settings/flags/header.mdx
+++ b/website/content/partials/global-settings/flags/header.mdx
@@ -1,11 +1,7 @@
-
-
**`-header (string : "")`**
-
-
Optional HTTP header in the form `"="` for the CLI request. Repeat
the `-header` flag as needed with one string per flag. User-defined headers
cannot start with `X-Vault-`
diff --git a/website/content/partials/global-settings/flags/non-interactive.mdx b/website/content/partials/global-settings/flags/non-interactive.mdx
index fe6d1e0c3ac5..43725e8ad758 100644
--- a/website/content/partials/global-settings/flags/non-interactive.mdx
+++ b/website/content/partials/global-settings/flags/non-interactive.mdx
@@ -1,11 +1,7 @@
-
-
**`-non-interactive (bool : false)`**
-
-
Prevent the CLI from asking users for input through the terminal.
**Example**: `-non-interactive`
diff --git a/website/content/partials/global-settings/flags/output-curl-string.mdx b/website/content/partials/global-settings/flags/output-curl-string.mdx
index e7e15184b85b..a10a86f01b04 100644
--- a/website/content/partials/global-settings/flags/output-curl-string.mdx
+++ b/website/content/partials/global-settings/flags/output-curl-string.mdx
@@ -1,11 +1,7 @@
-
-
**`-output-curl-string (bool : false)`**
-
-
Print the API call(s) required to execute the CLI command as `cURL` strings
then exit without running the command.
diff --git a/website/content/partials/global-settings/flags/output-policy.mdx b/website/content/partials/global-settings/flags/output-policy.mdx
index 749240600e9b..457a17869022 100644
--- a/website/content/partials/global-settings/flags/output-policy.mdx
+++ b/website/content/partials/global-settings/flags/output-policy.mdx
@@ -1,11 +1,7 @@
-
-
**`-output-policy (bool : false)`**
-
-
Print the Vault policy required to execute the CLI command as HCL then exit
without running the command.
diff --git a/website/content/partials/global-settings/flags/policy-override.mdx b/website/content/partials/global-settings/flags/policy-override.mdx
index 340c12d93c7c..af33f609c99c 100644
--- a/website/content/partials/global-settings/flags/policy-override.mdx
+++ b/website/content/partials/global-settings/flags/policy-override.mdx
@@ -1,11 +1,7 @@
-
-
**`-policy-override (bool : false)`**
-
-
Overrides any Sentinel policy where `enforcement_level` is "soft-mandatory".
**Example**: `-policy-override`
diff --git a/website/content/partials/global-settings/flags/unlock-key.mdx b/website/content/partials/global-settings/flags/unlock-key.mdx
index 097b18a6f243..38f8c01e3dcb 100644
--- a/website/content/partials/global-settings/flags/unlock-key.mdx
+++ b/website/content/partials/global-settings/flags/unlock-key.mdx
@@ -1,11 +1,7 @@
-
-
**`-unlock-key (string : )`**
-
-
Plaintext key that unlocks the underlying API endpoint for a given namespace.
**Example**: `-unlock-key "7oXtdlmvRQ"`
diff --git a/website/content/partials/gui-page-instructions/select-kv-data.mdx b/website/content/partials/gui-page-instructions/select-kv-data.mdx
deleted file mode 100644
index 564b39ac9a0c..000000000000
--- a/website/content/partials/gui-page-instructions/select-kv-data.mdx
+++ /dev/null
@@ -1,14 +0,0 @@
-- Open the data page for your `kv` plugin:
-
- 1. Open the GUI for your Vault instance.
-
- 1. Login under the namespace for the plugin or select the namespace from the
- selector at the bottom of the left-hand menu and re-authenticate.
-
- 1. Select **Secrets Engines** from the left-hand menu.
-
- 1. Select the mount path for your `kv` plugin.
-
- 1. Click through the path segments to select the relevant secret path.
-
- 1. Select the **Secret** tab
\ No newline at end of file
diff --git a/website/content/partials/known-issues/aws-auth-external-id.mdx b/website/content/partials/known-issues/aws-auth-external-id.mdx
deleted file mode 100644
index e4774161255d..000000000000
--- a/website/content/partials/known-issues/aws-auth-external-id.mdx
+++ /dev/null
@@ -1,19 +0,0 @@
-### AWS Auth Role configuration requires an external_id
-
-#### Affected Versions
-
-- 1.17.0 - 1.17.3
-
-#### Issue
-
-You must set the `external_id` parameter during role configuration, or the Vault
-AWS authentication plugin returns a validation error.
-
-#### Workaround
-
-To avoid the error during configuration:
-
-1. Set the `external_id` parameter when configuring AWS authentication plugin
- with a valid ID or any string longer than two characters.
-1. Configure any desired roles.
-1. If you used an arbitrary string, remove the external ID.
diff --git a/website/content/partials/known-issues/sync-activation-flags-cache-not-updated.mdx b/website/content/partials/known-issues/sync-activation-flags-cache-not-updated.mdx
deleted file mode 100644
index 942146ba08f7..000000000000
--- a/website/content/partials/known-issues/sync-activation-flags-cache-not-updated.mdx
+++ /dev/null
@@ -1,23 +0,0 @@
-### Cached activation flags for secrets sync on follower nodes are not updated
-
-#### Affected versions
-
-- 1.16.0 - 1.16.2
-- 1.17.0 - 1.17.5
-
-#### Issue
-
-Vault 1.16 introduced secrets sync with a one-time flag required to activate the
-feature before use. Writing the activation flag to enable secrets sync is forwarded
-to leader nodes for storage and distributed to follower nodes, but the in-memory
-cache for this flag is not updated on the followers.
-
-This prevents any secrets sync endpoints (those starting with `sys/sync/`) from
-being usable on follower nodes in a cluster.
-
-#### Workaround
-
-The cache is force-updated on all nodes when the leader node steps down and the
-cluster promotes a new leader. First, activate the secrets sync feature as described
-in the [documentation](/vault/docs/sync#activating-the-feature). Then, have the leader node
-step down.
diff --git a/website/content/partials/kubernetes-supported-versions.mdx b/website/content/partials/kubernetes-supported-versions.mdx
index d4f8a83c7160..4913216a104d 100644
--- a/website/content/partials/kubernetes-supported-versions.mdx
+++ b/website/content/partials/kubernetes-supported-versions.mdx
@@ -4,10 +4,10 @@ The following [Kubernetes minor releases][k8s-releases] are currently supported.
The latest version is tested against each Kubernetes version. It may work with
other versions of Kubernetes, but those are not supported.
-* 1.31
* 1.30
* 1.29
* 1.28
* 1.27
+* 1.26
[k8s-releases]: https://kubernetes.io/releases/
diff --git a/website/data/docs-nav-data.json b/website/data/docs-nav-data.json
index 546597d77229..876108b7fd23 100644
--- a/website/data/docs-nav-data.json
+++ b/website/data/docs-nav-data.json
@@ -2522,10 +2522,6 @@
"title": "Upgrade to Raft WAL",
"path": "upgrading/raft-wal"
},
- {
- "title": "Upgrade to 1.19.x",
- "path": "upgrading/upgrade-to-1.19.x"
- },
{
"title": "Upgrade to 1.18.x",
"path": "upgrading/upgrade-to-1.18.x"
diff --git a/website/public/img/gui/kv/patch-data.png b/website/public/img/gui/kv/patch-data.png
index dffc8bbaef89717bab5e33d7f4239f309fa2e734..38cbe953598f2f44bf56100e5f704a2296579769 100644
GIT binary patch
literal 35270
zcmc$`2UL??(=Lo6AR?k5`hXM*9RZczL6qKm2SIvo2^|3e=_*wqfYNK|frKJm={>Xr
zsi8+o2rX~|KF@c~zrORo=X=-xpL1A?#ZGeHd(WOd_sn(8Oo&uhl_S4FdxL<0fLuZT
zl_mkfHD&^WtM=E4@$b-09N)qJx#FQI_mZG|fNl-{;+l=LiZlU1WjxvG8zTH`QdfBc
z4+4T)?H9jSx?Kw25)echDZG-__BP!@kow=n;Ldk|*RwTu2wssEgjf`LYu7yfaP5`Y
z?IPW$`bJkO*VWDo@5V>TQEy0#t8R9^t2o_y+8_soNb|5MU!R%cu-tfS5_rtURZ%V5ezwSuqoCiSyO2pc;8R`V
zkMkuv6GTCY>N$c
zCn9FNatv`K@x!zI0$=lef@+|Et)jCOMDFv1d$wL@@c`)#Yh;d}^~C3jSE$j%bSE2Q
zC4S(%t|+Jj=3;Lj3=*jF@G?Yk7!JG4>Otl_GN)l>rt2m1Jj81UWHYeni)FH*ADGxa
zNuIEn9H1H$3mGO{a`v2kH52j&hfk085b%sO7uylZ?THHBX3}*>W4?f)2-m&La
zkV{1troCWHoD?&Fn~3s1f4DO$O?%$8N9(5Rx6Bta_oaNUqS8Gut%S_kD;e~bs`6yF
zU_H|hH4)-WrvP>B^Ecvct{x>#wjbE+pj<;#M}sb{P&+(^!n5Jmb|&;RPih%F)_T7^
zisIMgS&xG6?kRoRDJq@MdR?=i3ZaK>5!58r>7h5O{WV^h_Iu%GKx12+$#6yGw5O0t
zOM`pGF+(yFA(3d!agN@8*BFuE@3U^hCZCdB#0m~GUa|G52RCj-I|0t2C5B#K68HIf
zEUrCG@K=-9`RNQ$GKl#`ik1xjERWfd*^B|d?i`8f@DHj`1Xa(Aq2*gnCWld75<4N*
z{`H1OEbUX|Jk-!5-F}D8;pH1XAnR*TtNmtRvoi%rg9Lp^V2j@@`xVX^VL6WskGq>^
z8Y>=rdV=&pwfWP+?E&aG5B`JvVc%n%*w*3NGnHdG|KI(mgRC1x-KN81@yQ%$hnf8L
zn}@#otjh=VrM<_sknRkNxt}3G(Oo`l1F7#Zroj+8-4YHe6?DouV9hLOZILtr(Xz7t
zoDy6+oF_&9GKF-PrGKr*8l!4fqJ2*iXuW2B69ivo9rGqzJ1^lY9HciCyEF9#~ni
z@d3zV_}7bTOqxGOm)_v`*D-mf%ta6R0^8SrLXR^>y^atGwco+Yv`0p@$G8IibwTm*
za~`s&TDl*8)*p_|1PtTV(!O>+SWFVudNgQODV2a`_?Ag1>(II_Ak1cS*25y#(s0`7
z&1`1Ge{|r-Cumm^ta-O|VrkxH!?VDVrNxDi(%HAUPGos^f9pO=veKgJx_c@Jz%|tU
zwZ7kmktplQy#8UZ#I0+THO@sL>7F8$T5DHGbEM0U59&rIVCZmU?m4a`xu$gU&0qo*tC2jT4zTO@@ibcfpw2$i^*Dv;_@RpCQ(hE~O1X?-}$*l#=l9xytW-Q$DYR*5iO1yXN
z>Z2_l-&ZbuHH3B&l9~;{(5;XN7N*h?@r)kNmN(E7#sofuvn4ULzvbq`T8i1%bH^iE
zm2NlOx&23s(8!@JAIKW4g`#Dn%XtOdw_eh#^MnefA{JC88}!y@Pn48%kq|P;2qaK9Fdk!0!zZTFAUCsiLqaYZcI47=;o>ozW%?0k3b6^_W_%MB)UDC=M4wBAbMjNcPc>rBK@+~+`QEXjqOzxgwmHo$
zXs~x2VbWL}>w1Sd5n#T%a{DhM$zv&`_%U5AMNDtSSkZUC;OOxiT}-r@V>(v$W(kPb
z1|9whS5p9=*mO+M=SeI_P-k%>7yE@0A3ohvn=8D@MY0yG5is5bW)pDQL?+Eu
zGMVmpHHj*U$4i=albGTdNd*iB;mJHNoe*RI+l>MbOxv8C-Iz*B$@vs+ta@Vy>igMb
zyGP5^K=eeEgDGfv=y$Gzmy_$mc(3>kXbbBSQv>?m#-dcp4c^Ix!pz7c@#b^=2j;(*
zTGL1+flwW+v)4x{4$90Bv(bUxMh
zC!aF`Y`6Q&vzaS5WW%y@wjqAf2Q8Yx8@
zaz#En+U7x+HmM9*qL%6)!&mF2OfJ>1`qEv2?i{l1hrz7C@wFD1VNSQMGijjyGt
zzj-&J26n{R65i7Uc-uyhrQkiY?md$GE#Z49_Ukh6wYE`^gX`g_A)TMQ;#yr$`XmQC>bn$U!n=&*NE4w2
z6sI`mLE=}XOCb9m?{_p>bkf!L_QunyU9k`R1RYaL(;(>*v>+jQ^HFYE&y6R(XR(5O
z0R#RjPrjoFx9-;YHX-l5utTiJMOrgz1T4;AT^8>YwtBdQcdgQ#=fVDUBy6t(mhG0>ud
z+9c*fwgY*5BrL%N+#g6DzeLU51T^foALa;&0{DV67d>Yckd2k*J9rahLQ;RlMS`v_
zzDZOm&{9>h%`=C_KJ^BcFF4&$hR15Yq)hUOOIxw;kj!@&c%Zhv&3RIsKo5Ilz42BY
zr<*3-IF#>W#x&=Qi5l|qcU5I`y>$FZtnHSHo?}G!!wmmQ?bbYVtI|bJXfc28UGsix
zp>-70QdHELx%*Ms`0+#fmaLzOsC)c)w^I^`kJ0*gs)ecpdhLwbfUIEcw
z%WoBNT;Q))f+tDG#NADYb)ahScRr{hA&gGqR)x=_0%)x?AhjX)GS#a88M|EIvEv&~!;h
z>pJZIDA}*4*~n({Lnr8e@jBRPQvwXhnT0L82=Z-~Ie^K?byGFvKmcD0(J&gwt>Hqw
z6nlrw#x3=;iSe|PC6)Ccta181pfO6yN;s(U-pQ2+DM-wFsqbAxh80Lv!Od^wUmXgR
zt3=tcW8V#aW|cGqCqEKy^1B;UEGIjlk(tR5*wEEjBUJ)f=`R07f8=ABbU*4{E{#-i
zkmlU`=Q4@sAe?UUEEpqo|8>#fIlErO%dTlw*6*#DY
zB!1b)`UH6W5_ccV2Z3smCEUM4RIq2X`~ePYN$nP8H*)dNtDt2q
z;Xn9D;x`XZK1HI+&1+fa0>7%pgCOPWKE6RFxFPMbGh{iHdW+WmPML>3Ju|B+%bt#m
zJQ2SZWk69rrKW)+kuY(jOg&kIMHz|vn5f`Sp+{dt-c
zm*DJI09FuCF^%grt>2YidrIA6U(n|%2GCzG<3fZ%Lx
zOX}_l*aFWrBx7&hd}eUfW4kL(JH;HBTxC0}U~;Q^^f~!B^WodL--|M3vjg2zMTpzm
zZSxKjM&g<^ZH1I5HauOx&XzS>u>lj%1ietIQ^s+-*nEflWJI3ADl$3DbNk
zrQZCGX*qES61$E`1t_}^GB|*?ji|+zs#gz(6Dh`eEn>PR_wM?Bw9q8?LzW3L(C5$A
zkt^L)P2>!sF0(U>RmMg>b~;`^)63uZna-_VQ1^9j0#6d-es~Q|F%>*EGmaAh{RN%L
z3izg~YvoHbb$-&i;5bk>a7AGo($wcrf2bPXsBZZ8l2;$Kvd_olW9QKt~
zaHDYc(G$$l0~g!3!Xd=5eskN)Gq@Ncu*}|(dpU{|4c{A`8`|nj?)N(I}`=lLy
zbn$3Mmgil&92SoM>omvZMXy4#|6Kjy+JDZM7`(nEYO;1OsLn85z@-+S>B0YG=L;9w
zS$c^z#)3Ev@Q}`^xd$Fd{$Nu0@>6xhm%^92$)d{xL7?-dw9UC85H~qO@bWXM2CIfU
zr08Xpg80AJWD^7vhGRJ#Y;OLYqRF1MW&bA+^C#*N+$J^rCs{M3`6spWDkS-zj84rz
z`J3#&H~rxn*FQ;~e>K&ns#4@ro6st!$_I7+NnjkUK!r!%$$U5cbcI`VU0{>RT)?^I
z7m9&r8fnCSTVK0QwsKbb(`*OD*Ju7rZ;m}GdlHo)QolcbfF65$9xhP!R!4xX(rr(-
z%n%!9xI&kmAB!C|#oyc(^3rMWJZo3LWweW@Zkm78=IY(8nL73A{8*=p+#3PGBt1*<
zFU<7sHZn6*kXchGWqR{$e!o$(?nxaNCV|**daORA?WNPyajb+{1JX%FLpK8ApkRKf
zjdwRR&XOs0oRvwb-s-1dt$NKi4OERs9gpc)hKU@2!1$L$MlaSw8Z_^32f;RWj8T3uv%h
z&Pe
zc>{Iho8)csYVR8r8>##KB;eb@dioH@0-K4%KAUKhK>}m?BMekAzGFnJn;eIqxKcYl
z-sr8CSghaj`UfuFeK>pi-zPF2?^F05Zuq!YwE)~h#9W-4LB~Ilazh|SzQXO3#3bb{
z!wz~lt6r?+m;9fkGmLq?K{KXC1vuWsTbGhyG;$0T$H01mTe>$_*y-kQ>f!e&uB_E{
zx$mV});6f|{_0I9aWJ(eNgxLcDr{VMFlh%AD8$$)h6P6ZZHRQ8T>3AET
zYnCyhrN_v&FVWy)KA6pEzA*MY
z&WXI6%Bk-!bKJ;TK3g?o@vQeZfAt?#HUm~r`&ruN-am$yvP+tTZ{$iWcD${#x5gg9A}FM>N;g?<^>+QMIauFvsid|!;77MYF2kyqE`vwvyhz*+a_z*t-f;Gh
zS$vaPW$tO~@3YSNV&A=ga>^GQb}qkJJTHHf?@&67Bnm9J9q)UZa6o(4SKIdanR(i1
z^7J2vTHK7VWB;`5s9whfXio<3)|#~w1Ilk09#WW`sZrgv{rhp*`&{dm<_vWRr+oQH
zE(0tVMMNv$u)9UE+Rp`;kgv)l)mFZE0(lOu55M+(JLz$pt5ELTT%^}PJaDecL^=4(
znBr$y2Ai9M>ZX^H6h1C~r4*?bp8p^Jf-#J#L
znCSE&g*5fWEmF{Lq>7&Prqz1fqw7h$o|Ys$Ye{e$Un*vU>X+xW2{21U6g9Ggr(4)Z
zKR?K9q%9qMyBDJ%yg=W8P1`M8ZG1bho0mkrbQ!kCwC_L?ZNH8lPZxHKP#fNNAid*F
z4r#qHig|+NF6eP-ESCPc-O~+KxK>B@9KmE!#&pI%W+FJDn4^#VY#x16(#6JMHV5hZ
zb1taR)XgS&HEx#aNd8l|1wydRXN?!hp2ehS$W!w)!|Yrfmtu4)xSVCtaf77AKg7gz
zVa>Gnqx8AE-taEW9pMBlVJ5rSU$*WCA+}0W7uC8eAqP(5FAS7E1d*#$D}67ay-Q{&
zl(99kLAl_@IV)pU>mfmPdR?y~Wi<}gwvor#FP8RQsg3{8)}zF<=X>XmpEK5bB9Z2-=ActscuZe>3>Hv<-7ZsTUQCgz*0&Cs>q
z^qa?uy0DFEB{s%jfOC(5Z==2*i47w!CG7nv@b!*)_!`SR8jfy$o^K;2G_rk6bAZF)
zgp&5u)jP}z^!3OD&lf%GL@oC`z#hiW*Os5JZn~KolxDto#pPu9B66cMo;6uw`za8)
z#lnkg_HKY3?U-8nLQ6UhXX4g`wGw6tWc>rcH&a#{ibjUmd-o4{`rTU`x3$UzFSR74
zsAHb3L}l*%TE<8;p|zZRpu{m$qfSS(nxa-6cHsPy=5SHUbLglECnZyiy9Q@3;oF<^
z{oq`a2ovg4>)wf2H5_ybK!hdPz4Fm(C=E`mT5Lba!?K{w9KQYS$#E
zG*Y@a{qivKO!V@9?=6-67$iObaAOZ^$rR4CSiV1MVtIAZ(EIZ>@vF+wF`o^~nKDE#
zEQes^qs~&r=;e@rMvLel|2+NfZ_oBWL*@S!&-CeH+NA9WM&LS(F;M%>n=a%Z$oy6w
zBw}D+fnu2Ui1!u9drRZQE-m#3Wu*0v9jPEBP4IcN+aE7Q%C_cILS#Mv2T=cir4#=`
z@WN#2NHtS`sT@<|x2W5ZJT|NIFovOOrA@I*L9Fmj&EnqL6pY2TD+O@x@;&VvA(W4d
zkOqazTX8zuR|Ky7r2kmLS>B@9vTcsKvC_Ps-&C5F4
zxI2?IPal7JD%|EX2i`k^E-YCiV`yM+8mrZSbJ6>?hF$TUCe|ZQ3mikNJ{uM)hn{$*
zJ{O(c-Kv;nhj|I`?@U@A-N?V((l6VpAQ@4es!bz$0CXe(+kQ(li5tAz*jbc&J}=lb
zR<=R9{(E`DcYTpDPnM*_C?a&W*5XQd3GphQ4aj+`1>R?4Hryl2y7~csqoB5jLh5xjq>oa%Ay5x1U^*YLyWp+j@JoAuXARW^Y*loU$PT@A7Re|-
z2`}oSm;m?r0m^c)-Gf@02kIqRjEm(tJd)$3wSGlf1)<~L5ji_y5?`tf?a`?2pS>Y;
zCeCy1O95v~Yo^HlU3{|WEW)AIaZeihL%jKxy}dv*5fS$&+BY5RQWi3$wZFgKWU>6Y
zCmFc19@IoKv!$w@qvB#WvWXsVUGdoa>~rjRWZ%yrB;dNqxNeppsV(j2=jhzrtgCgd
zf2pbp+RfSs5Qjn1cJq83y5d~pxOUF|`|#3I>pFa3p#I+499qF-fV4(d=%F40oJ6i}F0O
zdohtJ3e16YnNLK7W&&ze(l;A>*|AE5NBEi7)~n6|IG}<>^QEz}fjQ_CC-;{DTR?mr
zGik>H&kMPx%NrIMhYDc}V1=D#QjvwFoEo$2s>fL(9C5Dt|4pq{726)5XS5HIQz0vHr
zL7tAC