diff --git a/.github/CONTRIBUTING.md b/.github/CONTRIBUTING.md index a7bd6adf..d4d275b5 100644 --- a/.github/CONTRIBUTING.md +++ b/.github/CONTRIBUTING.md @@ -7,7 +7,8 @@ operating environment, schemachange version and python version. Whenever possibl also include a brief, self-contained code example that demonstrates the problem. We have -included [issue templates](https://github.com/Snowflake-Labs/schemachange/issues/new/choose) for reporting bugs, requesting features and seeking clarifications. Choose the appropriate issue template to contribute to the repository. +included [issue templates](https://github.com/Snowflake-Labs/schemachange/issues/new/choose) for reporting bugs, +requesting features and seeking clarifications. Choose the appropriate issue template to contribute to the repository. ## Contributing code @@ -22,8 +23,6 @@ Thank you for your interest in contributing code to schemachange! ### Guide to contributing to schemachange -> **IMPORTANT** : You will need to follow the [provisioning and schemachange setup instructions](../demo/README.MD) to ensure you can run GitHub actions against your Snowflake account before placing a PR with main schemachange repository so that your PR can be merged into schemachange master branch. - 1. If you are a first-time contributor + Go to [Snowflake-Labs/Schemachange](https://github.com/Snowflake-Labs/schemachange) and click the "fork" button to create your own copy of the project. @@ -53,8 +52,8 @@ Thank you for your interest in contributing code to schemachange! + [Pull](https://github.com/git-guides/git-pull) the latest changes from upstream, including tags: ```shell - git checkout main - git pull upstream main --tags + git checkout master + git pull upstream master --tags ``` 2. Create and Activate a Virtual Environment @@ -98,24 +97,32 @@ Thank you for your interest in contributing code to schemachange! 
+ Commit locally as you progress ( [git add](https://github.com/git-guides/git-add) and [git commit](https://github.com/git-guides/git-commit) ). Use a properly formatted commit message. Be sure to - document any changed behavior in the [CHANGELOG.md](../CHANGELOG.md) file to help us collate the changes for a specific release. + document any changed behavior in the [CHANGELOG.md](../CHANGELOG.md) file to help us collate the changes for a + specific release. 4. Test your contribution locally ```bash python -m pytest ``` - PS: Please add test cases to the features you are developing so that over time, we can capture any lapse in functionality changes. + PS: Please add test cases to the features you are developing so that over time, we can capture any lapse in + functionality changes. + +5. Perform integration tests on your branch from your fork + - Follow the [provisioning and schemachange setup instructions](../demo/README.MD) to configure your Snowflake + account for testing. + - Follow [these](https://docs.github.com/en/actions/managing-workflow-runs-and-deployments/managing-workflow-runs/manually-running-a-workflow) + instructions to manually run the `master-pytest` workflow on your fork of the repo, targeting your feature branch. -5. Push your contribution to GitHub +6. Push your contribution to GitHub - [Push](https://github.com/git-guides/git-push) your changes back to your fork on GitHub + [Push](https://github.com/git-guides/git-push) your changes back to your fork on GitHub ```shell git push origin update-build-library-dependencies ``` -6. Raise a Pull Request to merge your contribution into the a Schemachange Release +7. Raise a Pull Request to merge your contribution into a Schemachange Release + Go to GitHub. The new branch will show up with a green [Pull Request](https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/about-pull-requests#initiating-the-pull-request) button. 
Make sure the title and message are clear, concise and self-explanatory. Then click the button to submit diff --git a/.github/workflows/master-pytest.yml b/.github/workflows/master-pytest.yml index 5caa505f..027a09ef 100644 --- a/.github/workflows/master-pytest.yml +++ b/.github/workflows/master-pytest.yml @@ -29,13 +29,13 @@ jobs: runs-on: ${{ matrix.os }} if: ${{ github.event.label.name == 'ci-run-tests' || github.event_name == 'push' || github.event_name == 'workflow_dispatch' }} env: - SNOWFLAKE_PASSWORD: ${{ secrets.SCHEMACHANGE_SNOWFLAKE_PASSWORD }} - SNOWFLAKE_USER: ${{ secrets.SCHEMACHANGE_SNOWFLAKE_USER }} SNOWFLAKE_ACCOUNT: ${{ secrets.SCHEMACHANGE_SNOWFLAKE_ACCOUNT }} - SNOWFLAKE_DATABASE: SCHEMACHANGE_DEMO - SNOWFLAKE_WAREHOUSE: SCHEMACHANGE_DEMO_WH + SNOWFLAKE_USER: ${{ secrets.SCHEMACHANGE_SNOWFLAKE_USER }} SNOWFLAKE_ROLE: SCHEMACHANGE_DEMO-DEPLOY + SNOWFLAKE_WAREHOUSE: SCHEMACHANGE_DEMO_WH + SNOWFLAKE_DATABASE: SCHEMACHANGE_DEMO MY_TARGET_SCHEMA: ${{ matrix.scenario-name }}_${{ github.run_number }}_${{ strategy.job-index }} + SNOWFLAKE_PASSWORD: ${{ secrets.SCHEMACHANGE_SNOWFLAKE_PASSWORD }} SCENARIO_NAME: ${{ matrix.scenario-name }} steps: - uses: actions/checkout@v4 @@ -58,6 +58,18 @@ jobs: flake8 . --count --select=E9,F63,F7,F82 --show-source --statistics # exit-zero treats all errors as warnings. The GitHub editor is 127 chars wide flake8 . 
--count --exit-zero --max-complexity=10 --max-line-length=127 --statistics + - name: Create and populate connections.toml + run: | + touch ./connections.toml + echo [default] >> ./connections.toml + echo account = \"${SNOWFLAKE_ACCOUNT}\" >> ./connections.toml + echo user = \"${SNOWFLAKE_USER}\" >> ./connections.toml + echo role = \"${SNOWFLAKE_ROLE}\" >> ./connections.toml + echo warehouse = \"${SNOWFLAKE_WAREHOUSE}\" >> ./connections.toml + echo database = \"${SNOWFLAKE_DATABASE}\" >> ./connections.toml + echo password = \"${SNOWFLAKE_PASSWORD}\" >> ./connections.toml + echo "cat connections.toml" + cat ./connections.toml - name: Test with pytest id: pytest run: | @@ -66,16 +78,36 @@ jobs: - name: Test Schemachange on ${{ matrix.os }} targeting ${{ env.SNOWFLAKE_DATABASE }}.${{ env.MY_TARGET_SCHEMA }} schema run: | echo "::group::Setting up ${MY_TARGET_SCHEMA}" - schemachange deploy --config-folder ./demo/setup/${SCENARIO_NAME} + schemachange deploy \ + --config-folder ./demo \ + --config-file-name schemachange-config-setup.yml \ + --root-folder ./demo/${SCENARIO_NAME}/1_setup \ + --connection-name default \ + --connections-file-path ./connections.toml \ + --verbose echo "::endgroup::" + echo "::group::Testing Rendering to ${MY_TARGET_SCHEMA}" - schemachange render --config-folder ./demo/${SCENARIO_NAME} ./demo/${SCENARIO_NAME}/A__render.sql - schemachange render --config-folder ./demo/${SCENARIO_NAME} ./demo/${SCENARIO_NAME}/R__render.sql - schemachange render --config-folder ./demo/${SCENARIO_NAME} ./demo/${SCENARIO_NAME}/V1.0.0__render.sql + + schemachange render \ + --config-folder ./demo/${SCENARIO_NAME} \ + ./demo/${SCENARIO_NAME}/2_test/A__render.sql + schemachange render \ + --config-folder ./demo/${SCENARIO_NAME} \ + ./demo/${SCENARIO_NAME}/2_test/R__render.sql + schemachange render \ + --config-folder ./demo/${SCENARIO_NAME} \ + ./demo/${SCENARIO_NAME}/2_test/V1.0.0__render.sql echo "::endgroup::" + echo "::group::Testing Deployment using 
${MY_TARGET_SCHEMA}" set +e - schemachange deploy --config-folder ./demo/${SCENARIO_NAME} + schemachange deploy \ + --config-folder ./demo/${SCENARIO_NAME} \ + --connection-name default \ + --connections-file-path ./connections.toml \ + --root-folder ./demo/${SCENARIO_NAME}/2_test \ + --verbose RESULT=$? if [ $RESULT -eq 0 ]; then echo "Deployment Completed!" @@ -84,9 +116,17 @@ jobs: fi echo "::endgroup::" set -e + echo "::group::Tearing down ${MY_TARGET_SCHEMA}" - schemachange deploy --config-folder ./demo/teardown/${SCENARIO_NAME} + schemachange deploy \ + --config-folder ./demo \ + --config-file-name schemachange-config-teardown.yml \ + --connection-name default \ + --connections-file-path ./connections.toml \ + --root-folder ./demo/${SCENARIO_NAME}/3_teardown \ + --verbose echo "::endgroup::" + if [ $RESULT -ne 0 ]; then exit 1 fi diff --git a/CHANGELOG.md b/CHANGELOG.md index 5d1c9335..e99cafd3 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -7,12 +7,14 @@ All notable changes to this project will be documented in this file. ### Added - Use of `structlog~=24.1.0` for standard log outputs - Verified Schemachange against Python 3.12 +- Support for connections.toml configurations +- Support for supplying the authenticator, private key path, token path, connections file path, and connection name via the YAML and command-line configurations. ### Changed - Refactored the main cli.py into multiple modules - config, session. - Updated contributing guidelines and demo readme content to help contributors setup local snowflake account to run the github actions in their fork before pushing the PR to upstream repository. 
- Removed tests against Python 3.8 [End of Life on 2024-10-07](https://devguide.python.org/versions/#supported-versions) - +- Command-line vars are now merged into YAML vars instead of overwriting them entirely ## [3.7.0] - 2024-07-22 ### Added diff --git a/README.md b/README.md index 93dcd74c..cd19fe52 100644 --- a/README.md +++ b/README.md @@ -45,17 +45,20 @@ support or warranty. 1. [Change History Table](#change-history-table) 1. [Authentication](#authentication) 1. [Password Authentication](#password-authentication) - 1. [Private Key Authentication](#private-key-authentication) - 1. [Oauth Authentication](#oauth-authentication) + 1. [External OAuth Authentication](#external-oauth-authentication) 1. [External Browser Authentication](#external-browser-authentication) 1. [Okta Authentication](#okta-authentication) + 1. [Private Key Authentication](#private-key-authentication) 1. [Configuration](#configuration) 1. [YAML Config File](#yaml-config-file) 1. [Yaml Jinja support](#yaml-jinja-support) - 1. [Command Line Arguments](#command-line-arguments) + 1. [connections.toml File](#connectionstoml-file) +1. [Commands](#commands) + 1. [deploy](#deploy) + 1. [render](#render) 1. [Running schemachange](#running-schemachange) 1. [Prerequisites](#prerequisites) - 1. [Running The Script](#running-the-script) + 1. [Running the Script](#running-the-script) 1. [Integrating With DevOps](#integrating-with-devops) 1. [Sample DevOps Process Flow](#sample-devops-process-flow) 1. [Using in a CI/CD Pipeline](#using-in-a-cicd-pipeline) @@ -119,7 +122,7 @@ numbers separated by periods. Here are a few valid version strings: Every script within a database folder must have a unique version number. schemachange will check for duplicate version numbers and throw an error if it finds any. This helps to ensure that developers who are working in parallel don't -accidently (re-)use the same version number. +accidentally (re-)use the same version number. 
### Repeatable Script Naming @@ -164,7 +167,8 @@ schemachange is designed to be very lightweight and not impose too many limitati number of SQL statements within it and must supply the necessary context, like database and schema names. The context can be supplied by using an explicit `USE ` command or by naming all objects with a three-part name (`..`). schemachange will simply run the contents of each script against -the target Snowflake account, in the correct order. +the target Snowflake account, in the correct order. After each script, Schemachange will "reset" the context ( +role, warehouse, database, schema) to the values used to configure the connector. ### Using Variables in Scripts @@ -222,7 +226,7 @@ These files can be stored in the root-folder but schemachange also provides a se folder `--modules-folder`. This allows common logic to be stored outside of the main changes scripts. The [demo/citibike_demo_jinja](demo/citibike_demo_jinja) has a simple example that demonstrates this. -The Jinja autoescaping feature is disabled in schemachange, this feature in Jinja is currently designed for where the +The Jinja auto-escaping feature is disabled in schemachange, this feature in Jinja is currently designed for where the output language is HTML/XML. So if you are using schemachange with untrusted inputs you will need to handle this within your change scripts. @@ -237,16 +241,16 @@ Within change scripts: schemachange records all applied changes scripts to the change history table. By default, schemachange will attempt to log all activities to the `METADATA.SCHEMACHANGE.CHANGE_HISTORY` table. The name and location of the change history -table can be overriden by using the `-c` (or `--change-history-table`) parameter. The value passed to the parameter can -have a one, two, or three part name (e.g. "TABLE_NAME", or "SCHEMA_NAME.TABLE_NAME", or " -DATABASE_NAME.SCHEMA_NAME.TABLE_NAME"). 
This can be used to support multiple environments (dev, test, prod) or multiple -subject areas within the same Snowflake account. By default, schemachange will not try to create the change history -table, and will fail if the table does not exist. +table can be overridden via a command line argument (`-c` or `--change-history-table`) or the `schemachange-config.yml` +file ( `change-history-table`). The value passed to the parameter can have a one, two, or three part name (e.g. " +TABLE_NAME", or "SCHEMA_NAME.TABLE_NAME", or " DATABASE_NAME.SCHEMA_NAME.TABLE_NAME"). This can be used to support +multiple environments (dev, test, prod) or multiple subject areas within the same Snowflake account. -Additionally, if the `--create-change-history-table` parameter is given, then schemachange will attempt to create the -schema and table associated with the change history table. schemachange will not attempt to create the database for the -change history table, so that must be created ahead of time, even when using the `--create-change-history-table` -parameter. +By default, schemachange will not try to create the change history table, and it will fail if the table does not exist. +This behavior can be altered by passing in the `--create-change-history-table` argument or adding +`create-change-history-table: true` to the `schemachange-config.yml` file. Even with the `--create-change-history-table` +parameter, schemachange will not attempt to create the database for the change history table. That must be created +before running schemachange. 
The structure of the `CHANGE_HISTORY` table is as follows: @@ -272,119 +276,112 @@ script), in case you choose to create it manually and not use the `--create-chan ```sql CREATE TABLE IF NOT EXISTS SCHEMACHANGE.CHANGE_HISTORY ( - VERSION VARCHAR - ,DESCRIPTION VARCHAR - ,SCRIPT VARCHAR - ,SCRIPT_TYPE VARCHAR - ,CHECKSUM VARCHAR - ,EXECUTION_TIME NUMBER - ,STATUS VARCHAR - ,INSTALLED_BY VARCHAR - ,INSTALLED_ON TIMESTAMP_LTZ + VERSION VARCHAR, + DESCRIPTION VARCHAR, + SCRIPT VARCHAR, + SCRIPT_TYPE VARCHAR, + CHECKSUM VARCHAR, + EXECUTION_TIME NUMBER, + STATUS VARCHAR, + INSTALLED_BY VARCHAR, + INSTALLED_ON TIMESTAMP_LTZ ) ``` ## Authentication -Schemachange supports snowflake's default authenticator, External Oauth, Browswer based SSO and Programmatic SSO options -supported by -the [Snowflake Python Connector](https://docs.snowflake.com/en/user-guide/python-connector-example.html#connecting-to-snowflake). -Set the environment variable `SNOWFLAKE_AUTHENTICATOR` to one of the following -Authentication Option | Expected Value ---- | --- -Default [Password](https://docs.snowflake.com/en/user-guide/python-connector-example.html#connecting-using-the-default-authenticator) -Authenticator | `snowflake` -[Key Pair](https://docs.snowflake.com/en/user-guide/python-connector-example.html#using-key-pair-authentication) -Authenticator| `snowflake` -[External Oauth](https://docs.snowflake.com/en/user-guide/oauth-external.html) | `oauth` -[Browser based SSO](https://docs.snowflake.com/en/user-guide/admin-security-fed-auth-use.html#setting-up-browser-based-sso) | `externalbrowser` -[Programmatic SSO](https://docs.snowflake.com/en/user-guide/admin-security-fed-auth-use.html#native-sso-okta-only) (Okta -Only) | Okta URL endpoint for your Okta account typically in the form `https://.okta.com` -OR `https://.oktapreview.com` - -If an authenticator is unsupported, then schemachange will default to `snowflake`. 
If the authenticator is `snowflake`, -and both password and key pair values are provided then schemachange will use the password over the key pair values. +Schemachange supports many of the authentication methods supported by +the [Snowflake Python Connector](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect). +The authenticator can be set by setting an `authenticator` in the [connections.toml](#connectionstoml-file) file -### Password Authentication +The following authenticators are supported: -The Snowflake user password for `SNOWFLAKE_USER` is required to be set in the environment variable `SNOWFLAKE_PASSWORD` -prior to calling the script. schemachange will fail if the `SNOWFLAKE_PASSWORD` environment variable is not set. The -environment variable `SNOWFLAKE_AUTHENTICATOR` will be set to `snowflake` if it not explicitly set. +- `snowflake`: [Password](#password-authentication) +- `oauth`: [External OAuth](#external-oauth-authentication) +- `externalbrowser`: [Browser-based SSO](#external-browser-authentication) +- `https://.okta.com`: [Okta SSO](#okta-authentication) +- `snowflake_jwt`: [Private Key](#private-key-authentication) -_**DEPRECATION NOTICE**: The `SNOWSQL_PWD` environment variable is deprecated but currently still supported. Support for -it will be removed in a later version of schemachange. Please use `SNOWFLAKE_PASSWORD` instead._ - -### Private Key Authentication +If an authenticator is unsupported, an exception will be raised. -The Snowflake user encrypted private key for `SNOWFLAKE_USER` is required to be in a file with the file path set in the -environment variable `SNOWFLAKE_PRIVATE_KEY_PATH`. Additionally, the password for the encrypted private key file is -required to be set in the environment variable `SNOWFLAKE_PRIVATE_KEY_PASSPHRASE`. If the variable is not set, -schemachange will assume the private key is not encrypted. These two environment variables must be set prior to calling -the script. 
Schemachange will fail if the `SNOWFLAKE_PRIVATE_KEY_PATH` is not set. +### Password Authentication -### Oauth Authentication +Password authentication is the default authenticator. Supplying `snowflake` as your authenticator will set it +explicitly. A `password` must be supplied in the [connections.toml](#connectionstoml-file) file -An Oauth Configuration can be made in the [YAML Config File](#yaml-config-file) or passing an equivalent json dictionary -to the switch `--oauth-config`. Invoke this method by setting the environment variable `SNOWFLAKE_AUTHENTICATOR` to the -value `oauth` prior to calling schemachange. Since different Oauth providers may require different information the Oauth -configuration uses four named variables that are fed into a POST request to obtain a token. Azure is shown in the -example YAML but other providers should use a similar pattern and request payload contents. +### External OAuth Authentication -* token-provider-url - The URL of the authenticator resource that will receive the POST request. -* token-response-name - The Expected name of the JSON element containing the Token in the return response from the authenticator resource. -* token-request-payload - The Set of variables passed as a dictionary to the `data` element of the request. -* token-request-headers - The Set of variables passed as a dictionary to the `headers` element of the request. +External OAuth authentication can be selected by supplying `oauth` as your authenticator. A `token_file_path` must be +supplied in the [connections.toml](#connectionstoml-file) file -It is recomended to use the YAML file and pass oauth secrets into the configuration using the templating engine instead -of the command line option. +**Schemachange no longer supports the `--oauth-config` option.** Prior to the 4.0 release, this library supported +supplying an `--oauth-config` that would be used to fetch an OAuth token via the `requests` library. 
This required +Schemachange to keep track of connection arguments that could otherwise be passed directly to the Snowflake Python +connector. Maintaining this logic in Schemachange added unnecessary complication to the repo and prevented access to +recent connector parameterization features offered by the Snowflake connector. ### External Browser Authentication -External browser authentication can be used for local development by setting the environment -variable `SNOWFLAKE_AUTHENTICATOR` to the value `externalbrowser` prior to calling schemachange. -The client will be prompted to authenticate in a browser that pops up. Refer to +External browser authentication can be selected by supplying `externalbrowser` as your authenticator. The client will be +prompted to authenticate in a browser that pops up. Refer to the [documentation](https://docs.snowflake.com/en/user-guide/admin-security-fed-auth-use.html#setting-up-browser-based-sso) to cache the token to minimize the number of times the browser pops up to authenticate the user. ### Okta Authentication -For clients that do not have a browser, can use the popular SaaS Idp option to connect via Okta. This will require the -Okta URL that you utilize for SSO. -Okta authentication can be used setting the environment variable `SNOWFLAKE_AUTHENTICATOR` to the value of your okta -endpoint as a fully formed URL ( E.g. `https://.okta.com`) prior to calling schemachange. +Okta authentication can be selected by supplying your Okta endpoint as your authenticator (e.g. +`https://.okta.com`). Clients that do not have a browser can use the popular SaaS IdP option to connect +via Okta. A `password` must be supplied in the [connections.toml](#connectionstoml-file) file _** NOTE**: Please disable Okta MFA for the user who uses Native SSO authentication with client drivers. 
Please consult your Okta administrator for more information._ -## Configuration +### Private Key Authentication -Parameters to schemachange can be supplied in two different ways: +Private key authentication can be selected by supplying `snowflake_jwt` as your authenticator. The filepath to a +Snowflake user-encrypted private key must be supplied as `private-key` in the [connections.toml](#connectionstoml-file) +file. If the private key file is password protected, supply the password as `private_key_file_pwd` in +the [connections.toml](#connectionstoml-file) file. If the variable is not set, the Snowflake Python connector will +assume the private key is not encrypted. + +## Configuration -1. Through a YAML config file -2. Via command line arguments +As of version 4.0, Snowflake connection parameters must be supplied via +a [connections.toml file](#connectionstoml-file). Command-line and YAML arguments will still be supported with a +deprecation warning until support is completely dropped. -If supplied by both the command line and the YAML file, The command line overides the YAML values. +Schemachange-specific parameters can be supplied in two different ways (in order of priority): -Additionally, regardless of the approach taken, the following paramaters are required to run schemachange: +1. Command Line Arguments +2. YAML config file -* snowflake-account -* snowflake-user -* snowflake-role -* snowflake-warehouse +**Note:** As of 4.0, `vars` provided via command-line argument will be merged with vars provided via YAML config. +Previously, one overwrote the other completely. -Plese +Please see [Usage Notes for the account Parameter (for the connect Method)](https://docs.snowflake.com/en/user-guide/python-connector-api.html#label-account-format-info) for more details on how to structure the account name. 
+### connections.toml File + +A [connections.toml](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect#connecting-using-the-connections-toml-file) +filepath can be supplied in the following ways (in order of priority): + +1. The `--connections-file-path` [command-line argument](#commands) +2. The `connections-file-path` [YAML value](#yaml-config-file) + +A connection name can be supplied in the following ways (in order of priority): + +1. The `SNOWFLAKE_DEFAULT_CONNECTION_NAME` [environment variable](#environment-variables) +2. The `--connection-name` [command-line argument](#commands) +3. The `connection-name` [YAML value](#yaml-config-file) + ### YAML Config File -schemachange expects the YAML config file to be named `schemachange-config.yml` and looks for it by default in the -current folder. The folder can be overridden by using the `--config-folder` command line argument ( -see [Command Line Arguments](#command-line-arguments) below for more details). +By default, Schemachange expects the YAML config file to be named `schemachange-config.yml`, located in the current +working directory. The YAML file name can be overridden with the +`--config-file-name` [command-line argument](#commands). The folder can be overridden by using the +`--config-folder` [command-line argument](#commands). Here is the list of available configurations in the `schemachange-config.yml` file: @@ -397,30 +394,16 @@ root-folder: '/path/to/folder' # The modules folder for jinja macros and templates to be used across multiple scripts. modules-folder: null -# The name of the snowflake account (e.g. xy12345.east-us-2.azure). -# You can also use the regionless format (e.g. myorgname-accountname) -# for privatelink accounts, suffix the account value with privatelink (e.g. 
.privatelink) -snowflake-account: 'xy12345.east-us-2.azure' +# Override the default connections.toml file path at snowflake.connector.constants.CONNECTIONS_FILE (OS specific) +connections-file-path: null -# The name of the snowflake user -snowflake-user: 'user' - -# The name of the default role to use. Can be overridden in the change scripts. -snowflake-role: 'role' - -# The name of the default warehouse to use. Can be overridden in the change scripts. -snowflake-warehouse: 'warehouse' - -# The name of the default database to use. Can be overridden in the change scripts. -snowflake-database: null - -# The name of the default schema to use. Can be overridden in the change scripts. -snowflake-schema: null +# Override the default connections.toml connection name. Other connection-related values will override these connection values. +connection-name: null # Used to override the default name of the change history table (the default is METADATA.SCHEMACHANGE.CHANGE_HISTORY) change-history-table: null -# Define values for the variables to replaced in change scripts +# Define values for the variables to replaced in change scripts. 
vars supplied via the command line will be merged into YAML-supplied vars vars: var1: 'value1' var2: 'value2' @@ -441,24 +424,6 @@ dry-run: false # A string to include in the QUERY_TAG that is attached to every SQL statement executed query-tag: 'QUERY_TAG' - -# Information for Oauth token requests -oauthconfig: - # url Where token request are posted to - token-provider-url: 'https://login.microsoftonline.com/{{ env_var('AZURE_ORG_GUID', 'default') }}/oauth2/v2.0/token' - # name of Json entity returned by request - token-response-name: 'access_token' - # Headers needed for successful post or other security markings ( multiple labeled items permitted - token-request-headers: - Content-Type: "application/x-www-form-urlencoded" - User-Agent: "python/schemachange" - # Request Payload for Token (it is recommended pass - token-request-payload: - client_id: '{{ env_var('CLIENT_ID', 'default') }}' - username: '{{ env_var('USER_ID', 'default') }}' - password: '{{ env_var('USER_PASSWORD', 'default') }}' - grant_type: 'password' - scope: '{{ env_var('SESSION_SCOPE', 'default') }}' ``` #### Yaml Jinja support @@ -483,41 +448,37 @@ Return the value of the environmental variable if it exists, otherwise raise an {{ env_var('') }} ``` -### Command Line Arguments +## Commands Schemachange supports a few subcommands. If the subcommand is not provided it defaults to deploy. This behaviour keeps compatibility with versions prior to 3.2. -#### deploy +### deploy This is the main command that runs the deployment process. 
```bash -usage: schemachange deploy [-h] [--config-folder CONFIG_FOLDER] [-f ROOT_FOLDER] [-m MODULES_FOLDER] [-a SNOWFLAKE_ACCOUNT] [-u SNOWFLAKE_USER] [-r SNOWFLAKE_ROLE] [-w SNOWFLAKE_WAREHOUSE] [-d SNOWFLAKE_DATABASE] [-s SNOWFLAKE_SCHEMA] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] +usage: schemachange deploy [-h] [--config-folder CONFIG_FOLDER] [--config-file-name CONFIG_FILE_NAME] [-f ROOT_FOLDER] [-m MODULES_FOLDER] [--connections-file-path CONNECTIONS_FILE_PATH] [--connection-name CONNECTION_NAME] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] ``` -| Parameter | Description | -|----------------------------------------------------------------------|-------------------------------------------------------------------------------------------------------------------------------------------------------------------------| -| -h, --help | Show the help message and exit | -| --config-folder CONFIG_FOLDER | The folder to look in for the schemachange-config.yml file (the default is the current working directory) | -| -f ROOT_FOLDER, --root-folder ROOT_FOLDER | The root folder for the database change scripts. The default is the current directory. | -| -m MODULES_FOLDER, --modules-folder MODULES_FOLDER | The modules folder for jinja macros and templates to be used across mutliple scripts | -| -a SNOWFLAKE_ACCOUNT, --snowflake-account SNOWFLAKE_ACCOUNT | The name of the snowflake account (e.g. xy12345.east-us-2.azure). | -| -u SNOWFLAKE_USER, --snowflake-user SNOWFLAKE_USER | The name of the snowflake user | -| -r SNOWFLAKE_ROLE, --snowflake-role SNOWFLAKE_ROLE | The name of the role to use | -| -w SNOWFLAKE_WAREHOUSE, --snowflake-warehouse SNOWFLAKE_WAREHOUSE | The name of the default warehouse to use. Can be overridden in the change scripts. 
| -| -d SNOWFLAKE_DATABASE, --snowflake-database SNOWFLAKE_DATABASE | The name of the default database to use. Can be overridden in the change scripts. | -| -s SNOWFLAKE_SCHEMA, --snowflake-schema SNOWFLAKE_SCHEMA | The name of the default schema to use. Can be overridden in the change scripts. | -| -c CHANGE_HISTORY_TABLE, --change-history-table CHANGE_HISTORY_TABLE | Used to override the default name of the change history table (which is METADATA.SCHEMACHANGE.CHANGE_HISTORY) | -| --vars VARS | Define values for the variables to replaced in change scripts, given in JSON format (e.g. '{"variable1": "value1", "variable2": "value2"}') | -| --create-change-history-table | Create the change history table if it does not exist. The default is 'False'. | -| -ac, --autocommit | Enable autocommit feature for DML commands. The default is 'False'. | -| -v, --verbose | Display verbose debugging details during execution. The default is 'False'. | -| --dry-run | Run schemachange in dry run mode. The default is 'False'. | -| --query-tag | A string to include in the QUERY_TAG that is attached to every SQL statement executed. | -| --oauth-config | Define values for the variables to Make Oauth Token requests (e.g. {"token-provider-url": "https//...", "token-request-payload": {"client_id": "GUID_xyz",...},... })' | - -#### render +| Parameter | Description | +|----------------------------------------------------------------------|---------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------------| +| -h, --help | Show the help message and exit | +| --config-folder CONFIG_FOLDER | The folder to look in for the schemachange config file (the default is the current working directory) | +| --config-file-name CONFIG_FILE_NAME | The file name of the schemachange config file. 
(the default is schemachange-config.yml) | +| -f ROOT_FOLDER, --root-folder ROOT_FOLDER | The root folder for the database change scripts. The default is the current directory. | +| -m MODULES_FOLDER, --modules-folder MODULES_FOLDER | The modules folder for jinja macros and templates to be used across mutliple scripts | +| --connections-file-path CONNECTIONS_FILE_PATH | Override the default [connections.toml](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect#connecting-using-the-connections-toml-file) file path at snowflake.connector.constants.CONNECTIONS_FILE (OS specific) | +| --connection-name CONNECTION_NAME | Override the default [connections.toml](https://docs.snowflake.com/en/developer-guide/python-connector/python-connector-connect#connecting-using-the-connections-toml-file) connection name. Other connection-related values will override these connection values. | +| -c CHANGE_HISTORY_TABLE, --change-history-table CHANGE_HISTORY_TABLE | Used to override the default name of the change history table (which is METADATA.SCHEMACHANGE.CHANGE_HISTORY) | +| --vars VARS | Define values for the variables to replaced in change scripts, given in JSON format. Vars supplied via the command line will be merged with YAML-supplied vars (e.g. '{"variable1": "value1", "variable2": "value2"}') | +| --create-change-history-table | Create the change history table if it does not exist. The default is 'False'. | +| -ac, --autocommit | Enable autocommit feature for DML commands. The default is 'False'. | +| -v, --verbose | Display verbose debugging details during execution. The default is 'False'. | +| --dry-run | Run schemachange in dry run mode. The default is 'False'. | +| --query-tag | A string to include in the QUERY_TAG that is attached to every SQL statement executed. | + +### render This subcommand is used to render a single script to the console. 
It is intended to support the development and troubleshooting of script that use features from the jinja template engine. @@ -562,13 +523,13 @@ schemachange is a single python script located at [schemachange/cli.py](schemach follows: ``` -python schemachange/cli.py [-h] [--config-folder CONFIG_FOLDER] [-f ROOT_FOLDER] [-a SNOWFLAKE_ACCOUNT] [-u SNOWFLAKE_USER] [-r SNOWFLAKE_ROLE] [-w SNOWFLAKE_WAREHOUSE] [-d SNOWFLAKE_DATABASE] [-s SNOWFLAKE_SCHEMA] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] [--oauth-config OUATH_CONFIG] +python schemachange/cli.py [-h] [--config-folder CONFIG_FOLDER] [-f ROOT_FOLDER] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] [--connections-file-path] [--connection-name] ``` Or if installed via `pip`, it can be executed as follows: ``` -schemachange [-h] [--config-folder CONFIG_FOLDER] [-f ROOT_FOLDER] [-a SNOWFLAKE_ACCOUNT] [-u SNOWFLAKE_USER] [-r SNOWFLAKE_ROLE] [-w SNOWFLAKE_WAREHOUSE] [-d SNOWFLAKE_DATABASE] [-s SNOWFLAKE_SCHEMA] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] [--oauth-config OUATH_CONFIG] +schemachange [-h] [--config-folder CONFIG_FOLDER] [-f ROOT_FOLDER] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] [--connections-file-path] [--connection-name] ``` The [demo](demo) folder in this project repository contains three schemachange demo projects for you to try out. 
These @@ -607,10 +568,10 @@ If your build agent has a recent version of python 3 installed, the script can b ```bash pip install schemachange --upgrade -schemachange [-h] [-f ROOT_FOLDER] -a SNOWFLAKE_ACCOUNT -u SNOWFLAKE_USER -r SNOWFLAKE_ROLE -w SNOWFLAKE_WAREHOUSE [-d SNOWFLAKE_DATABASE] [-s SNOWFLAKE_SCHEMA] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] [--oauth-config OUATH_CONFIG] +schemachange [-h] [-f ROOT_FOLDER] [-c CHANGE_HISTORY_TABLE] [--vars VARS] [--create-change-history-table] [-ac] [-v] [--dry-run] [--query-tag QUERY_TAG] [--connections-file-path] [--connection-name] ``` -Or if you prefer docker, set the environment variables and run like so: +Or if you prefer docker, run like so: ```bash docker run -it --rm \ @@ -618,15 +579,11 @@ docker run -it --rm \ -v "$PWD":/usr/src/schemachange \ -w /usr/src/schemachange \ -e ROOT_FOLDER \ - -e SNOWFLAKE_ACCOUNT \ - -e SNOWFLAKE_USER \ - -e SNOWFLAKE_ROLE \ - -e SNOWFLAKE_WAREHOUSE \ - -e SNOWFLAKE_PASSWORD \ - python:3 /bin/bash -c "pip install schemachange --upgrade && schemachange -f $ROOT_FOLDER -a $SNOWFLAKE_ACCOUNT -u $SNOWFLAKE_USER -r $SNOWFLAKE_ROLE -w $SNOWFLAKE_WAREHOUSE" + -e $CONNECTION_NAME \ + python:3 /bin/bash -c "pip install schemachange --upgrade && schemachange -f $ROOT_FOLDER --connections-file-path connections.toml --connection-name $CONNECTION_NAME" ``` -Either way, don't forget to set the `SNOWFLAKE_PASSWORD` environment variable if using password authentication! +Either way, don't forget to configure a [connections.toml file](#connectionstoml-file) for connection parameters ## Maintainers diff --git a/demo/README.MD b/demo/README.MD index a36dadf8..f67b2f3a 100644 --- a/demo/README.MD +++ b/demo/README.MD @@ -5,7 +5,7 @@ to see how schemachange works with the main feature set. 
For the contributor, wh codebase, this will serve as a basis to test the PR against your own snowflake account to ensure your code change does not break any existing functionality. -## Prerequisite +## Prerequisites - You will need your own snowflake Account to test the Demo - Both as a contributor and consumer. - You will need to review and run statements in the provision folder or set up your own database and schema. @@ -43,14 +43,19 @@ the demo DDL scripts. ### Contributors -As a contributor, you will have to set up schemachange demo database and schemachange schema (See Initialize and Setup -scripts below). Along with that you will also set up the following Secrets in your forked repository so that the GitHub -actions can set up, test and teardown the temporary schema it creates to test the changes to your code in the master and -dev branches respectively. +1. Execute the [initialize.sql](provision/initialize.sql) + and [setup_schemachange_schema.sql](provision/setup_schemachange_schema.sql) scripts to create up a + `SCHEMACHANGE_DEMO` + database and `SCHEMACHANGE` schema (See [Prerequisites](#prerequisites)). -- SCHEMACHANGE_SNOWFLAKE_PASSWORD -- SCHEMACHANGE_SNOWFLAKE_USER -- SCHEMACHANGE_SNOWFLAKE_ACCOUNT +2. Create the + following [GitHub Action Secrets](https://docs.github.com/en/actions/security-for-github-actions/security-guides/using-secrets-in-github-actions) + in your forked repository. GitHub will use these actions to set up and teardown the temporary schema(s) it creates to + test your code. 
+ + - `SCHEMACHANGE_SNOWFLAKE_PASSWORD` + - `SCHEMACHANGE_SNOWFLAKE_USER` + - `SCHEMACHANGE_SNOWFLAKE_ACCOUNT` # Setup diff --git a/demo/setup/basics_demo/A__setup_basics_demo.sql b/demo/basics_demo/1_setup/A__setup.sql similarity index 97% rename from demo/setup/basics_demo/A__setup_basics_demo.sql rename to demo/basics_demo/1_setup/A__setup.sql index 2d7edf78..911048b0 100644 --- a/demo/setup/basics_demo/A__setup_basics_demo.sql +++ b/demo/basics_demo/1_setup/A__setup.sql @@ -31,7 +31,7 @@ GRANT DATABASE ROLE IDENTIFIER($SC_W) TO DATABASE ROLE IDENTIFIER($SC_C); CREATE SCHEMA IF NOT EXISTS IDENTIFIER($TARGET_SCHEMA_NAME) WITH MANAGED ACCESS; -- USE SCHEMA INFORMATION_SCHEMA; -- DROP SCHEMA IF EXISTS PUBLIC; -GRANT OWNERSHIP ON SCHEMA IDENTIFIER($TARGET_SCHEMA_NAME) TO ROLE IDENTIFIER($DEPLOY_ROLE); +GRANT OWNERSHIP ON SCHEMA IDENTIFIER($TARGET_SCHEMA_NAME) TO ROLE IDENTIFIER($DEPLOY_ROLE) REVOKE CURRENT GRANTS; USE SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE); -- SCHEMA @@ -44,7 +44,6 @@ GRANT MONITOR ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDE -- SC_W -- None -- SC_C -GRANT MODIFY, APPLYBUDGET, ADD SEARCH OPTIMIZATION ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDENTIFIER($SC_C); -- TABLES -- SC_M diff --git a/demo/basics_demo/A__basic001.sql b/demo/basics_demo/2_test/A__basic001.sql similarity index 100% rename from demo/basics_demo/A__basic001.sql rename to demo/basics_demo/2_test/A__basic001.sql diff --git a/demo/basics_demo/A__render.sql b/demo/basics_demo/2_test/A__render.sql similarity index 100% rename from demo/basics_demo/A__render.sql rename to demo/basics_demo/2_test/A__render.sql diff --git a/demo/basics_demo/R__basic001.sql b/demo/basics_demo/2_test/R__basic001.sql similarity index 100% rename from demo/basics_demo/R__basic001.sql rename to demo/basics_demo/2_test/R__basic001.sql diff --git a/demo/basics_demo/R__render.sql b/demo/basics_demo/2_test/R__render.sql similarity index 100% rename from 
demo/basics_demo/R__render.sql rename to demo/basics_demo/2_test/R__render.sql diff --git a/demo/basics_demo/V1.0.0__render.sql b/demo/basics_demo/2_test/V1.0.0__render.sql similarity index 100% rename from demo/basics_demo/V1.0.0__render.sql rename to demo/basics_demo/2_test/V1.0.0__render.sql diff --git a/demo/basics_demo/V1.0.1__EOF_FIle.sql b/demo/basics_demo/2_test/V1.0.1__EOF_FIle.sql similarity index 100% rename from demo/basics_demo/V1.0.1__EOF_FIle.sql rename to demo/basics_demo/2_test/V1.0.1__EOF_FIle.sql diff --git a/demo/basics_demo/V1.0.2__StoredProc.sql b/demo/basics_demo/2_test/V1.0.2__StoredProc.sql similarity index 100% rename from demo/basics_demo/V1.0.2__StoredProc.sql rename to demo/basics_demo/2_test/V1.0.2__StoredProc.sql diff --git a/demo/teardown/basics_demo/A__teardown_basics_demo.sql b/demo/basics_demo/3_teardown/A__teardown.sql similarity index 100% rename from demo/teardown/basics_demo/A__teardown_basics_demo.sql rename to demo/basics_demo/3_teardown/A__teardown.sql diff --git a/demo/basics_demo/schemachange-config.yml b/demo/basics_demo/schemachange-config.yml index 18680db4..7252e334 100644 --- a/demo/basics_demo/schemachange-config.yml +++ b/demo/basics_demo/schemachange-config.yml @@ -2,12 +2,6 @@ config-version: 1 root-folder: "./demo/{{ env_var('SCENARIO_NAME')}}" -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" -snowflake-schema: "{{ env_var('MY_TARGET_SCHEMA')}}" change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.{{ env_var('MY_TARGET_SCHEMA')}}.CHANGE_HISTORY" create-change-history-table: true diff --git a/demo/setup/citibike_demo_jinja/A__setup_citibike_demo_jinja.sql b/demo/citibike_demo/1_setup/A__setup.sql similarity index 97% rename from 
demo/setup/citibike_demo_jinja/A__setup_citibike_demo_jinja.sql rename to demo/citibike_demo/1_setup/A__setup.sql index 2d7edf78..911048b0 100644 --- a/demo/setup/citibike_demo_jinja/A__setup_citibike_demo_jinja.sql +++ b/demo/citibike_demo/1_setup/A__setup.sql @@ -31,7 +31,7 @@ GRANT DATABASE ROLE IDENTIFIER($SC_W) TO DATABASE ROLE IDENTIFIER($SC_C); CREATE SCHEMA IF NOT EXISTS IDENTIFIER($TARGET_SCHEMA_NAME) WITH MANAGED ACCESS; -- USE SCHEMA INFORMATION_SCHEMA; -- DROP SCHEMA IF EXISTS PUBLIC; -GRANT OWNERSHIP ON SCHEMA IDENTIFIER($TARGET_SCHEMA_NAME) TO ROLE IDENTIFIER($DEPLOY_ROLE); +GRANT OWNERSHIP ON SCHEMA IDENTIFIER($TARGET_SCHEMA_NAME) TO ROLE IDENTIFIER($DEPLOY_ROLE) REVOKE CURRENT GRANTS; USE SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE); -- SCHEMA @@ -44,7 +44,6 @@ GRANT MONITOR ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDE -- SC_W -- None -- SC_C -GRANT MODIFY, APPLYBUDGET, ADD SEARCH OPTIMIZATION ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDENTIFIER($SC_C); -- TABLES -- SC_M diff --git a/demo/citibike_demo/A__checks.sql b/demo/citibike_demo/2_test/A__checks.sql similarity index 100% rename from demo/citibike_demo/A__checks.sql rename to demo/citibike_demo/2_test/A__checks.sql diff --git a/demo/citibike_demo/A__render.sql b/demo/citibike_demo/2_test/A__render.sql similarity index 100% rename from demo/citibike_demo/A__render.sql rename to demo/citibike_demo/2_test/A__render.sql diff --git a/demo/citibike_demo/R__checks.sql b/demo/citibike_demo/2_test/R__checks.sql similarity index 100% rename from demo/citibike_demo/R__checks.sql rename to demo/citibike_demo/2_test/R__checks.sql diff --git a/demo/citibike_demo/R__render.sql b/demo/citibike_demo/2_test/R__render.sql similarity index 100% rename from demo/citibike_demo/R__render.sql rename to demo/citibike_demo/2_test/R__render.sql diff --git a/demo/citibike_demo/V1.0.0__render.sql b/demo/citibike_demo/2_test/V1.0.0__render.sql similarity index 100% rename from 
demo/citibike_demo/V1.0.0__render.sql rename to demo/citibike_demo/2_test/V1.0.0__render.sql diff --git a/demo/citibike_demo/V1.1.0__initial_database_objects.sql b/demo/citibike_demo/2_test/V1.1.0__initial_database_objects.sql similarity index 100% rename from demo/citibike_demo/V1.1.0__initial_database_objects.sql rename to demo/citibike_demo/2_test/V1.1.0__initial_database_objects.sql diff --git a/demo/citibike_demo/V1.2.0__load_tables_from_s3.sql b/demo/citibike_demo/2_test/V1.2.0__load_tables_from_s3.sql similarity index 100% rename from demo/citibike_demo/V1.2.0__load_tables_from_s3.sql rename to demo/citibike_demo/2_test/V1.2.0__load_tables_from_s3.sql diff --git a/demo/teardown/citibike_demo/A__teardown_citibike_demo.sql b/demo/citibike_demo/3_teardown/A__teardown.sql similarity index 100% rename from demo/teardown/citibike_demo/A__teardown_citibike_demo.sql rename to demo/citibike_demo/3_teardown/A__teardown.sql diff --git a/demo/citibike_demo/schemachange-config.yml b/demo/citibike_demo/schemachange-config.yml index 18680db4..7252e334 100644 --- a/demo/citibike_demo/schemachange-config.yml +++ b/demo/citibike_demo/schemachange-config.yml @@ -2,12 +2,6 @@ config-version: 1 root-folder: "./demo/{{ env_var('SCENARIO_NAME')}}" -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" -snowflake-schema: "{{ env_var('MY_TARGET_SCHEMA')}}" change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.{{ env_var('MY_TARGET_SCHEMA')}}.CHANGE_HISTORY" create-change-history-table: true diff --git a/demo/setup/citibike_demo/A__setup_citibike_demo.sql b/demo/citibike_demo_jinja/1_setup/A__setup.sql similarity index 97% rename from demo/setup/citibike_demo/A__setup_citibike_demo.sql rename to demo/citibike_demo_jinja/1_setup/A__setup.sql index 2d7edf78..911048b0 
100644 --- a/demo/setup/citibike_demo/A__setup_citibike_demo.sql +++ b/demo/citibike_demo_jinja/1_setup/A__setup.sql @@ -31,7 +31,7 @@ GRANT DATABASE ROLE IDENTIFIER($SC_W) TO DATABASE ROLE IDENTIFIER($SC_C); CREATE SCHEMA IF NOT EXISTS IDENTIFIER($TARGET_SCHEMA_NAME) WITH MANAGED ACCESS; -- USE SCHEMA INFORMATION_SCHEMA; -- DROP SCHEMA IF EXISTS PUBLIC; -GRANT OWNERSHIP ON SCHEMA IDENTIFIER($TARGET_SCHEMA_NAME) TO ROLE IDENTIFIER($DEPLOY_ROLE); +GRANT OWNERSHIP ON SCHEMA IDENTIFIER($TARGET_SCHEMA_NAME) TO ROLE IDENTIFIER($DEPLOY_ROLE) REVOKE CURRENT GRANTS; USE SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE); -- SCHEMA @@ -44,7 +44,6 @@ GRANT MONITOR ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDE -- SC_W -- None -- SC_C -GRANT MODIFY, APPLYBUDGET, ADD SEARCH OPTIMIZATION ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDENTIFIER($SC_C); -- TABLES -- SC_M diff --git a/demo/citibike_demo_jinja/A__render.sql b/demo/citibike_demo_jinja/2_test/A__render.sql similarity index 100% rename from demo/citibike_demo_jinja/A__render.sql rename to demo/citibike_demo_jinja/2_test/A__render.sql diff --git a/demo/citibike_demo_jinja/R__render.sql b/demo/citibike_demo_jinja/2_test/R__render.sql similarity index 100% rename from demo/citibike_demo_jinja/R__render.sql rename to demo/citibike_demo_jinja/2_test/R__render.sql diff --git a/demo/citibike_demo_jinja/V1.0.0__render.sql b/demo/citibike_demo_jinja/2_test/V1.0.0__render.sql similarity index 100% rename from demo/citibike_demo_jinja/V1.0.0__render.sql rename to demo/citibike_demo_jinja/2_test/V1.0.0__render.sql diff --git a/demo/citibike_demo_jinja/V1.1.0__initial_database_objects.sql b/demo/citibike_demo_jinja/2_test/V1.1.0__initial_database_objects.sql similarity index 100% rename from demo/citibike_demo_jinja/V1.1.0__initial_database_objects.sql rename to demo/citibike_demo_jinja/2_test/V1.1.0__initial_database_objects.sql diff --git a/demo/citibike_demo_jinja/V1.2.0__load_tables_from_s3.sql 
b/demo/citibike_demo_jinja/2_test/V1.2.0__load_tables_from_s3.sql similarity index 100% rename from demo/citibike_demo_jinja/V1.2.0__load_tables_from_s3.sql rename to demo/citibike_demo_jinja/2_test/V1.2.0__load_tables_from_s3.sql diff --git a/demo/teardown/citibike_demo_jinja/A__teardown_citibike_demo_jinja.sql b/demo/citibike_demo_jinja/3_teardown/A__teardown.sql similarity index 100% rename from demo/teardown/citibike_demo_jinja/A__teardown_citibike_demo_jinja.sql rename to demo/citibike_demo_jinja/3_teardown/A__teardown.sql diff --git a/demo/citibike_demo_jinja/schemachange-config.yml b/demo/citibike_demo_jinja/schemachange-config.yml index 0cd98e41..88cac3ed 100644 --- a/demo/citibike_demo_jinja/schemachange-config.yml +++ b/demo/citibike_demo_jinja/schemachange-config.yml @@ -3,12 +3,6 @@ config-version: 1 root-folder: "./demo/{{ env_var('SCENARIO_NAME')}}" modules-folder: "./demo/{{ env_var('SCENARIO_NAME')}}/modules" -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" -snowflake-schema: "{{ env_var('MY_TARGET_SCHEMA')}}" change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.{{ env_var('MY_TARGET_SCHEMA')}}.CHANGE_HISTORY" create-change-history-table: true diff --git a/demo/provision/setup_schemachange_schema.sql b/demo/provision/setup_schemachange_schema.sql index e1eef074..9e1c1fd9 100644 --- a/demo/provision/setup_schemachange_schema.sql +++ b/demo/provision/setup_schemachange_schema.sql @@ -53,7 +53,6 @@ GRANT MONITOR ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDE -- SC_W -- None -- SC_C -GRANT MODIFY, APPLYBUDGET, ADD SEARCH OPTIMIZATION ON SCHEMA IDENTIFIER($SCHEMACHANGE_NAMESPACE) TO DATABASE ROLE IDENTIFIER($SC_C); -- TABLES -- SC_M diff --git a/demo/setup/citibike_demo_jinja/schemachange-config.yml 
b/demo/schemachange-config-setup.yml similarity index 63% rename from demo/setup/citibike_demo_jinja/schemachange-config.yml rename to demo/schemachange-config-setup.yml index 16a4c464..e68543cb 100644 --- a/demo/setup/citibike_demo_jinja/schemachange-config.yml +++ b/demo/schemachange-config-setup.yml @@ -2,11 +2,6 @@ config-version: 1 root-folder: "./demo/setup/{{ env_var('SCENARIO_NAME')}}" -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" # tracking the setup step in a different change history table to use schemachange setup and teardown separate from deployment. change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.SCHEMACHANGE.{{ env_var('SCENARIO_NAME')}}_CHANGE_HISTORY" create-change-history-table: true diff --git a/demo/teardown/citibike_demo/schemachange-config.yml b/demo/schemachange-config-teardown.yml similarity index 63% rename from demo/teardown/citibike_demo/schemachange-config.yml rename to demo/schemachange-config-teardown.yml index d4f8fc00..800415a0 100644 --- a/demo/teardown/citibike_demo/schemachange-config.yml +++ b/demo/schemachange-config-teardown.yml @@ -2,11 +2,6 @@ config-version: 1 root-folder: "./demo/teardown/{{ env_var('SCENARIO_NAME')}}" -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" # tracking the setup step in a different change history table to use schemachange setup and teardown separate from deployment. 
change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.SCHEMACHANGE.{{ env_var('SCENARIO_NAME')}}_CHANGE_HISTORY" create-change-history-table: true diff --git a/demo/setup/basics_demo/schemachange-config.yml b/demo/setup/basics_demo/schemachange-config.yml deleted file mode 100644 index 16a4c464..00000000 --- a/demo/setup/basics_demo/schemachange-config.yml +++ /dev/null @@ -1,16 +0,0 @@ -config-version: 1 - -root-folder: "./demo/setup/{{ env_var('SCENARIO_NAME')}}" - -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" -# tracking the setup step in a different change history table to use schemachange setup and teardown separate from deployment. -change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.SCHEMACHANGE.{{ env_var('SCENARIO_NAME')}}_CHANGE_HISTORY" -create-change-history-table: true - -vars: - database_name: "{{env_var('SNOWFLAKE_DATABASE')}}" - schema_name: "{{env_var('MY_TARGET_SCHEMA')}}" diff --git a/demo/setup/citibike_demo/schemachange-config.yml b/demo/setup/citibike_demo/schemachange-config.yml deleted file mode 100644 index 16a4c464..00000000 --- a/demo/setup/citibike_demo/schemachange-config.yml +++ /dev/null @@ -1,16 +0,0 @@ -config-version: 1 - -root-folder: "./demo/setup/{{ env_var('SCENARIO_NAME')}}" - -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" -# tracking the setup step in a different change history table to use schemachange setup and teardown separate from deployment. 
-change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.SCHEMACHANGE.{{ env_var('SCENARIO_NAME')}}_CHANGE_HISTORY" -create-change-history-table: true - -vars: - database_name: "{{env_var('SNOWFLAKE_DATABASE')}}" - schema_name: "{{env_var('MY_TARGET_SCHEMA')}}" diff --git a/demo/teardown/basics_demo/schemachange-config.yml b/demo/teardown/basics_demo/schemachange-config.yml deleted file mode 100644 index d4f8fc00..00000000 --- a/demo/teardown/basics_demo/schemachange-config.yml +++ /dev/null @@ -1,16 +0,0 @@ -config-version: 1 - -root-folder: "./demo/teardown/{{ env_var('SCENARIO_NAME')}}" - -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" -# tracking the setup step in a different change history table to use schemachange setup and teardown separate from deployment. 
-change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.SCHEMACHANGE.{{ env_var('SCENARIO_NAME')}}_CHANGE_HISTORY" -create-change-history-table: true - -vars: - database_name: "{{env_var('SNOWFLAKE_DATABASE')}}" - schema_name: "{{env_var('MY_TARGET_SCHEMA')}}" diff --git a/demo/teardown/citibike_demo_jinja/schemachange-config.yml b/demo/teardown/citibike_demo_jinja/schemachange-config.yml deleted file mode 100644 index d4f8fc00..00000000 --- a/demo/teardown/citibike_demo_jinja/schemachange-config.yml +++ /dev/null @@ -1,16 +0,0 @@ -config-version: 1 - -root-folder: "./demo/teardown/{{ env_var('SCENARIO_NAME')}}" - -snowflake-user: "{{ env_var('SNOWFLAKE_USER')}}" -snowflake-account: "{{ env_var('SNOWFLAKE_ACCOUNT')}}" -snowflake-role: "{{ env_var('SNOWFLAKE_ROLE')}}" -snowflake-warehouse: "{{ env_var('SNOWFLAKE_WAREHOUSE')}}" -snowflake-database: "{{ env_var('SNOWFLAKE_DATABASE')}}" -# tracking the setup step in a different change history table to use schemachange setup and teardown separate from deployment. 
-change-history-table: "{{ env_var('SNOWFLAKE_DATABASE')}}.SCHEMACHANGE.{{ env_var('SCENARIO_NAME')}}_CHANGE_HISTORY" -create-change-history-table: true - -vars: - database_name: "{{env_var('SNOWFLAKE_DATABASE')}}" - schema_name: "{{env_var('MY_TARGET_SCHEMA')}}" diff --git a/schemachange/cli.py b/schemachange/cli.py index f896d096..fc04e3d1 100644 --- a/schemachange/cli.py +++ b/schemachange/cli.py @@ -9,7 +9,7 @@ from schemachange.config.get_merged_config import get_merged_config from schemachange.deploy import deploy from schemachange.redact_config_secrets import redact_config_secrets -from schemachange.session.SnowflakeSession import get_session_from_config +from schemachange.session.SnowflakeSession import SnowflakeSession # region Global Variables # metadata @@ -42,7 +42,7 @@ def main(): % {"schemachange_version": SCHEMACHANGE_VERSION} ) - config = get_merged_config() + config = get_merged_config(logger=module_logger) redact_config_secrets(config_secrets=config.secrets) structlog.configure( @@ -62,12 +62,11 @@ def main(): logger=logger, ) else: - config.check_for_deploy_args() - session = get_session_from_config( - config=config, + session = SnowflakeSession( schemachange_version=SCHEMACHANGE_VERSION, - snowflake_application_name=SNOWFLAKE_APPLICATION_NAME, + application=SNOWFLAKE_APPLICATION_NAME, logger=logger, + **config.get_session_kwargs(), ) deploy(config=config, session=session) diff --git a/schemachange/config/BaseConfig.py b/schemachange/config/BaseConfig.py index a77c8daa..e2ecccf4 100644 --- a/schemachange/config/BaseConfig.py +++ b/schemachange/config/BaseConfig.py @@ -4,7 +4,7 @@ import logging from abc import ABC from pathlib import Path -from typing import Literal, ClassVar, TypeVar +from typing import Literal, TypeVar import structlog @@ -20,8 +20,6 @@ @dataclasses.dataclass(frozen=True) class BaseConfig(ABC): - default_config_file_name: ClassVar[str] = "schemachange-config.yml" - subcommand: Literal["deploy", "render"] config_version: int | 
None = None config_file_path: Path | None = None diff --git a/schemachange/config/DeployConfig.py b/schemachange/config/DeployConfig.py index e60656f3..e1834707 100644 --- a/schemachange/config/DeployConfig.py +++ b/schemachange/config/DeployConfig.py @@ -12,12 +12,22 @@ @dataclasses.dataclass(frozen=True) class DeployConfig(BaseConfig): subcommand: Literal["deploy"] = "deploy" - snowflake_account: str | None = None - snowflake_user: str | None = None - snowflake_role: str | None = None - snowflake_warehouse: str | None = None - snowflake_database: str | None = None - snowflake_schema: str | None = None + snowflake_account: str | None = ( + None # TODO: Remove when connections.toml is enforced + ) + snowflake_user: str | None = None # TODO: Remove when connections.toml is enforced + snowflake_role: str | None = None # TODO: Remove when connections.toml is enforced + snowflake_warehouse: str | None = ( + None # TODO: Remove when connections.toml is enforced + ) + snowflake_database: str | None = ( + None # TODO: Remove when connections.toml is enforced + ) + snowflake_schema: str | None = ( + None # TODO: Remove when connections.toml is enforced + ) + connections_file_path: Path | None = None + connection_name: str | None = None # TODO: Turn change_history_table into three arguments. 
There's no need to parse it from a string change_history_table: ChangeHistoryTable | None = dataclasses.field( default_factory=ChangeHistoryTable @@ -26,22 +36,29 @@ class DeployConfig(BaseConfig): autocommit: bool = False dry_run: bool = False query_tag: str | None = None - oauth_config: dict | None = None @classmethod def factory( cls, config_file_path: Path, - snowflake_role: str | None = None, - snowflake_warehouse: str | None = None, - snowflake_database: str | None = None, - snowflake_schema: str | None = None, change_history_table: str | None = None, **kwargs, ): if "subcommand" in kwargs: kwargs.pop("subcommand") + # TODO: Remove when connections.toml is enforced + for sf_input in [ + "snowflake_role", + "snowflake_warehouse", + "snowflake_database", + "snowflake_schema", + ]: + if sf_input in kwargs and kwargs[sf_input] is not None: + kwargs[sf_input] = get_snowflake_identifier_string( + kwargs[sf_input], sf_input + ) + change_history_table = ChangeHistoryTable.from_str( table_str=change_history_table ) @@ -49,37 +66,22 @@ def factory( return super().factory( subcommand="deploy", config_file_path=config_file_path, - snowflake_role=get_snowflake_identifier_string( - snowflake_role, "snowflake_role" - ), - snowflake_warehouse=get_snowflake_identifier_string( - snowflake_warehouse, "snowflake_warehouse" - ), - snowflake_database=get_snowflake_identifier_string( - snowflake_database, "snowflake_database" - ), - snowflake_schema=get_snowflake_identifier_string( - snowflake_schema, "snowflake_schema" - ), change_history_table=change_history_table, **kwargs, ) - def check_for_deploy_args(self) -> None: - """Make sure we have the required connection info""" - - req_args = { - "snowflake_account": self.snowflake_account, - "snowflake_user": self.snowflake_user, - "snowflake_role": self.snowflake_role, - "snowflake_warehouse": self.snowflake_warehouse, + def get_session_kwargs(self) -> dict: + session_kwargs = { + "account": self.snowflake_account, # TODO: Remove 
when connections.toml is enforced + "user": self.snowflake_user, # TODO: Remove when connections.toml is enforced + "role": self.snowflake_role, # TODO: Remove when connections.toml is enforced + "warehouse": self.snowflake_warehouse, # TODO: Remove when connections.toml is enforced + "database": self.snowflake_database, # TODO: Remove when connections.toml is enforced + "schema": self.snowflake_schema, # TODO: Remove when connections.toml is enforced + "connections_file_path": self.connections_file_path, + "connection_name": self.connection_name, + "change_history_table": self.change_history_table, + "autocommit": self.autocommit, + "query_tag": self.query_tag, } - missing_args = [key for key, value in req_args.items() if value is None] - - if len(missing_args) == 0: - return - - missing_args = ", ".join({arg.replace("_", " ") for arg in missing_args}) - raise ValueError( - f"Missing config values. The following config values are required: {missing_args}" - ) + return {k: v for k, v in session_kwargs.items() if v is not None} diff --git a/schemachange/config/get_merged_config.py b/schemachange/config/get_merged_config.py index 607c09e9..f1a2ad52 100644 --- a/schemachange/config/get_merged_config.py +++ b/schemachange/config/get_merged_config.py @@ -3,53 +3,86 @@ from pathlib import Path from typing import Union, Optional +import structlog + from schemachange.config.DeployConfig import DeployConfig from schemachange.config.RenderConfig import RenderConfig from schemachange.config.parse_cli_args import parse_cli_args -from schemachange.config.utils import load_yaml_config, validate_directory +from schemachange.config.utils import ( + load_yaml_config, + validate_directory, + validate_file_path, +) def get_yaml_config_kwargs(config_file_path: Optional[Path]) -> dict: - # TODO: I think the configuration key for oauthconfig should be oauth-config. 
- # This looks like a bug in the current state of the repo to me - # load YAML inputs and convert kebabs to snakes kwargs = { - k.replace("-", "_").replace("oauthconfig", "oauth_config"): v - for (k, v) in load_yaml_config(config_file_path).items() + k.replace("-", "_"): v for (k, v) in load_yaml_config(config_file_path).items() } if "verbose" in kwargs: - kwargs["log_level"] = logging.DEBUG + if kwargs["verbose"]: + kwargs["log_level"] = logging.DEBUG kwargs.pop("verbose") if "vars" in kwargs: kwargs["config_vars"] = kwargs.pop("vars") - return kwargs - - -def get_merged_config() -> Union[DeployConfig, RenderConfig]: + for deprecated_arg in [ + "snowflake_account", + "snowflake_user", + "snowflake_role", + "snowflake_warehouse", + "snowflake_database", + "snowflake_schema", + ]: + if deprecated_arg in kwargs: + sys.stderr.write( + f"DEPRECATED - Set in connections.toml instead: {deprecated_arg}\n" + ) + + return {k: v for k, v in kwargs.items() if v is not None} + + +def get_merged_config( + logger: structlog.BoundLogger, +) -> Union[DeployConfig, RenderConfig]: cli_kwargs = parse_cli_args(sys.argv[1:]) + logger.debug("cli_kwargs", **cli_kwargs) + + cli_config_vars = cli_kwargs.pop("config_vars") - if "verbose" in cli_kwargs and cli_kwargs["verbose"]: - cli_kwargs["log_level"] = logging.DEBUG - cli_kwargs.pop("verbose") + connections_file_path = validate_file_path( + file_path=cli_kwargs.pop("connections_file_path", None) + ) - cli_config_vars = cli_kwargs.pop("config_vars", None) - if cli_config_vars is None: - cli_config_vars = {} + connection_name = cli_kwargs.pop("connection_name", None) config_folder = validate_directory(path=cli_kwargs.pop("config_folder", ".")) - config_file_path = Path(config_folder) / "schemachange-config.yml" + config_file_name = cli_kwargs.pop("config_file_name") + config_file_path = Path(config_folder) / config_file_name yaml_kwargs = get_yaml_config_kwargs( config_file_path=config_file_path, ) + logger.debug("yaml_kwargs", 
**yaml_kwargs) + yaml_config_vars = yaml_kwargs.pop("config_vars", None) if yaml_config_vars is None: yaml_config_vars = {} + if connections_file_path is None: + connections_file_path = yaml_kwargs.pop("connections_file_path", None) + if config_folder is not None and connections_file_path is not None: + # noinspection PyTypeChecker + connections_file_path = config_folder / connections_file_path + + connections_file_path = validate_file_path(file_path=connections_file_path) + + if connection_name is None: + connection_name = yaml_kwargs.pop("connection_name", None) + config_vars = { **yaml_config_vars, **cli_config_vars, @@ -62,6 +95,12 @@ def get_merged_config() -> Union[DeployConfig, RenderConfig]: **{k: v for k, v in yaml_kwargs.items() if v is not None}, **{k: v for k, v in cli_kwargs.items() if v is not None}, } + if connections_file_path is not None: + kwargs["connections_file_path"] = connections_file_path + if connection_name is not None: + kwargs["connection_name"] = connection_name + + logger.debug("final kwargs", **kwargs) if cli_kwargs["subcommand"] == "deploy": return DeployConfig.factory(**kwargs) diff --git a/schemachange/config/parse_cli_args.py b/schemachange/config/parse_cli_args.py index f287cd5f..8b4cd010 100644 --- a/schemachange/config/parse_cli_args.py +++ b/schemachange/config/parse_cli_args.py @@ -2,6 +2,8 @@ import argparse import json +import logging +import sys from enum import Enum import structlog @@ -9,6 +11,25 @@ logger = structlog.getLogger(__name__) +class DeprecateConnectionArgAction(argparse.Action): + def __init__(self, *args, **kwargs): + self.call_count = 0 + if "help" in kwargs: + kwargs["help"] = ( + f'[DEPRECATED - Set in connections.toml instead.] {kwargs["help"]}' + ) + super().__init__(*args, **kwargs) + + def __call__(self, parser, namespace, values, option_string=None): + if self.call_count == 0: + sys.stderr.write( + f"{', '.join(self.option_strings)} is deprecated. 
It will be ignored in future versions.\n" + ) + sys.stderr.write(self.help + "\n") + self.call_count += 1 + setattr(namespace, self.dest, values) + + class EnumAction(argparse.Action): """ Argparse action for handling Enums @@ -58,6 +79,14 @@ def parse_cli_args(args) -> dict: "(the default is the current working directory)", required=False, ) + parent_parser.add_argument( + "--config-file-name", + type=str, + default="schemachange-config.yml", + help="The schemachange config YAML file name. Must be in the directory supplied as the config-folder " + "(Default: schemachange-config.yml)", + required=False, + ) parent_parser.add_argument( "-f", "--root-folder", @@ -92,12 +121,14 @@ def parse_cli_args(args) -> dict: subcommands = parser.add_subparsers(dest="subcommand") parser_deploy = subcommands.add_parser("deploy", parents=[parent_parser]) + parser_deploy.register("action", "deprecate", DeprecateConnectionArgAction) parser_deploy.add_argument( "-a", "--snowflake-account", type=str, help="The name of the snowflake account (e.g. xy12345.east-us-2.azure)", required=False, + action="deprecate", ) parser_deploy.add_argument( "-u", @@ -105,6 +136,7 @@ def parse_cli_args(args) -> dict: type=str, help="The name of the snowflake user", required=False, + action="deprecate", ) parser_deploy.add_argument( "-r", @@ -112,6 +144,7 @@ def parse_cli_args(args) -> dict: type=str, help="The name of the default role to use", required=False, + action="deprecate", ) parser_deploy.add_argument( "-w", @@ -119,6 +152,7 @@ def parse_cli_args(args) -> dict: type=str, help="The name of the default warehouse to use. Can be overridden in the change scripts.", required=False, + action="deprecate", ) parser_deploy.add_argument( "-d", @@ -126,6 +160,7 @@ def parse_cli_args(args) -> dict: type=str, help="The name of the default database to use. 
Can be overridden in the change scripts.", required=False, + action="deprecate", ) parser_deploy.add_argument( "-s", @@ -133,6 +168,19 @@ def parse_cli_args(args) -> dict: type=str, help="The name of the default schema to use. Can be overridden in the change scripts.", required=False, + action="deprecate", + ) + parser_deploy.add_argument( + "--connections-file-path", + type=str, + help="Override the default connections.toml file path at snowflake.connector.constants.CONNECTIONS_FILE (OS specific)", + required=False, + ) + parser_deploy.add_argument( + "--connection-name", + type=str, + help="Override the default connections.toml connection name. Other connection-related values will override these connection values.", + required=False, ) parser_deploy.add_argument( "-c", @@ -173,14 +221,6 @@ def parse_cli_args(args) -> dict: help="The string to add to the Snowflake QUERY_TAG session value for each query executed", required=False, ) - parser_deploy.add_argument( - "--oauth-config", - type=json.loads, - help='Define values for the variables to Make Oauth Token requests (e.g. {"token-provider-url": ' - '"https//...", "token-request-payload": {"client_id": "GUID_xyz",...},... 
})', - required=False, - ) - parser_render = subcommands.add_parser( "render", description="Renders a script to the console, used to check and verify jinja output from scripts.", @@ -204,7 +244,16 @@ def parse_cli_args(args) -> dict: if "log_level" in parsed_kwargs and isinstance(parsed_kwargs["log_level"], Enum): parsed_kwargs["log_level"] = parsed_kwargs["log_level"].value + parsed_kwargs["config_vars"] = {} if "vars" in parsed_kwargs: - parsed_kwargs["config_vars"] = parsed_kwargs.pop("vars") + config_vars = parsed_kwargs.pop("vars") + if config_vars is not None: + parsed_kwargs["config_vars"] = config_vars + + if "verbose" in parsed_kwargs: + parsed_kwargs["log_level"] = ( + logging.DEBUG if parsed_kwargs["verbose"] else logging.INFO + ) + parsed_kwargs.pop("verbose") - return parsed_kwargs + return {k: v for k, v in parsed_kwargs.items() if v is not None} diff --git a/schemachange/config/utils.py b/schemachange/config/utils.py index cd984951..af3f284b 100644 --- a/schemachange/config/utils.py +++ b/schemachange/config/utils.py @@ -1,31 +1,31 @@ from __future__ import annotations +import os import re from pathlib import Path from typing import Any + import jinja2 import jinja2.ext import structlog import yaml - from schemachange.JinjaEnvVar import JinjaEnvVar +import warnings logger = structlog.getLogger(__name__) snowflake_identifier_pattern = re.compile(r"^[\w]+$") -def get_snowflake_identifier_string(input_value: str, input_type: str) -> str: +def get_snowflake_identifier_string(input_value: str, input_type: str) -> str | None: # Words with alphanumeric characters and underscores only. 
- result = "" - if input_value is None: - result = None + return None elif snowflake_identifier_pattern.match(input_value): - result = input_value + return input_value elif input_value.startswith('"') and input_value.endswith('"'): - result = input_value + return input_value elif input_value.startswith('"') and not input_value.endswith('"'): raise ValueError( f"Invalid {input_type}: {input_value}. Missing ending double quote" @@ -35,9 +35,7 @@ def get_snowflake_identifier_string(input_value: str, input_type: str) -> str: f"Invalid {input_type}: {input_value}. Missing beginning double quote" ) else: - result = f'"{input_value}"' - - return result + return f'"{input_value}"' def get_config_secrets(config_vars: dict[str, dict | str] | None) -> set[str]: @@ -73,7 +71,9 @@ def inner_extract_dictionary_secrets( return inner_extract_dictionary_secrets(config_vars) -def validate_file_path(file_path: Path | str) -> Path: +def validate_file_path(file_path: Path | str | None) -> Path | None: + if file_path is None: + return None if isinstance(file_path, str): file_path = Path(file_path) if not file_path.is_file(): @@ -83,7 +83,7 @@ def validate_file_path(file_path: Path | str) -> Path: def validate_directory(path: Path | str | None) -> Path | None: if path is None: - return path + return None if isinstance(path, str): path = Path(path) if not path.is_dir(): @@ -130,3 +130,34 @@ def load_yaml_config(config_file_path: Path | None) -> dict[str, Any]: config = yaml.load(config_template.render(), Loader=yaml.FullLoader) logger.info("Using config file", config_file_path=str(config_file_path)) return config + + +def get_snowsql_pwd() -> str | None: + snowsql_pwd = os.getenv("SNOWSQL_PWD") + if snowsql_pwd is not None and snowsql_pwd: + warnings.warn( + "The SNOWSQL_PWD environment variable is deprecated and " + "will be removed in a later version of schemachange. 
" + "Please use SNOWFLAKE_PASSWORD instead.", + DeprecationWarning, + ) + return snowsql_pwd + + +def get_snowflake_password() -> str | None: + snowflake_password = os.getenv("SNOWFLAKE_PASSWORD") + snowsql_pwd = get_snowsql_pwd() + + if snowflake_password is not None and snowflake_password: + # Check legacy/deprecated env variable + if snowsql_pwd is not None and snowsql_pwd: + warnings.warn( + "Environment variables SNOWFLAKE_PASSWORD and SNOWSQL_PWD " + "are both present, using SNOWFLAKE_PASSWORD", + DeprecationWarning, + ) + return snowflake_password + elif snowsql_pwd is not None and snowsql_pwd: + return snowsql_pwd + else: + return None diff --git a/schemachange/session/Credential.py b/schemachange/session/Credential.py index f08e767b..e69de29b 100644 --- a/schemachange/session/Credential.py +++ b/schemachange/session/Credential.py @@ -1,106 +0,0 @@ -from __future__ import annotations - -import dataclasses -import os -from typing import Literal, Union - -import structlog - -from schemachange.session.utils import ( - get_snowflake_password, - get_private_key_bytes, - get_oauth_token, -) - - -@dataclasses.dataclass(frozen=True) -class OauthCredential: - token: str - authenticator: Literal["oauth"] = "oauth" - - -@dataclasses.dataclass(frozen=True) -class PasswordCredential: - password: str - authenticator: Literal["snowflake"] = "snowflake" - - -@dataclasses.dataclass(frozen=True) -class PrivateKeyCredential: - private_key: bytes - authenticator: Literal["snowflake"] = "snowflake" - - -@dataclasses.dataclass(frozen=True) -class ExternalBrowserCredential: - password: str | None = None - authenticator: Literal["externalbrowser"] = "externalbrowser" - - -@dataclasses.dataclass(frozen=True) -class OktaCredential: - authenticator: str - password: str - - -SomeCredential = Union[ - OauthCredential, - PasswordCredential, - ExternalBrowserCredential, - OktaCredential, - PrivateKeyCredential, -] - - -def credential_factory( - logger: structlog.BoundLogger, - 
oauth_config: dict | None = None, -) -> SomeCredential: - snowflake_authenticator = os.getenv("SNOWFLAKE_AUTHENTICATOR", default="snowflake") - - # OAuth based authentication - if snowflake_authenticator.lower() == "oauth": - logger.debug("Proceeding with Oauth Access Token authentication") - return OauthCredential(token=get_oauth_token(oauth_config)) - - # External Browser based SSO - if snowflake_authenticator.lower() == "externalbrowser": - logger.debug("Proceeding with External Browser authentication") - return ExternalBrowserCredential() - - snowflake_password = get_snowflake_password() - - # IDP based Authentication, limited to Okta - if snowflake_authenticator.lower()[:8] == "https://": - logger.debug( - "Proceeding with Okta authentication", okta_endpoint=snowflake_authenticator - ) - return OktaCredential( - authenticator=snowflake_authenticator, password=snowflake_password - ) - - if snowflake_authenticator.lower() != "snowflake": - logger.debug( - "Supplied authenticator is not supported authenticator option. Choose from snowflake, " - "externalbrowser, oauth, https://.okta.com. " - "Using default value = 'snowflake'", - snowflake_authenticator=snowflake_authenticator, - ) - - if snowflake_password: - logger.debug("Proceeding with password authentication") - - return PasswordCredential(password=snowflake_password) - - if os.getenv("SNOWFLAKE_PRIVATE_KEY_PATH", ""): - logger.debug("Proceeding with private key authentication") - - return PrivateKeyCredential(private_key=get_private_key_bytes()) - - raise NameError( - "Missing environment variable(s). \n" - "SNOWFLAKE_PASSWORD must be defined for password authentication. \n" - "SNOWFLAKE_PRIVATE_KEY_PATH and (optional) SNOWFLAKE_PRIVATE_KEY_PASSPHRASE " - "must be defined for private key authentication. \n" - "SNOWFLAKE_AUTHENTICATOR must be defined is using Oauth, OKTA or external Browser Authentication." 
- ) diff --git a/schemachange/session/SnowflakeSession.py b/schemachange/session/SnowflakeSession.py index bb914ca5..ba987f4e 100644 --- a/schemachange/session/SnowflakeSession.py +++ b/schemachange/session/SnowflakeSession.py @@ -3,26 +3,23 @@ import hashlib import time from collections import defaultdict -from dataclasses import asdict from textwrap import dedent, indent import snowflake.connector import structlog from schemachange.config.ChangeHistoryTable import ChangeHistoryTable -from schemachange.config.DeployConfig import DeployConfig -from schemachange.session.Credential import SomeCredential, credential_factory +from schemachange.config.utils import get_snowflake_identifier_string from schemachange.session.Script import VersionedScript, RepeatableScript, AlwaysScript class SnowflakeSession: - user: str account: str - role: str - warehouse: str - database: str | None + user: str | None # TODO: user: str when connections.toml is enforced + role: str | None # TODO: role: str when connections.toml is enforced + warehouse: str | None # TODO: warehouse: str when connections.toml is enforced + database: str | None # TODO: database: str when connections.toml is enforced schema: str | None - query_tag: str | None autocommit: bool change_history_table: ChangeHistoryTable logger: structlog.BoundLogger @@ -35,46 +32,66 @@ class SnowflakeSession: def __init__( self, - snowflake_user: str, - snowflake_account: str, - snowflake_role: str, - snowflake_warehouse: str, schemachange_version: str, application: str, - credential: SomeCredential, change_history_table: ChangeHistoryTable, logger: structlog.BoundLogger, - autocommit: bool = False, - snowflake_database: str | None = None, - snowflake_schema: str | None = None, + connection_name: str | None = None, + connections_file_path: str | None = None, + account: str | None = None, # TODO: Remove when connections.toml is enforced + user: str | None = None, # TODO: Remove when connections.toml is enforced + role: str | None = 
None, # TODO: Remove when connections.toml is enforced + warehouse: str | None = None, # TODO: Remove when connections.toml is enforced + database: str | None = None, # TODO: Remove when connections.toml is enforced + schema: str | None = None, # TODO: Remove when connections.toml is enforced query_tag: str | None = None, + autocommit: bool = False, + **kwargs, # TODO: Remove when connections.toml is enforced ): - self.user = snowflake_user - self.account = snowflake_account - self.role = snowflake_role - self.warehouse = snowflake_warehouse - self.database = snowflake_database - self.schema = snowflake_schema - self.autocommit = autocommit self.change_history_table = change_history_table + self.autocommit = autocommit self.logger = logger self.session_parameters = {"QUERY_TAG": f"schemachange {schemachange_version}"} if query_tag: self.session_parameters["QUERY_TAG"] += f";{query_tag}" - self.con = snowflake.connector.connect( - user=self.user, - account=self.account, - role=self.role, - warehouse=self.warehouse, - database=self.database, - schema=self.schema, - application=application, - session_parameters=self.session_parameters, - **asdict(credential), - ) + connect_kwargs = { + "account": account, # TODO: Remove when connections.toml is enforced + "user": user, # TODO: Remove when connections.toml is enforced + "database": database, # TODO: Remove when connections.toml is enforced + "schema": schema, # TODO: Remove when connections.toml is enforced + "role": role, # TODO: Remove when connections.toml is enforced + "warehouse": warehouse, # TODO: Remove when connections.toml is enforced + "private_key_file": kwargs.get( + "private_key_path" + ), # TODO: Remove when connections.toml is enforced + "token": kwargs.get( + "oauth_token" + ), # TODO: Remove when connections.toml is enforced + "password": kwargs.get( + "password" + ), # TODO: Remove when connections.toml is enforced + "authenticator": kwargs.get( + "authenticator" + ), # TODO: Remove when 
connections.toml is enforced + "connection_name": connection_name, + "connections_file_path": connections_file_path, + "application": application, + "session_parameters": self.session_parameters, + } + connect_kwargs = {k: v for k, v in connect_kwargs.items() if v is not None} + self.logger.debug("snowflake.connector.connect kwargs", **connect_kwargs) + self.con = snowflake.connector.connect(**connect_kwargs) print(f"Current session ID: {self.con.session_id}") + self.account = self.con.account + self.user = get_snowflake_identifier_string(self.con.user, "user") + self.role = get_snowflake_identifier_string(self.con.role, "role") + self.warehouse = get_snowflake_identifier_string( + self.con.warehouse, "warehouse" + ) + self.database = get_snowflake_identifier_string(self.con.database, "database") + self.schema = get_snowflake_identifier_string(self.con.schema, "schema") if not self.autocommit: self.con.autocommit(False) @@ -257,6 +274,7 @@ def fetch_versioned_scripts( "checksum": checksum, } + # noinspection PyTypeChecker return versioned_scripts, versions[0] if versions else None def reset_session(self, logger: structlog.BoundLogger): @@ -294,6 +312,7 @@ def apply_change_script( return logger.info("Applying change script") # Define a few other change related variables + # noinspection PyTypeChecker checksum = hashlib.sha224(script_content.encode("utf-8")).hexdigest() execution_time = 0 status = "Success" @@ -337,27 +356,3 @@ def apply_change_script( ); """ self.execute_snowflake_query(dedent(query), logger=logger) - - -def get_session_from_config( - config: DeployConfig, - logger: structlog.BoundLogger, - schemachange_version: str, - snowflake_application_name: str, -) -> SnowflakeSession: - credential = credential_factory(logger=logger, oauth_config=config.oauth_config) - return SnowflakeSession( - snowflake_user=config.snowflake_user, - snowflake_account=config.snowflake_account, - snowflake_role=config.snowflake_role, - 
snowflake_warehouse=config.snowflake_warehouse, - schemachange_version=schemachange_version, - application=snowflake_application_name, - credential=credential, - change_history_table=config.change_history_table, - logger=logger, - autocommit=config.autocommit, - snowflake_database=config.snowflake_database, - snowflake_schema=config.snowflake_schema, - query_tag=config.query_tag, - ) diff --git a/schemachange/session/utils.py b/schemachange/session/utils.py deleted file mode 100644 index 9cc58f38..00000000 --- a/schemachange/session/utils.py +++ /dev/null @@ -1,85 +0,0 @@ -from __future__ import annotations - -import json -import os -import warnings - -import requests -import structlog -from cryptography.hazmat.backends import default_backend -from cryptography.hazmat.primitives import serialization - -logger = structlog.getLogger(__name__) - - -def get_snowflake_password() -> str | None: - snowflake_password = None - if os.getenv("SNOWFLAKE_PASSWORD") is not None and os.getenv("SNOWFLAKE_PASSWORD"): - snowflake_password = os.getenv("SNOWFLAKE_PASSWORD") - - # Check legacy/deprecated env variable - if os.getenv("SNOWSQL_PWD") is not None and os.getenv("SNOWSQL_PWD"): - if snowflake_password: - warnings.warn( - "Environment variables SNOWFLAKE_PASSWORD and SNOWSQL_PWD " - "are both present, using SNOWFLAKE_PASSWORD", - DeprecationWarning, - ) - else: - warnings.warn( - "The SNOWSQL_PWD environment variable is deprecated and " - "will be removed in a later version of schemachange. " - "Please use SNOWFLAKE_PASSWORD instead.", - DeprecationWarning, - ) - snowflake_password = os.getenv("SNOWSQL_PWD") - return snowflake_password - - -def get_private_key_password() -> bytes | None: - private_key_password = os.getenv("SNOWFLAKE_PRIVATE_KEY_PASSPHRASE", "") - - if private_key_password: - return private_key_password.encode() - - logger.debug( - "No private key passphrase provided. Assuming the key is not encrypted." 
- ) - - return None - - -def get_private_key_bytes() -> bytes: - private_key_password = get_private_key_password() - with open(os.environ["SNOWFLAKE_PRIVATE_KEY_PATH"], "rb") as key: - p_key = serialization.load_pem_private_key( - key.read(), - password=private_key_password, - backend=default_backend(), - ) - - return p_key.private_bytes( - encoding=serialization.Encoding.DER, - format=serialization.PrivateFormat.PKCS8, - encryption_algorithm=serialization.NoEncryption(), - ) - - -def get_oauth_token(oauth_config: dict): - req_info = { - "url": oauth_config["token-provider-url"], - "headers": oauth_config["token-request-headers"], - "data": oauth_config["token-request-payload"], - } - token_name = oauth_config["token-response-name"] - response = requests.post(**req_info) - response_dict = json.loads(response.text) - try: - return response_dict[token_name] - except KeyError: - keys = ", ".join(response_dict.keys()) - errormessage = f"Response Json contains keys: {keys} \n but not {token_name}" - # if there is an error passed with the response include that - if "error_description" in response_dict.keys(): - errormessage = f"{errormessage}\n error description: {response_dict['error_description']}" - raise KeyError(errormessage) diff --git a/tests/config/__init__.py b/tests/config/__init__.py new file mode 100644 index 00000000..e69de29b diff --git a/tests/config/schemachange-config-full-no-connection.yml b/tests/config/schemachange-config-full-no-connection.yml new file mode 100644 index 00000000..189c65f2 --- /dev/null +++ b/tests/config/schemachange-config-full-no-connection.yml @@ -0,0 +1,18 @@ +config-version: 1 +root-folder: 'root-folder-from-yaml' +modules-folder: 'modules-folder-from-yaml' +snowflake-account: 'snowflake-account-from-yaml' +snowflake-user: 'snowflake-user-from-yaml' +snowflake-role: 'snowflake-role-from-yaml' +snowflake-warehouse: 'snowflake-warehouse-from-yaml' +snowflake-database: 'snowflake-database-from-yaml' +snowflake-schema: 
'snowflake-schema-from-yaml' +change-history-table: 'change-history-table-from-yaml' +vars: + var1: 'from_yaml' + var2: 'also_from_yaml' +create-change-history-table: false +autocommit: false +verbose: false +dry-run: false +query-tag: 'query-tag-from-yaml' diff --git a/tests/config/schemachange-config-full.yml b/tests/config/schemachange-config-full.yml new file mode 100644 index 00000000..d16ed799 --- /dev/null +++ b/tests/config/schemachange-config-full.yml @@ -0,0 +1,20 @@ +config-version: 1 +root-folder: 'root-folder-from-yaml' +modules-folder: 'modules-folder-from-yaml' +snowflake-account: 'snowflake-account-from-yaml' +snowflake-user: 'snowflake-user-from-yaml' +snowflake-role: 'snowflake-role-from-yaml' +snowflake-warehouse: 'snowflake-warehouse-from-yaml' +snowflake-database: 'snowflake-database-from-yaml' +snowflake-schema: 'snowflake-schema-from-yaml' +connections-file-path: 'connections.toml' +connection-name: 'myconnection' +change-history-table: 'change-history-table-from-yaml' +vars: + var1: 'from_yaml' + var2: 'also_from_yaml' +create-change-history-table: false +autocommit: false +verbose: false +dry-run: false +query-tag: 'query-tag-from-yaml' diff --git a/tests/config/schemachange-config-partial-with-connection.yml b/tests/config/schemachange-config-partial-with-connection.yml new file mode 100644 index 00000000..863925b9 --- /dev/null +++ b/tests/config/schemachange-config-partial-with-connection.yml @@ -0,0 +1,14 @@ +config-version: 1 +root-folder: 'root-folder-from-yaml' +modules-folder: 'modules-folder-from-yaml' +connections-file-path: 'connections.toml' +connection-name: 'myconnection' +change-history-table: 'change-history-table-from-yaml' +vars: + var1: 'from_yaml' + var2: 'also_from_yaml' +create-change-history-table: false +autocommit: false +verbose: false +dry-run: false +query-tag: 'query-tag-from-yaml' diff --git a/tests/config/schemachange-config.yml b/tests/config/schemachange-config.yml index 73a5e56a..3dae2c5e 100644 --- 
a/tests/config/schemachange-config.yml +++ b/tests/config/schemachange-config.yml @@ -1,30 +1 @@ config-version: 1 -root-folder: 'root-folder-from-yaml' -modules-folder: 'modules-folder-from-yaml' -snowflake-account: 'snowflake-account-from-yaml' -snowflake-user: 'snowflake-user-from-yaml' -snowflake-role: 'snowflake-role-from-yaml' -snowflake-warehouse: 'snowflake-warehouse-from-yaml' -snowflake-database: 'snowflake-database-from-yaml' -snowflake-schema: 'snowflake-schema-from-yaml' -change-history-table: 'change-history-table-from-yaml' -vars: - var1: 'from_yaml' - var2: 'also_from_yaml' -create-change-history-table: false -autocommit: false -verbose: false -dry-run: false -query-tag: 'query-tag-from-yaml' -oauthconfig: - token-provider-url: 'token-provider-url-from-yaml' - token-response-name: 'token-response-name-from-yaml' - token-request-headers: - Content-Type: 'Content-Type-from-yaml' - User-Agent: 'User-Agent-from-yaml' - token-request-payload: - client_id: 'id-from-yaml' - username: 'username-from-yaml' - password: 'password-from-yaml' - grant_type: 'type-from-yaml' - scope: 'scope-from-yaml' diff --git a/tests/config/test_ChangeHistoryTable.py b/tests/config/test_ChangeHistoryTable.py new file mode 100644 index 00000000..fa88b45c --- /dev/null +++ b/tests/config/test_ChangeHistoryTable.py @@ -0,0 +1,81 @@ +from __future__ import annotations + + +import pytest + +from schemachange.config.ChangeHistoryTable import ChangeHistoryTable + + +@pytest.mark.parametrize( + "table_str, expected", + [ + ( + "DATABASE_NAME.SCHEMA_NAME.TABLE_NAME", + ChangeHistoryTable( + table_name="TABLE_NAME", + schema_name="SCHEMA_NAME", + database_name="DATABASE_NAME", + ), + ), + ( + "SCHEMA_NAME.TABLE_NAME", + ChangeHistoryTable( + table_name="TABLE_NAME", + schema_name="SCHEMA_NAME", + database_name="METADATA", + ), + ), + ( + "TABLE_NAME", + ChangeHistoryTable( + table_name="TABLE_NAME", + schema_name="SCHEMACHANGE", + database_name="METADATA", + ), + ), + ], +) +def 
test_from_str_happy_path(table_str: str, expected: ChangeHistoryTable): + result = ChangeHistoryTable.from_str(table_str) + assert result == expected + + +def test_from_str_exception(): + with pytest.raises(ValueError) as e: + ChangeHistoryTable.from_str("FOUR.THREE.TWO.ONE") + + assert "Invalid change history table name:" in str(e.value) + + +@pytest.mark.parametrize( + "table, expected", + [ + ( + ChangeHistoryTable( + table_name="TABLE_NAME", + schema_name="SCHEMA_NAME", + database_name="DATABASE_NAME", + ), + "DATABASE_NAME.SCHEMA_NAME.TABLE_NAME", + ), + ( + ChangeHistoryTable( + table_name="TABLE_NAME", + schema_name="SCHEMA_NAME", + database_name="METADATA", + ), + "METADATA.SCHEMA_NAME.TABLE_NAME", + ), + ( + ChangeHistoryTable( + table_name="TABLE_NAME", + schema_name="SCHEMACHANGE", + database_name="METADATA", + ), + "METADATA.SCHEMACHANGE.TABLE_NAME", + ), + ], +) +def test_fully_qualified(table: ChangeHistoryTable, expected: str): + result = table.fully_qualified + assert result == expected diff --git a/tests/config/test_Config.py b/tests/config/test_Config.py deleted file mode 100644 index eca2e269..00000000 --- a/tests/config/test_Config.py +++ /dev/null @@ -1,280 +0,0 @@ -from __future__ import annotations - -from pathlib import Path -from unittest import mock - -import pytest - -from schemachange.config.BaseConfig import BaseConfig -from schemachange.config.ChangeHistoryTable import ChangeHistoryTable -from schemachange.config.DeployConfig import DeployConfig -from schemachange.config.RenderConfig import RenderConfig -from schemachange.config.utils import get_config_secrets - - -@pytest.fixture -@mock.patch("pathlib.Path.is_dir", return_value=True) -def yaml_config(_) -> DeployConfig: - return DeployConfig.factory( - config_file_path=Path(__file__).parent.parent.parent - / "demo" - / "basics_demo" - / "schemachange-config.yml", - root_folder=Path(__file__).parent.parent.parent / "demo" / "basics_demo", - 
modules_folder=Path(__file__).parent.parent.parent / "demo" / "basics_demo", - config_vars={"var1": "yaml_vars"}, - snowflake_account="yaml_snowflake_account", - snowflake_user="yaml_snowflake_user", - snowflake_role="yaml_snowflake_role", - snowflake_warehouse="yaml_snowflake_warehouse", - snowflake_database="yaml_snowflake_database", - snowflake_schema="yaml_snowflake_schema", - change_history_table="yaml_change_history_table", - create_change_history_table=True, - autocommit=True, - dry_run=True, - query_tag="yaml_query_tag", - oauth_config={"oauth": "yaml_oauth"}, - ) - - -class TestGetConfigSecrets: - def test_given_empty_config_should_not_error(self): - get_config_secrets(config_vars={}) - - def test_given_none_should_not_error(self): - get_config_secrets(None) - - @pytest.mark.parametrize( - "config_vars, secret", - [ - ({"secret": "secret_val1"}, "secret_val1"), - ({"SECret": "secret_val2"}, "secret_val2"), - ({"secret_key": "secret_val3"}, "secret_val3"), - ({"s3_bucket_secret": "secret_val4"}, "secret_val4"), - ({"s3SecretKey": "secret_val5"}, "secret_val5"), - ({"nested": {"s3_bucket_secret": "secret_val6"}}, "secret_val6"), - ], - ) - def test_given__vars_with_keys_should_extract_secret(self, config_vars, secret): - results = get_config_secrets(config_vars) - assert secret in results - - def test_given_vars_with_secrets_key_then_all_children_should_be_treated_as_secrets( - self, - ): - config_vars = { - "secrets": { - "database_name": "database_name_val", - "schema_name": "schema_name_val", - "nested_secrets": {"SEC_ONE": "SEC_ONE_VAL"}, - } - } - results = get_config_secrets(config_vars=config_vars) - - assert len(results) == 3 - assert "database_name_val" in results - assert "schema_name_val" in results - assert "SEC_ONE_VAL" in results - - def test_given_vars_with_nested_secrets_key_then_all_children_should_be_treated_as_secrets( - self, - ): - config_vars = { - "nested": { - "secrets": { - "database_name": "database_name_val", - "schema_name": 
"schema_name_val", - "nested": {"SEC_ONE": "SEC_ONE_VAL"}, - } - } - } - - results = get_config_secrets(config_vars) - - assert len(results) == 3 - assert "database_name_val" in results - assert "schema_name_val" in results - assert "SEC_ONE_VAL" in results - - def test_given_vars_with_same_secret_twice_then_only_extracted_once(self): - config_vars = { - "secrets": { - "database_name": "SECRET_VALUE", - "schema_name": "SECRET_VALUE", - "nested_secrets": {"SEC_ONE": "SECRET_VALUE"}, - } - } - - results = get_config_secrets(config_vars) - - assert len(results) == 1 - assert "SECRET_VALUE" in results - - -class TestTable: - @pytest.mark.parametrize( - "table_str, expected", - [ - ( - "DATABASE_NAME.SCHEMA_NAME.TABLE_NAME", - ChangeHistoryTable( - table_name="TABLE_NAME", - schema_name="SCHEMA_NAME", - database_name="DATABASE_NAME", - ), - ), - ( - "SCHEMA_NAME.TABLE_NAME", - ChangeHistoryTable( - table_name="TABLE_NAME", - schema_name="SCHEMA_NAME", - database_name="METADATA", - ), - ), - ( - "TABLE_NAME", - ChangeHistoryTable( - table_name="TABLE_NAME", - schema_name="SCHEMACHANGE", - database_name="METADATA", - ), - ), - ], - ) - def test_from_str_happy_path(self, table_str: str, expected: ChangeHistoryTable): - result = ChangeHistoryTable.from_str(table_str) - assert result == expected - - def test_from_str_exception(self): - with pytest.raises(ValueError) as e: - ChangeHistoryTable.from_str("FOUR.THREE.TWO.ONE") - - assert "Invalid change history table name:" in str(e.value) - - @pytest.mark.parametrize( - "table, expected", - [ - ( - ChangeHistoryTable( - table_name="TABLE_NAME", - schema_name="SCHEMA_NAME", - database_name="DATABASE_NAME", - ), - "DATABASE_NAME.SCHEMA_NAME.TABLE_NAME", - ), - ( - ChangeHistoryTable( - table_name="TABLE_NAME", - schema_name="SCHEMA_NAME", - database_name="METADATA", - ), - "METADATA.SCHEMA_NAME.TABLE_NAME", - ), - ( - ChangeHistoryTable( - table_name="TABLE_NAME", - schema_name="SCHEMACHANGE", - database_name="METADATA", - ), - 
"METADATA.SCHEMACHANGE.TABLE_NAME", - ), - ], - ) - def test_fully_qualified(self, table: ChangeHistoryTable, expected: str): - result = table.fully_qualified - assert result == expected - - -class TestConfig: - @mock.patch("pathlib.Path.is_dir", side_effect=[False]) - def test_invalid_root_folder(self, _): - with pytest.raises(Exception) as e_info: - DeployConfig.factory( - config_file_path=Path("some_config_file_name"), - root_folder="some_root_folder_name", - modules_folder="some_modules_folder_name", - config_vars={"some": "config_vars"}, - snowflake_account="some_snowflake_account", - snowflake_user="some_snowflake_user", - snowflake_role="some_snowflake_role", - snowflake_warehouse="some_snowflake_warehouse", - snowflake_database="some_snowflake_database", - snowflake_schema="some_snowflake_schema", - change_history_table="some_history_table", - query_tag="some_query_tag", - oauth_config={"some": "values"}, - ) - e_info_value = str(e_info.value) - assert "Path is not valid directory: some_root_folder_name" in e_info_value - - @mock.patch("pathlib.Path.is_dir", side_effect=[True, False]) - def test_invalid_modules_folder(self, _): - with pytest.raises(Exception) as e_info: - DeployConfig.factory( - config_file_path=Path("some_config_file_name"), - root_folder="some_root_folder_name", - modules_folder="some_modules_folder_name", - config_vars={"some": "config_vars"}, - snowflake_account="some_snowflake_account", - snowflake_user="some_snowflake_user", - snowflake_role="some_snowflake_role", - snowflake_warehouse="some_snowflake_warehouse", - snowflake_database="some_snowflake_database", - snowflake_schema="some_snowflake_schema", - change_history_table="some_history_table", - query_tag="some_query_tag", - oauth_config={"some": "values"}, - ) - e_info_value = str(e_info.value) - assert "Path is not valid directory: some_modules_folder_name" in e_info_value - - def test_config_vars_not_a_dict(self): - with pytest.raises(Exception) as e_info: - BaseConfig.factory( 
- subcommand="deploy", - config_vars="a string", - config_file_path=Path("."), - ) - assert ( - "config_vars did not parse correctly, please check its configuration" - in str(e_info.value) - ) - - def test_config_vars_reserved_word(self): - with pytest.raises(Exception) as e_info: - BaseConfig.factory( - subcommand="deploy", - config_vars={"schemachange": "not allowed"}, - config_file_path=Path("."), - ) - assert ( - "The variable 'schemachange' has been reserved for use by schemachange, please use a different name" - in str(e_info.value) - ) - - def test_check_for_deploy_args_happy_path(self): - config = DeployConfig.factory( - snowflake_account="account", - snowflake_user="user", - snowflake_role="role", - snowflake_warehouse="warehouse", - config_file_path=Path("."), - ) - config.check_for_deploy_args() - - def test_check_for_deploy_args_exception(self): - config = DeployConfig.factory(config_file_path=Path(".")) - with pytest.raises(ValueError) as e: - config.check_for_deploy_args() - - assert "Missing config values. 
The following config values are required" in str( - e.value - ) - - -@mock.patch("pathlib.Path.is_file", return_value=False) -def test_render_config_invalid_path(_): - with pytest.raises(Exception) as e_info: - RenderConfig.factory(script_path="invalid path") - assert "invalid file path" in str(e_info) diff --git a/tests/config/test_DeployConfig.py b/tests/config/test_DeployConfig.py new file mode 100644 index 00000000..7c44c489 --- /dev/null +++ b/tests/config/test_DeployConfig.py @@ -0,0 +1,69 @@ +from __future__ import annotations + +from pathlib import Path +from unittest import mock + +import pytest + +from schemachange.config.BaseConfig import BaseConfig +from schemachange.config.DeployConfig import DeployConfig + +minimal_deploy_config_kwargs: dict = { + "snowflake_account": "some_snowflake_account", + "snowflake_user": "some_snowflake_user", + "snowflake_role": "some_snowflake_role", + "snowflake_warehouse": "some_snowflake_warehouse", +} + +complete_deploy_config_kwargs: dict = { + **minimal_deploy_config_kwargs, + "config_file_path": Path("some_config_file_name"), + "root_folder": "some_root_folder_name", + "modules_folder": "some_modules_folder_name", + "config_vars": {"some": "config_vars"}, + "snowflake_database": "some_snowflake_database", + "snowflake_schema": "some_snowflake_schema", + "change_history_table": "some_history_table", + "query_tag": "some_query_tag", +} + + +@mock.patch("pathlib.Path.is_dir", side_effect=[False]) +def test_invalid_root_folder(_): + with pytest.raises(Exception) as e_info: + DeployConfig.factory(**complete_deploy_config_kwargs) + e_info_value = str(e_info.value) + assert "Path is not valid directory: some_root_folder_name" in e_info_value + + +@mock.patch("pathlib.Path.is_dir", side_effect=[True, False]) +def test_invalid_modules_folder(_): + with pytest.raises(Exception) as e_info: + DeployConfig.factory(**complete_deploy_config_kwargs) + e_info_value = str(e_info.value) + assert "Path is not valid directory: 
some_modules_folder_name" in e_info_value + + +def test_config_vars_not_a_dict(): + with pytest.raises(Exception) as e_info: + BaseConfig.factory( + subcommand="deploy", + config_vars="a string", + config_file_path=Path("."), + ) + assert "config_vars did not parse correctly, please check its configuration" in str( + e_info.value + ) + + +def test_config_vars_reserved_word(): + with pytest.raises(Exception) as e_info: + BaseConfig.factory( + subcommand="deploy", + config_vars={"schemachange": "not allowed"}, + config_file_path=Path("."), + ) + assert ( + "The variable 'schemachange' has been reserved for use by schemachange, please use a different name" + in str(e_info.value) + ) diff --git a/tests/config/test_RenderConfig.py b/tests/config/test_RenderConfig.py new file mode 100644 index 00000000..af435e2c --- /dev/null +++ b/tests/config/test_RenderConfig.py @@ -0,0 +1,14 @@ +from __future__ import annotations + +from unittest import mock + +import pytest + +from schemachange.config.RenderConfig import RenderConfig + + +@mock.patch("pathlib.Path.is_file", return_value=False) +def test_render_config_invalid_path(_): + with pytest.raises(Exception) as e_info: + RenderConfig.factory(script_path="invalid path") + assert "invalid file path" in str(e_info) diff --git a/tests/config/test_get_config_secrets.py b/tests/config/test_get_config_secrets.py new file mode 100644 index 00000000..cb4f03f9 --- /dev/null +++ b/tests/config/test_get_config_secrets.py @@ -0,0 +1,79 @@ +from __future__ import annotations + +import pytest + +from schemachange.config.utils import get_config_secrets + + +def test_given_empty_config_should_not_error(): + get_config_secrets(config_vars={}) + + +def test_given_none_should_not_error(): + get_config_secrets(None) + + +@pytest.mark.parametrize( + "config_vars, secret", + [ + ({"secret": "secret_val1"}, "secret_val1"), + ({"SECret": "secret_val2"}, "secret_val2"), + ({"secret_key": "secret_val3"}, "secret_val3"), + ({"s3_bucket_secret": 
"secret_val4"}, "secret_val4"), + ({"s3SecretKey": "secret_val5"}, "secret_val5"), + ({"nested": {"s3_bucket_secret": "secret_val6"}}, "secret_val6"), + ], +) +def test_given__vars_with_keys_should_extract_secret(config_vars, secret): + results = get_config_secrets(config_vars) + assert secret in results + + +def test_given_vars_with_secrets_key_then_all_children_should_be_treated_as_secrets(): + config_vars = { + "secrets": { + "database_name": "database_name_val", + "schema_name": "schema_name_val", + "nested_secrets": {"SEC_ONE": "SEC_ONE_VAL"}, + } + } + results = get_config_secrets(config_vars=config_vars) + + assert len(results) == 3 + assert "database_name_val" in results + assert "schema_name_val" in results + assert "SEC_ONE_VAL" in results + + +def test_given_vars_with_nested_secrets_key_then_all_children_should_be_treated_as_secrets(): + config_vars = { + "nested": { + "secrets": { + "database_name": "database_name_val", + "schema_name": "schema_name_val", + "nested": {"SEC_ONE": "SEC_ONE_VAL"}, + } + } + } + + results = get_config_secrets(config_vars) + + assert len(results) == 3 + assert "database_name_val" in results + assert "schema_name_val" in results + assert "SEC_ONE_VAL" in results + + +def test_given_vars_with_same_secret_twice_then_only_extracted_once(): + config_vars = { + "secrets": { + "database_name": "SECRET_VALUE", + "schema_name": "SECRET_VALUE", + "nested_secrets": {"SEC_ONE": "SECRET_VALUE"}, + } + } + + results = get_config_secrets(config_vars) + + assert len(results) == 1 + assert "SECRET_VALUE" in results diff --git a/tests/config/test_get_merged_config.py b/tests/config/test_get_merged_config.py index f2676a93..fa7c9b42 100644 --- a/tests/config/test_get_merged_config.py +++ b/tests/config/test_get_merged_config.py @@ -1,160 +1,685 @@ +import logging + +import structlog from pathlib import Path from unittest import mock import pytest -from schemachange.config.ChangeHistoryTable import ChangeHistoryTable -from 
schemachange.config.get_merged_config import get_merged_config - -required_args = [ - "--snowflake-account", - "account", - "--snowflake-user", - "user", - "--snowflake-warehouse", - "warehouse", - "--snowflake-role", - "role", -] - - -class TestGetMergedConfig: - @mock.patch("pathlib.Path.is_dir", return_value=True) - def test_default_config_folder(self, _): - with mock.patch("sys.argv", ["schemachange", *required_args]): - config = get_merged_config() - assert ( - config.config_file_path == Path(".") / config.default_config_file_name - ) - - @mock.patch("pathlib.Path.is_dir", return_value=True) - def test_config_folder(self, _): - with mock.patch( - "sys.argv", ["schemachange", "--config-folder", "DUMMY", *required_args] - ): - config = get_merged_config() - assert ( - config.config_file_path - == Path("DUMMY") / config.default_config_file_name - ) - - @mock.patch("pathlib.Path.is_dir", return_value=False) - def test_invalid_config_folder(self, _): - with pytest.raises(Exception) as e_info: - with mock.patch( - "sys.argv", ["schemachange", "--config-folder", "DUMMY", *required_args] - ): - config = get_merged_config() - assert ( - config.config_file_path - == Path("DUMMY") / config.default_config_file_name - ) - e_info_value = str(e_info.value) - assert "Path is not valid directory: DUMMY" in e_info_value - - @mock.patch("pathlib.Path.is_dir", return_value=True) - def test_no_cli_args(self, _): - with mock.patch( - "sys.argv", ["schemachange", "--config-folder", str(Path(__file__).parent)] - ): - config = get_merged_config() - - assert config.snowflake_account == "snowflake-account-from-yaml" - assert config.snowflake_user == "snowflake-user-from-yaml" - assert config.snowflake_warehouse == '"snowflake-warehouse-from-yaml"' - assert config.snowflake_role == '"snowflake-role-from-yaml"' - assert str(config.root_folder) == "root-folder-from-yaml" - assert str(config.modules_folder) == "modules-folder-from-yaml" - assert config.snowflake_database == 
'"snowflake-database-from-yaml"' - assert config.snowflake_schema == '"snowflake-schema-from-yaml"' - assert config.change_history_table == ChangeHistoryTable( - table_name='"change-history-table-from-yaml"', - schema_name="SCHEMACHANGE", - database_name="METADATA", - ) - assert config.config_vars == {"var1": "from_yaml", "var2": "also_from_yaml"} - assert config.create_change_history_table is False - assert config.autocommit is False - assert config.dry_run is False - assert config.query_tag == "query-tag-from-yaml" - assert config.oauth_config == { - "token-provider-url": "token-provider-url-from-yaml", - "token-response-name": "token-response-name-from-yaml", - "token-request-headers": { - "Content-Type": "Content-Type-from-yaml", - "User-Agent": "User-Agent-from-yaml", +from schemachange.config.get_merged_config import ( + get_merged_config, + get_yaml_config_kwargs, +) + +default_cli_kwargs = { + "subcommand": "deploy", + "config_file_name": "schemachange-config.yml", + "config_vars": {}, +} + +assets_path = Path(__file__).parent + +schemachange_config = get_yaml_config_kwargs(assets_path / "schemachange-config.yml") +schemachange_config_full = get_yaml_config_kwargs( + assets_path / "schemachange-config-full.yml" +) +schemachange_config_full_no_connection = get_yaml_config_kwargs( + assets_path / "schemachange-config-full-no-connection.yml" +) +schemachange_config_partial_with_connection = get_yaml_config_kwargs( + assets_path / "schemachange-config-partial-with-connection.yml" +) + + +@pytest.mark.parametrize( + "cli_kwargs, yaml_kwargs, expected", + [ + pytest.param( + {**default_cli_kwargs}, # cli_kwargs + {}, # yaml_kwargs + { # expected + "config_file_path": Path("schemachange-config.yml"), + "config_vars": {}, + "subcommand": "deploy", + }, + id="Deploy: Only required arguments", + ), + pytest.param( + {**default_cli_kwargs}, # cli_kwargs + {}, # yaml_kwargs + { # expected + "config_file_path": Path("schemachange-config.yml"), + "config_vars": {}, + 
"subcommand": "deploy", + }, + id="Deploy: all connection_kwargs", + ), + pytest.param( + {**default_cli_kwargs}, # cli_kwargs + { # yaml_kwargs + "root_folder": "yaml_root_folder", + "modules_folder": "yaml_modules_folder", + "config_vars": { + "variable_1": "yaml_variable_1", + "variable_2": "yaml_variable_2", + "variable_3": "yaml_variable_3", + }, + "log_level": logging.DEBUG, + "snowflake_account": "yaml_snowflake_account", + "snowflake_user": "yaml_snowflake_user", + "snowflake_role": "yaml_snowflake_role", + "snowflake_warehouse": "yaml_snowflake_warehouse", + "snowflake_database": "yaml_snowflake_database", + "snowflake_schema": "yaml_snowflake_schema", + "connections_file_path": "yaml_connections_file_path", + "connection_name": "yaml_connection_name", + "change_history_table": "yaml_change_history_table", + "create_change_history_table": True, + "autocommit": True, + "dry_run": True, + "query_tag": "yaml_query_tag", + }, + { # expected + "log_level": logging.DEBUG, + "config_file_path": Path("schemachange-config.yml"), + "config_vars": { + "variable_1": "yaml_variable_1", + "variable_2": "yaml_variable_2", + "variable_3": "yaml_variable_3", }, - "token-request-payload": { - "client_id": "id-from-yaml", - "username": "username-from-yaml", - "password": "password-from-yaml", - "grant_type": "type-from-yaml", - "scope": "scope-from-yaml", + "subcommand": "deploy", + "root_folder": "yaml_root_folder", + "modules_folder": "yaml_modules_folder", + "snowflake_account": "yaml_snowflake_account", + "snowflake_user": "yaml_snowflake_user", + "snowflake_role": "yaml_snowflake_role", + "snowflake_warehouse": "yaml_snowflake_warehouse", + "snowflake_database": "yaml_snowflake_database", + "snowflake_schema": "yaml_snowflake_schema", + "connections_file_path": Path("yaml_connections_file_path"), + "connection_name": "yaml_connection_name", + "change_history_table": "yaml_change_history_table", + "create_change_history_table": True, + "autocommit": True, + "dry_run": 
True, + "query_tag": "yaml_query_tag", + }, + id="Deploy: all yaml, all connection_kwargs", + ), + pytest.param( + { # cli_kwargs + **default_cli_kwargs, + "config_folder": "cli_config_folder", + "root_folder": "cli_root_folder", + "modules_folder": "cli_modules_folder", + "config_vars": { + "variable_1": "cli_variable_1", + "variable_2": "cli_variable_2", }, - } - - @mock.patch("pathlib.Path.is_dir", return_value=True) - def test_all_cli_args(self, _): - with mock.patch( - "sys.argv", - [ - "schemachange", - "--config-folder", - str(Path(__file__).parent), - "--root-folder", - "root-folder-from-cli", - "--modules-folder", - "modules-folder-from-cli", - "--vars", - '{"var1": "from_cli", "var3": "also_from_cli"}', - "--snowflake-account", - "snowflake-account-from-cli", - "--snowflake-user", - "snowflake-user-from-cli", - "--snowflake-role", - "snowflake-role-from-cli", - "--snowflake-warehouse", - "snowflake-warehouse-from-cli", - "--snowflake-database", - "snowflake-database-from-cli", - "--snowflake-schema", - "snowflake-schema-from-cli", - "--change-history-table", - "change-history-table-from-cli", - "--create-change-history-table", - "--autocommit", - "--dry-run", - "--query-tag", - "query-tag-from-cli", - "--oauth-config", - '{"token-provider-url": "https//...", "token-request-payload": {"client_id": "GUID_xyz"} }', - ], - ): - config = get_merged_config() - - assert config.snowflake_account == "snowflake-account-from-cli" - assert config.snowflake_user == "snowflake-user-from-cli" - assert config.snowflake_warehouse == '"snowflake-warehouse-from-cli"' - assert config.snowflake_role == '"snowflake-role-from-cli"' - assert str(config.root_folder) == "root-folder-from-cli" - assert str(config.modules_folder) == "modules-folder-from-cli" - assert config.snowflake_database == '"snowflake-database-from-cli"' - assert config.snowflake_schema == '"snowflake-schema-from-cli"' - assert config.change_history_table == ChangeHistoryTable( - 
table_name='"change-history-table-from-cli"', - schema_name="SCHEMACHANGE", - database_name="METADATA", - ) - assert config.config_vars == { - "var1": "from_cli", - "var2": "also_from_yaml", - "var3": "also_from_cli", - } - assert config.create_change_history_table is True - assert config.autocommit is True - assert config.dry_run is True - assert config.query_tag == "query-tag-from-cli" - assert config.oauth_config == { - "token-provider-url": "https//...", - "token-request-payload": {"client_id": "GUID_xyz"}, - } + "log_level": logging.INFO, + "snowflake_account": "cli_snowflake_account", + "snowflake_user": "cli_snowflake_user", + "snowflake_role": "cli_snowflake_role", + "snowflake_warehouse": "cli_snowflake_warehouse", + "snowflake_database": "cli_snowflake_database", + "snowflake_schema": "cli_snowflake_schema", + "connections_file_path": "cli_connections_file_path", + "connection_name": "cli_connection_name", + "change_history_table": "cli_change_history_table", + "create_change_history_table": False, + "autocommit": False, + "dry_run": False, + "query_tag": "cli_query_tag", + }, + { # yaml_kwargs + "root_folder": "yaml_root_folder", + "modules_folder": "yaml_modules_folder", + "config_vars": { + "variable_1": "yaml_variable_1", + "variable_2": "yaml_variable_2", + "variable_3": "yaml_variable_3", + }, + "log_level": logging.DEBUG, + "snowflake_account": "yaml_snowflake_account", + "snowflake_user": "yaml_snowflake_user", + "snowflake_role": "yaml_snowflake_role", + "snowflake_warehouse": "yaml_snowflake_warehouse", + "snowflake_database": "yaml_snowflake_database", + "snowflake_schema": "yaml_snowflake_schema", + "connections_file_path": "yaml_connections_file_path", + "connection_name": "yaml_connection_name", + "change_history_table": "yaml_change_history_table", + "create_change_history_table": True, + "autocommit": True, + "dry_run": True, + "query_tag": "yaml_query_tag", + }, + { # expected + "log_level": logging.INFO, + "config_file_path": 
Path("cli_config_folder/schemachange-config.yml"), + "config_vars": { + "variable_1": "cli_variable_1", + "variable_2": "cli_variable_2", + "variable_3": "yaml_variable_3", + }, + "subcommand": "deploy", + "root_folder": "cli_root_folder", + "modules_folder": "cli_modules_folder", + "snowflake_account": "cli_snowflake_account", + "snowflake_user": "cli_snowflake_user", + "snowflake_role": "cli_snowflake_role", + "snowflake_warehouse": "cli_snowflake_warehouse", + "snowflake_database": "cli_snowflake_database", + "snowflake_schema": "cli_snowflake_schema", + "connections_file_path": Path("cli_connections_file_path"), + "connection_name": "cli_connection_name", + "change_history_table": "cli_change_history_table", + "create_change_history_table": False, + "autocommit": False, + "dry_run": False, + "query_tag": "cli_query_tag", + }, + id="Deploy: all cli, all yaml, all connection_kwargs", + ), + pytest.param( + { # cli_kwargs + **default_cli_kwargs, + "config_folder": "cli_config_folder", + "root_folder": "cli_root_folder", + "modules_folder": "cli_modules_folder", + "config_vars": { + "variable_1": "cli_variable_1", + "variable_2": "cli_variable_2", + }, + "log_level": logging.INFO, + "snowflake_account": "cli_snowflake_account", + "snowflake_user": "cli_snowflake_user", + "snowflake_role": "cli_snowflake_role", + "snowflake_warehouse": "cli_snowflake_warehouse", + "snowflake_database": "cli_snowflake_database", + "snowflake_schema": "cli_snowflake_schema", + "connections_file_path": "cli_connections_file_path", + "connection_name": "cli_connection_name", + "change_history_table": "cli_change_history_table", + "create_change_history_table": False, + "autocommit": False, + "dry_run": False, + "query_tag": "cli_query_tag", + }, + { # yaml_kwargs + "root_folder": "yaml_root_folder", + "modules_folder": "yaml_modules_folder", + "config_vars": { + "variable_1": "yaml_variable_1", + "variable_2": "yaml_variable_2", + "variable_3": "yaml_variable_3", + }, + "log_level": 
logging.DEBUG, + "snowflake_account": "yaml_snowflake_account", + "snowflake_user": "yaml_snowflake_user", + "snowflake_role": "yaml_snowflake_role", + "snowflake_warehouse": "yaml_snowflake_warehouse", + "snowflake_database": "yaml_snowflake_database", + "snowflake_schema": "yaml_snowflake_schema", + "connections_file_path": "yaml_connections_file_path", + "connection_name": "yaml_connection_name", + "change_history_table": "yaml_change_history_table", + "create_change_history_table": True, + "autocommit": True, + "dry_run": True, + "query_tag": "yaml_query_tag", + }, + { # expected + "log_level": logging.INFO, + "config_file_path": Path("cli_config_folder/schemachange-config.yml"), + "config_vars": { + "variable_1": "cli_variable_1", + "variable_2": "cli_variable_2", + "variable_3": "yaml_variable_3", + }, + "subcommand": "deploy", + "root_folder": "cli_root_folder", + "modules_folder": "cli_modules_folder", + "snowflake_account": "cli_snowflake_account", + "snowflake_user": "cli_snowflake_user", + "snowflake_role": "cli_snowflake_role", + "snowflake_warehouse": "cli_snowflake_warehouse", + "snowflake_database": "cli_snowflake_database", + "snowflake_schema": "cli_snowflake_schema", + "connections_file_path": Path("cli_connections_file_path"), + "connection_name": "cli_connection_name", + "change_history_table": "cli_change_history_table", + "create_change_history_table": False, + "autocommit": False, + "dry_run": False, + "query_tag": "cli_query_tag", + }, + id="Deploy: all env, all cli, all yaml, all connection_kwargs", + ), + ], +) +@mock.patch("pathlib.Path.is_dir", return_value=True) +@mock.patch("pathlib.Path.is_file", return_value=True) +@mock.patch("schemachange.config.get_merged_config.parse_cli_args") +@mock.patch("schemachange.config.get_merged_config.get_yaml_config_kwargs") +@mock.patch("schemachange.config.get_merged_config.DeployConfig.factory") +def test_get_merged_config_inheritance( + mock_deploy_config_factory, + mock_get_yaml_config_kwargs, + 
mock_parse_cli_args, + _, + __, + cli_kwargs, + yaml_kwargs, + expected, +): + mock_parse_cli_args.return_value = {**cli_kwargs} + mock_get_yaml_config_kwargs.return_value = {**yaml_kwargs} + logger = structlog.testing.CapturingLogger() + # noinspection PyTypeChecker + get_merged_config(logger=logger) + factory_kwargs = mock_deploy_config_factory.call_args.kwargs + for actual_key, actual_value in factory_kwargs.items(): + assert expected[actual_key] == actual_value + del expected[actual_key] + assert len(expected.keys()) == 0 + + +@mock.patch("pathlib.Path.is_dir", return_value=False) +@mock.patch("schemachange.config.get_merged_config.parse_cli_args") +def test_invalid_config_folder(mock_parse_cli_args, _): + cli_kwargs = { + "config_folder": "cli_config_folder", + **default_cli_kwargs, + } + mock_parse_cli_args.return_value = {**cli_kwargs} + logger = structlog.testing.CapturingLogger() + with pytest.raises(Exception) as e_info: + # noinspection PyTypeChecker + get_merged_config(logger=logger) + assert f"Path is not valid directory: {cli_kwargs['config_folder']}" in str( + e_info.value + ) + + +param_only_required_cli_arguments = pytest.param( + [ # cli_args + "schemachange", + "--config-folder", + str(assets_path), + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config.yml", + "config_version": 1, + "config_vars": {}, + "log_level": logging.INFO, + }, + id="Deploy: Only required cli arguments", +) + +param_full_cli_and_connection = pytest.param( + [ # cli_args + "schemachange", + "--config-folder", + str(assets_path), + "--root-folder", + "root-folder-from-cli", + "--modules-folder", + "modules-folder-from-cli", + "--vars", + '{"var1": "from_cli", "var3": "also_from_cli"}', + "--snowflake-account", + "snowflake-account-from-cli", + "--snowflake-user", + "snowflake-user-from-cli", + "--snowflake-role", + "snowflake-role-from-cli", + "--snowflake-warehouse", + "snowflake-warehouse-from-cli", + "--snowflake-database", 
+ "snowflake-database-from-cli", + "--snowflake-schema", + "snowflake-schema-from-cli", + "--connections-file-path", + str(assets_path / "alt-connections.toml"), + "--connection-name", + "myaltconnection", + "--change-history-table", + "change-history-table-from-cli", + "--create-change-history-table", + "--autocommit", + "--dry-run", + "--query-tag", + "query-tag-from-cli", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config.yml", + "config_version": 1, + "root_folder": "root-folder-from-cli", + "modules_folder": "modules-folder-from-cli", + "snowflake_account": "snowflake-account-from-cli", + "snowflake_user": "snowflake-user-from-cli", + "snowflake_role": "snowflake-role-from-cli", + "snowflake_warehouse": "snowflake-warehouse-from-cli", + "snowflake_database": "snowflake-database-from-cli", + "snowflake_schema": "snowflake-schema-from-cli", + "change_history_table": "change-history-table-from-cli", + "config_vars": { + "var1": "from_cli", + "var3": "also_from_cli", + }, + "create_change_history_table": True, + "autocommit": True, + "log_level": logging.INFO, + "dry_run": True, + "query_tag": "query-tag-from-cli", + "connection_name": "myaltconnection", + "connections_file_path": assets_path / "alt-connections.toml", + }, + id="Deploy: full cli and connections.toml", +) + +param_full_yaml_no_connection = pytest.param( + [ # cli_args + "schemachange", + "--config-folder", + str(assets_path), + "--config-file-name", + "schemachange-config-full-no-connection.yml", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config-full-no-connection.yml", + "log_level": logging.INFO, + **{ + k: v + for k, v in schemachange_config_full_no_connection.items() + if k + in [ + "config_version", + "root_folder", + "modules_folder", + "snowflake_account", + "snowflake_user", + "snowflake_role", + "snowflake_warehouse", + "snowflake_database", + "snowflake_schema", + 
"change_history_table", + "config_vars", + "create_change_history_table", + "autocommit", + "dry_run", + "query_tag", + ] + }, + }, + id="Deploy: yaml, no connections.toml", +) + +param_full_yaml_and_connection = pytest.param( + [ # cli_args + "schemachange", + "--config-folder", + str(assets_path), + "--config-file-name", + "schemachange-config-full.yml", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config-full.yml", + "log_level": logging.INFO, + "connections_file_path": assets_path / "connections.toml", + **{ + k: v + for k, v in schemachange_config_full.items() + if k + in [ + "config_version", + "root_folder", + "modules_folder", + "snowflake_account", + "snowflake_user", + "snowflake_role", + "snowflake_warehouse", + "snowflake_database", + "snowflake_schema", + "change_history_table", + "snowflake_private_key_path", + "config_vars", + "create_change_history_table", + "autocommit", + "dry_run", + "query_tag", + "connection_name", + ] + }, + }, + id="Deploy: full yaml and connections.toml", +) + +param_full_yaml_and_connection_and_cli = pytest.param( + [ # cli_args + "schemachange", + "--config-folder", + str(assets_path), + "--config-file-name", + "schemachange-config-full.yml", + "--root-folder", + "root-folder-from-cli", + "--modules-folder", + "modules-folder-from-cli", + "--vars", + '{"var1": "from_cli", "var3": "also_from_cli"}', + "--snowflake-account", + "snowflake-account-from-cli", + "--snowflake-user", + "snowflake-user-from-cli", + "--snowflake-role", + "snowflake-role-from-cli", + "--snowflake-warehouse", + "snowflake-warehouse-from-cli", + "--snowflake-database", + "snowflake-database-from-cli", + "--snowflake-schema", + "snowflake-schema-from-cli", + "--connections-file-path", + str(assets_path / "alt-connections.toml"), + "--connection-name", + "myaltconnection", + "--change-history-table", + "change-history-table-from-cli", + "--create-change-history-table", + "--autocommit", + "--dry-run", + 
"--query-tag", + "query-tag-from-cli", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config-full.yml", + "config_version": 1, + "root_folder": "root-folder-from-cli", + "modules_folder": "modules-folder-from-cli", + "snowflake_account": "snowflake-account-from-cli", + "snowflake_user": "snowflake-user-from-cli", + "snowflake_role": "snowflake-role-from-cli", + "snowflake_warehouse": "snowflake-warehouse-from-cli", + "snowflake_database": "snowflake-database-from-cli", + "snowflake_schema": "snowflake-schema-from-cli", + "change_history_table": "change-history-table-from-cli", + "config_vars": { + "var1": "from_cli", + "var2": "also_from_yaml", + "var3": "also_from_cli", + }, + "create_change_history_table": True, + "autocommit": True, + "log_level": logging.INFO, + "dry_run": True, + "query_tag": "query-tag-from-cli", + "connection_name": "myaltconnection", + "connections_file_path": assets_path / "alt-connections.toml", + }, + id="Deploy: full yaml, connections.toml, and cli", +) + +param_full_yaml_and_connection_and_cli_and_env = pytest.param( + [ # cli_args + "schemachange", + "--config-folder", + str(assets_path), + "--config-file-name", + "schemachange-config-full.yml", + "--root-folder", + "root-folder-from-cli", + "--modules-folder", + "modules-folder-from-cli", + "--vars", + '{"var1": "from_cli", "var3": "also_from_cli"}', + "--snowflake-account", + "snowflake-account-from-cli", + "--snowflake-user", + "snowflake-user-from-cli", + "--snowflake-role", + "snowflake-role-from-cli", + "--snowflake-warehouse", + "snowflake-warehouse-from-cli", + "--snowflake-database", + "snowflake-database-from-cli", + "--snowflake-schema", + "snowflake-schema-from-cli", + "--connections-file-path", + str(assets_path / "alt-connections.toml"), + "--connection-name", + "myaltconnection", + "--change-history-table", + "change-history-table-from-cli", + "--create-change-history-table", + "--autocommit", + "--dry-run", + 
"--query-tag", + "query-tag-from-cli", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config-full.yml", + "config_version": 1, + "root_folder": "root-folder-from-cli", + "modules_folder": "modules-folder-from-cli", + "snowflake_account": "snowflake-account-from-cli", + "snowflake_user": "snowflake-user-from-cli", + "snowflake_role": "snowflake-role-from-cli", + "snowflake_warehouse": "snowflake-warehouse-from-cli", + "snowflake_database": "snowflake-database-from-cli", + "snowflake_schema": "snowflake-schema-from-cli", + "change_history_table": "change-history-table-from-cli", + "config_vars": { + "var1": "from_cli", + "var2": "also_from_yaml", + "var3": "also_from_cli", + }, + "create_change_history_table": True, + "autocommit": True, + "log_level": logging.INFO, + "dry_run": True, + "query_tag": "query-tag-from-cli", + "connection_name": "myaltconnection", + "connections_file_path": assets_path / "alt-connections.toml", + }, + id="Deploy: full yaml, connections.toml, cli, and env", +) + + +param_connection_no_yaml = pytest.param( + [ # cli_args + "schemachange", + "--config-folder", + str(assets_path), + "--connections-file-path", + str(assets_path / "connections.toml"), + "--connection-name", + "myconnection", + ], + { # expected + "subcommand": "deploy", + "connections_file_path": assets_path / "connections.toml", + "connection_name": "myconnection", + "config_file_path": assets_path / "schemachange-config.yml", + "config_version": 1, + "config_vars": {}, + "log_level": logging.INFO, + }, + id="Deploy: connections.toml, no yaml", +) + +param_partial_yaml_and_connection = pytest.param( + [ # cli_arg + "schemachange", + "--config-folder", + str(assets_path), + "--config-file-name", + "schemachange-config-partial-with-connection.yml", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path + / "schemachange-config-partial-with-connection.yml", + "log_level": logging.INFO, + 
"connections_file_path": assets_path / "connections.toml", + **{ + k: v + for k, v in schemachange_config_partial_with_connection.items() + if k + in [ + "config_version", + "root_folder", + "modules_folder", + "change_history_table", + "config_vars", + "create_change_history_table", + "autocommit", + "dry_run", + "query_tag", + "connection_name", + ] + }, + }, + id="Deploy: partial yaml and connections.toml", +) + + +@pytest.mark.parametrize( + "cli_args, expected", + [ + param_only_required_cli_arguments, + param_full_cli_and_connection, + param_full_yaml_no_connection, + param_full_yaml_and_connection, + param_full_yaml_and_connection_and_cli, + param_full_yaml_and_connection_and_cli_and_env, + param_connection_no_yaml, + param_partial_yaml_and_connection, + ], +) +@mock.patch("pathlib.Path.is_dir", return_value=True) +@mock.patch("pathlib.Path.is_file", return_value=True) +@mock.patch("schemachange.config.get_merged_config.DeployConfig.factory") +def test_integration_get_merged_config_inheritance( + mock_deploy_config_factory, + _, + __, + cli_args, + expected, +): + logger = structlog.testing.CapturingLogger() + with mock.patch("sys.argv", cli_args): + # noinspection PyTypeChecker + get_merged_config(logger=logger) + factory_kwargs = mock_deploy_config_factory.call_args.kwargs + for actual_key, actual_value in factory_kwargs.items(): + assert expected[actual_key] == actual_value + del expected[actual_key] + assert len(expected.keys()) == 0 diff --git a/tests/config/test_get_yaml_config.py b/tests/config/test_get_yaml_config.py index 9748fff9..a389163d 100644 --- a/tests/config/test_get_yaml_config.py +++ b/tests/config/test_get_yaml_config.py @@ -22,7 +22,7 @@ def test_load_yaml_config__simple_config_file(tmp_path: Path): vars: database_name: SCHEMACHANGE_DEMO_JINJA """ - config_file = tmp_path / "schemachange-config.yml" + config_file = tmp_path / "schemachange-config-full.yml" config_file.write_text(config_contents) # noinspection PyTypeChecker @@ -45,7 
+45,7 @@ def test_load_yaml_config__with_env_var_should_populate_value( vars: database_name: SCHEMACHANGE_DEMO_JINJA """ - config_file = tmp_path / "schemachange-config.yml" + config_file = tmp_path / "schemachange-config-full.yml" config_file.write_text(config_contents) config = load_yaml_config(config_file) @@ -63,7 +63,7 @@ def test_load_yaml_config__requiring_env_var_but_env_var_not_set_should_raise_ex vars: database_name: SCHEMACHANGE_DEMO_JINJA """ - config_file = tmp_path / "schemachange-config.yml" + config_file = tmp_path / "schemachange-config-full.yml" config_file.write_text(config_contents) with pytest.raises(ValueError) as e: @@ -76,7 +76,7 @@ def test_load_yaml_config__requiring_env_var_but_env_var_not_set_should_raise_ex @mock.patch("pathlib.Path.is_dir", return_value=True) def test_get_yaml_config(_): - config_file_path = Path(__file__).parent / "schemachange-config.yml" + config_file_path = Path(__file__).parent / "schemachange-config-full.yml" yaml_config = get_yaml_config_kwargs(config_file_path=config_file_path) assert str(yaml_config["root_folder"]) == "root-folder-from-yaml" assert str(yaml_config["modules_folder"]) == "modules-folder-from-yaml" @@ -94,18 +94,3 @@ def test_get_yaml_config(_): assert yaml_config["dry_run"] is False assert yaml_config["config_vars"] == {"var1": "from_yaml", "var2": "also_from_yaml"} - assert yaml_config["oauth_config"] == { - "token-provider-url": "token-provider-url-from-yaml", - "token-request-headers": { - "Content-Type": "Content-Type-from-yaml", - "User-Agent": "User-Agent-from-yaml", - }, - "token-request-payload": { - "client_id": "id-from-yaml", - "grant_type": "type-from-yaml", - "password": "password-from-yaml", - "scope": "scope-from-yaml", - "username": "username-from-yaml", - }, - "token-response-name": "token-response-name-from-yaml", - } diff --git a/tests/config/test_parse_cli_args.py b/tests/config/test_parse_cli_args.py index 31e5fa73..11f5c555 100644 --- a/tests/config/test_parse_cli_args.py 
+++ b/tests/config/test_parse_cli_args.py @@ -7,7 +7,11 @@ def test_parse_args_defaults(): args: list[str] = [] - test_args = [("--config-folder", None, ".")] + test_args = [ + ("--config-folder", None, "."), + ("--config-file-name", None, "schemachange-config.yml"), + ("--config-vars", None, {}), + ] expected: dict[str, str | int | None] = {} for arg, value, expected_value in test_args: if value: @@ -18,9 +22,6 @@ def test_parse_args_defaults(): parsed_args = parse_cli_args(args) for expected_arg, expected_value in expected.items(): assert parsed_args[expected_arg] == expected_value - assert parsed_args["create_change_history_table"] is None - assert parsed_args["autocommit"] is None - assert parsed_args["dry_run"] is None assert parsed_args["subcommand"] == "deploy" @@ -30,6 +31,7 @@ def test_parse_args_deploy_names(): valued_test_args: list[tuple[str, str, str]] = [ ("--config-folder", "some_config_folder_name", "some_config_folder_name"), + ("--config-file-name", "some_config_file_name", "some_config_file_name"), ("--root-folder", "some_root_folder_name", "some_root_folder_name"), ("--modules-folder", "some_modules_folder_name", "some_modules_folder_name"), ("--vars", json.dumps({"some": "vars"}), {"some": "vars"}), @@ -43,9 +45,14 @@ def test_parse_args_deploy_names(): ), ("--snowflake-database", "some_snowflake_database", "some_snowflake_database"), ("--snowflake-schema", "some_snowflake_schema", "some_snowflake_schema"), + ( + "--connections-file-path", + "some_connections_file_path", + "some_connections_file_path", + ), + ("--connection-name", "some_connection_name", "some_connection_name"), ("--change-history-table", "some_history_table", "some_history_table"), ("--query-tag", "some_query_tag", "some_query_tag"), - ("--oauth-config", json.dumps({"some": "values"}), {"some": "values"}), ] for arg, value, expected_value in valued_test_args: diff --git a/tests/session/test_utils.py b/tests/config/test_utils.py similarity index 58% rename from 
tests/session/test_utils.py rename to tests/config/test_utils.py index a3077b52..048814e8 100644 --- a/tests/session/test_utils.py +++ b/tests/config/test_utils.py @@ -1,14 +1,14 @@ from __future__ import annotations import os +from pathlib import Path from unittest import mock import pytest -from schemachange.session.utils import ( - get_snowflake_password, - get_private_key_password, -) +from schemachange.config.utils import get_snowflake_password + +assets_path = Path(__file__).parent @pytest.mark.parametrize( @@ -28,16 +28,3 @@ def test_get_snowflake_password(env_vars: dict, expected: str): with mock.patch.dict(os.environ, env_vars, clear=True): result = get_snowflake_password() assert result == expected - - -@pytest.mark.parametrize( - "env_vars, expected", - [ - ({"SNOWFLAKE_PRIVATE_KEY_PASSPHRASE": "my-passphrase"}, b"my-passphrase"), - ({}, None), - ], -) -def test_get_private_key_password(env_vars: dict, expected: str): - with mock.patch.dict(os.environ, env_vars, clear=True): - result = get_private_key_password() - assert result == expected diff --git a/tests/session/test_Credential.py b/tests/session/test_Credential.py deleted file mode 100644 index 6239645f..00000000 --- a/tests/session/test_Credential.py +++ /dev/null @@ -1,109 +0,0 @@ -from __future__ import annotations - -import json -import os -from unittest import mock -from unittest.mock import MagicMock - -import pytest -import structlog - -from schemachange.session.Credential import ( - credential_factory, - PasswordCredential, - ExternalBrowserCredential, - OktaCredential, - PrivateKeyCredential, - OauthCredential, -) - - -# noinspection PyTypeChecker -@pytest.mark.parametrize( - "env_vars, oauth_config, expected", - [ - ( - {"SNOWFLAKE_PASSWORD": "my-password"}, - None, - PasswordCredential(password="my-password"), - ), - ( - { - "SNOWFLAKE_PASSWORD": "my-password", - "SNOWFLAKE_AUTHENTICATOR": "snowflake", - }, - None, - PasswordCredential(password="my-password"), - ), - ( - { - 
"SNOWFLAKE_AUTHENTICATOR": "oauth", - }, - { - "token-provider-url": "token-provider-url-from-yaml", - "token-response-name": "token-response-name-from-yaml", - "token-request-headers": { - "Content-Type": "Content-Type-from-yaml", - "User-Agent": "User-Agent-from-yaml", - }, - "token-request-payload": { - "client_id": "id-from-yaml", - "username": "username-from-yaml", - "password": "password-from-yaml", - "grant_type": "type-from-yaml", - "scope": "scope-from-yaml", - }, - }, - OauthCredential(token="my-token"), - ), - ( - { - "SNOWFLAKE_AUTHENTICATOR": "externalbrowser", - }, - None, - ExternalBrowserCredential(), - ), - ( - { - "SNOWFLAKE_AUTHENTICATOR": "https://someurl.com", - "SNOWFLAKE_PASSWORD": "my-password", - }, - None, - OktaCredential(authenticator="https://someurl.com", password="my-password"), - ), - ( - { - "SNOWFLAKE_PRIVATE_KEY_PATH": "some_path", - "SNOWFLAKE_AUTHENTICATOR": "snowflake", - }, - None, - PrivateKeyCredential(private_key="some_path"), - ), - ], -) -@mock.patch( - "schemachange.session.Credential.get_private_key_bytes", - return_value="some_path", -) -@mock.patch("requests.post") -def test_credential_factory( - mock_post, _, env_vars: dict, oauth_config: dict | None, expected: str -): - mock_response = MagicMock() - mock_response.text = json.dumps({"token-response-name-from-yaml": "my-token"}) - mock_post.return_value = mock_response - logger = structlog.testing.CapturingLogger() - - with mock.patch.dict(os.environ, env_vars, clear=True): - # noinspection PyTypeChecker - result = credential_factory(oauth_config=oauth_config, logger=logger) - assert result == expected - - -@pytest.mark.parametrize("env_vars", [{}]) -def test_credential_factory_unhandled(env_vars): - logger = structlog.testing.CapturingLogger() - with pytest.raises(NameError): - with mock.patch.dict(os.environ, env_vars, clear=True): - # noinspection PyTypeChecker - credential_factory(logger=logger) diff --git a/tests/session/test_SnowflakeSession.py 
b/tests/session/test_SnowflakeSession.py index 68f308a5..647852d7 100644 --- a/tests/session/test_SnowflakeSession.py +++ b/tests/session/test_SnowflakeSession.py @@ -6,29 +6,29 @@ import structlog from schemachange.config.ChangeHistoryTable import ChangeHistoryTable -from schemachange.session.Credential import ExternalBrowserCredential from schemachange.session.SnowflakeSession import SnowflakeSession @pytest.fixture def session() -> SnowflakeSession: - credential = ExternalBrowserCredential(password="password") change_history_table = ChangeHistoryTable() logger = structlog.testing.CapturingLogger() with mock.patch("snowflake.connector.connect"): - # noinspection PyTypeChecker - return SnowflakeSession( - snowflake_user="user", - snowflake_account="account", - snowflake_role="role", - snowflake_warehouse="warehouse", - schemachange_version="3.6.1.dev", - application="schemachange", - credential=credential, - change_history_table=change_history_table, - logger=logger, - ) + with mock.patch( + "schemachange.session.SnowflakeSession.get_snowflake_identifier_string" + ): + # noinspection PyTypeChecker + return SnowflakeSession( + user="user", + account="account", + role="role", + warehouse="warehouse", + schemachange_version="3.6.1.dev", + application="schemachange", + change_history_table=change_history_table, + logger=logger, + ) class TestSnowflakeSession: @@ -37,7 +37,7 @@ def test_fetch_change_history_metadata_exists(self, session: SnowflakeSession): result = session.fetch_change_history_metadata() assert result == {"created": "created", "last_altered": "last_altered"} assert session.con.execute_string.call_count == 1 - assert session.logger.calls[0][1][0] == "Executing query" + assert session.logger.calls[1][1][0] == "Executing query" def test_fetch_change_history_metadata_does_not_exist( self, session: SnowflakeSession @@ -46,4 +46,4 @@ def test_fetch_change_history_metadata_does_not_exist( result = session.fetch_change_history_metadata() assert result == {} 
assert session.con.execute_string.call_count == 1 - assert session.logger.calls[0][1][0] == "Executing query" + assert session.logger.calls[1][1][0] == "Executing query" diff --git a/tests/test_cli_misc.py b/tests/test_cli_misc.py index 97bbc686..08e90669 100644 --- a/tests/test_cli_misc.py +++ b/tests/test_cli_misc.py @@ -3,7 +3,6 @@ import pytest from schemachange.cli import SCHEMACHANGE_VERSION, SNOWFLAKE_APPLICATION_NAME -from schemachange.config.BaseConfig import BaseConfig from schemachange.config.ChangeHistoryTable import ChangeHistoryTable from schemachange.config.utils import get_snowflake_identifier_string from schemachange.deploy import alphanum_convert, get_alphanum_key, sorted_alphanumeric @@ -14,7 +13,6 @@ def test_cli_given__schemachange_version_change_updated_in_setup_config_file(): def test_cli_given__constants_exist(): - assert BaseConfig.default_config_file_name == "schemachange-config.yml" assert ChangeHistoryTable._default_database_name == "METADATA" assert ChangeHistoryTable._default_schema_name == "SCHEMACHANGE" assert ChangeHistoryTable._default_table_name == "CHANGE_HISTORY" diff --git a/tests/test_main.py b/tests/test_main.py index 384c4048..cdbc661a 100644 --- a/tests/test_main.py +++ b/tests/test_main.py @@ -12,10 +12,13 @@ from schemachange.config.ChangeHistoryTable import ChangeHistoryTable import schemachange.cli as cli +from schemachange.config.utils import get_snowflake_identifier_string + +assets_path = Path(__file__).parent / "config" default_base_config = { # Shared configuration options - "config_file_path": Path(".") / "schemachange-config.yml", + "config_file_path": assets_path / "schemachange-config.yml", "root_folder": Path("."), "modules_folder": None, "config_vars": {}, @@ -30,6 +33,8 @@ "snowflake_warehouse": None, "snowflake_database": None, "snowflake_schema": None, + "connections_file_path": None, + "connection_name": None, "change_history_table": ChangeHistoryTable( table_name="CHANGE_HISTORY", 
schema_name="SCHEMACHANGE", @@ -39,10 +44,11 @@ "autocommit": False, "dry_run": False, "query_tag": None, - "oauth_config": None, } required_args = [ + "--config-folder", + str(assets_path), "--snowflake-account", "account", "--snowflake-user", @@ -54,218 +60,389 @@ ] required_config = { + "config_file_path": assets_path / "schemachange-config.yml", "snowflake_account": "account", "snowflake_user": "user", "snowflake_warehouse": "warehouse", "snowflake_role": "role", } -script_path = Path(__file__).parent.parent / "demo" / "basics_demo" / "A__basic001.sql" +script_path = ( + Path(__file__).parent.parent / "demo" / "basics_demo" / "2_test" / "A__basic001.sql" +) +no_command = pytest.param( + "schemachange.cli.deploy", + ["schemachange", *required_args], + {**default_deploy_config, **required_config}, + None, + id="no command", +) -@pytest.mark.parametrize( - "to_mock, cli_args, expected_config, expected_script_path", +deploy_only_required = pytest.param( + "schemachange.cli.deploy", + ["schemachange", "deploy", *required_args], + {**default_deploy_config, **required_config}, + None, + id="deploy: only required", +) + +deploy_all_cli_arg_names = pytest.param( + "schemachange.cli.deploy", [ - ( - "schemachange.cli.deploy", - ["schemachange", *required_args], - {**default_deploy_config, **required_config}, - None, - ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", *required_args], - {**default_deploy_config, **required_config}, - None, - ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", "-f", ".", *required_args], - {**default_deploy_config, **required_config, "root_folder": Path(".")}, - None, - ), - ( - "schemachange.cli.deploy", - [ - "schemachange", - "deploy", - *required_args, - "--snowflake-database", - "database", - ], - { - **default_deploy_config, - **required_config, - "snowflake_database": "database", - }, - None, + "schemachange", + "deploy", + "--config-folder", + str(assets_path), + "--config-file-name", + 
"schemachange-config.yml", + "--root-folder", + str(assets_path), + "--modules-folder", + str(assets_path), + "--vars", + '{"var1": "from_cli", "var3": "also_from_cli"}', + "--verbose", + "--snowflake-account", + "snowflake-account-from-cli", + "--snowflake-user", + "snowflake-user-from-cli", + "--snowflake-role", + "snowflake-role-from-cli", + "--snowflake-warehouse", + "snowflake-warehouse-from-cli", + "--snowflake-database", + "snowflake-database-from-cli", + "--snowflake-schema", + "snowflake-schema-from-cli", + "--connections-file-path", + str(assets_path / "alt-connections.toml"), + "--connection-name", + "myaltconnection", + "--change-history-table", + "db.schema.table_from_cli", + "--create-change-history-table", + "--autocommit", + "--dry-run", + "--query-tag", + "query-tag-from-cli", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config.yml", + "config_version": 1, + "root_folder": assets_path, + "modules_folder": assets_path, + "snowflake_account": "snowflake-account-from-cli", + "snowflake_user": "snowflake-user-from-cli", + "snowflake_role": get_snowflake_identifier_string( + "snowflake-role-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", *required_args, "--snowflake-schema", "schema"], - {**default_deploy_config, **required_config, "snowflake_schema": "schema"}, - None, + "snowflake_warehouse": get_snowflake_identifier_string( + "snowflake-warehouse-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - [ - "schemachange", - "deploy", - *required_args, - "--change-history-table", - "db.schema.table", - ], - { - **default_deploy_config, - **required_config, - "change_history_table": ChangeHistoryTable( - database_name="db", schema_name="schema", table_name="table" - ), - }, - None, + "snowflake_database": get_snowflake_identifier_string( + "snowflake-database-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", 
*required_args, "--vars", '{"var1": "val"}'], - { - **default_deploy_config, - **required_config, - "config_vars": {"var1": "val"}, - }, - None, + "snowflake_schema": get_snowflake_identifier_string( + "snowflake-schema-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", *required_args, "--create-change-history-table"], - { - **default_deploy_config, - **required_config, - "create_change_history_table": True, - }, - None, + "change_history_table": ChangeHistoryTable( + database_name="db", + schema_name="schema", + table_name="table_from_cli", ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", *required_args, "--autocommit"], - {**default_deploy_config, **required_config, "autocommit": True}, - None, + "config_vars": { + "var1": "from_cli", + "var3": "also_from_cli", + }, + "create_change_history_table": True, + "autocommit": True, + "log_level": logging.DEBUG, + "dry_run": True, + "query_tag": "query-tag-from-cli", + "connection_name": "myaltconnection", + "connections_file_path": assets_path / "alt-connections.toml", + }, + None, + id="deploy: all cli argument names", +) + +deploy_all_cli_arg_flags = pytest.param( + "schemachange.cli.deploy", + [ + "schemachange", + "deploy", + "--config-folder", + str(assets_path), + "--config-file-name", + "schemachange-config.yml", + "-f", + str(assets_path), + "-m", + str(assets_path), + "--vars", + '{"var1": "from_cli", "var3": "also_from_cli"}', + "-v", + "-a", + "snowflake-account-from-cli", + "-u", + "snowflake-user-from-cli", + "-r", + "snowflake-role-from-cli", + "-w", + "snowflake-warehouse-from-cli", + "-d", + "snowflake-database-from-cli", + "-s", + "snowflake-schema-from-cli", + "--connections-file-path", + str(assets_path / "alt-connections.toml"), + "--connection-name", + "myaltconnection", + "-c", + "db.schema.table_from_cli", + "--create-change-history-table", + "-ac", + "--dry-run", + "--query-tag", + "query-tag-from-cli", + ], + { # expected + "subcommand": 
"deploy", + "config_file_path": assets_path / "schemachange-config.yml", + "config_version": 1, + "root_folder": assets_path, + "modules_folder": assets_path, + "snowflake_account": "snowflake-account-from-cli", + "snowflake_user": "snowflake-user-from-cli", + "snowflake_role": get_snowflake_identifier_string( + "snowflake-role-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", *required_args, "--verbose"], - {**default_deploy_config, **required_config, "log_level": logging.DEBUG}, - None, + "snowflake_warehouse": get_snowflake_identifier_string( + "snowflake-warehouse-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", *required_args, "--dry-run"], - {**default_deploy_config, **required_config, "dry_run": True}, - None, + "snowflake_database": get_snowflake_identifier_string( + "snowflake-database-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - ["schemachange", "deploy", *required_args, "--query-tag", "querytag"], - {**default_deploy_config, **required_config, "query_tag": "querytag"}, - None, + "snowflake_schema": get_snowflake_identifier_string( + "snowflake-schema-from-cli", "placeholder" ), - ( - "schemachange.cli.deploy", - [ - "schemachange", - "deploy", - *required_args, - "--oauth-config", - '{"token-provider-url": "https//..."}', - ], - { - **default_deploy_config, - **required_config, - "oauth_config": {"token-provider-url": "https//..."}, - }, - None, + "change_history_table": ChangeHistoryTable( + database_name="db", + schema_name="schema", + table_name="table_from_cli", ), - ( - "schemachange.cli.deploy", - [ - "schemachange", - "deploy", - *required_args, - ], - { - **default_deploy_config, - **required_config, - "log_level": 20, - }, - None, + "config_vars": { + "var1": "from_cli", + "var3": "also_from_cli", + }, + "create_change_history_table": True, + "autocommit": True, + "log_level": logging.DEBUG, + "dry_run": True, + "query_tag": "query-tag-from-cli", + 
"connection_name": "myaltconnection", + "connections_file_path": assets_path / "alt-connections.toml", + }, + None, + id="deploy: all cli argument flags", +) + +deploy_all_env_all_cli = pytest.param( + "schemachange.cli.deploy", + [ + "schemachange", + "deploy", + "--config-folder", + str(assets_path), + "--config-file-name", + "schemachange-config.yml", + "--root-folder", + str(assets_path), + "--modules-folder", + str(assets_path), + "--vars", + '{"var1": "from_cli", "var3": "also_from_cli"}', + "--verbose", + "--snowflake-account", + "snowflake-account-from-cli", + "--snowflake-user", + "snowflake-user-from-cli", + "--snowflake-role", + "snowflake-role-from-cli", + "--snowflake-warehouse", + "snowflake-warehouse-from-cli", + "--snowflake-database", + "snowflake-database-from-cli", + "--snowflake-schema", + "snowflake-schema-from-cli", + "--connections-file-path", + str(assets_path / "alt-connections.toml"), + "--connection-name", + "myaltconnection", + "--change-history-table", + "db.schema.table_from_cli", + "--create-change-history-table", + "--autocommit", + "--dry-run", + "--query-tag", + "query-tag-from-cli", + ], + { # expected + "subcommand": "deploy", + "config_file_path": assets_path / "schemachange-config.yml", + "config_version": 1, + "root_folder": assets_path, + "modules_folder": assets_path, + "snowflake_account": "snowflake-account-from-cli", + "snowflake_user": "snowflake-user-from-cli", + "snowflake_role": get_snowflake_identifier_string( + "snowflake-role-from-cli", "placeholder" ), - ( - "schemachange.cli.render", - [ - "schemachange", - "render", - str(script_path), - ], - {**default_base_config}, - script_path, + "snowflake_warehouse": get_snowflake_identifier_string( + "snowflake-warehouse-from-cli", "placeholder" ), - ( - "schemachange.cli.render", - [ - "schemachange", - "render", - "--root-folder", - ".", - str(script_path), - ], - {**default_base_config, "root_folder": Path(".")}, - script_path, + "snowflake_database": 
get_snowflake_identifier_string( + "snowflake-database-from-cli", "placeholder" ), - ( - "schemachange.cli.render", - [ - "schemachange", - "render", - "--vars", - '{"var1": "val"}', - str(script_path), - ], - {**default_base_config, "config_vars": {"var1": "val"}}, - script_path, + "snowflake_schema": get_snowflake_identifier_string( + "snowflake-schema-from-cli", "placeholder" ), - ( - "schemachange.cli.render", - [ - "schemachange", - "render", - "--verbose", - str(script_path), - ], - {**default_base_config, "log_level": logging.DEBUG}, - script_path, + "change_history_table": ChangeHistoryTable( + database_name="db", + schema_name="schema", + table_name="table_from_cli", ), + "config_vars": { + "var1": "from_cli", + "var3": "also_from_cli", + }, + "create_change_history_table": True, + "autocommit": True, + "log_level": logging.DEBUG, + "dry_run": True, + "query_tag": "query-tag-from-cli", + "connection_name": "myaltconnection", + "connections_file_path": assets_path / "alt-connections.toml", + }, + None, + id="deploy: all env_vars and all cli argument names", +) + +deploy_snowflake_oauth_env_var = pytest.param( + "schemachange.cli.deploy", + [ + "schemachange", + "deploy", + *required_args, + ], + { + **default_deploy_config, + "snowflake_account": "account", + "snowflake_user": "user", + "snowflake_warehouse": "warehouse", + "snowflake_role": "role", + }, + None, + id="deploy: oauth env var", +) + +deploy_snowflake_oauth_file = pytest.param( + "schemachange.cli.deploy", + [ + "schemachange", + "deploy", + *required_args, + ], + { + **default_deploy_config, + "snowflake_account": "account", + "snowflake_user": "user", + "snowflake_warehouse": "warehouse", + "snowflake_role": "role", + }, + None, + id="deploy: oauth file", +) + +render_only_required = pytest.param( + "schemachange.cli.render", + [ + "schemachange", + "render", + str(script_path), + "--config-folder", + str(assets_path), ], + {**default_base_config}, + script_path, + id="render: only required", 
) + +render_all_cli_arg_names = pytest.param( + "schemachange.cli.render", + [ + "schemachange", + "render", + "--root-folder", + ".", + "--vars", + '{"var1": "val"}', + "--verbose", + str(script_path), + "--config-folder", + str(assets_path), + ], + { + **default_base_config, + "root_folder": Path("."), + "config_vars": {"var1": "val"}, + "log_level": logging.DEBUG, + }, + script_path, + id="render: all cli argument names", +) + + +@pytest.mark.parametrize( + "to_mock, cli_args, expected_config, expected_script_path", + [ + no_command, + deploy_only_required, + deploy_all_cli_arg_names, + deploy_all_cli_arg_flags, + deploy_all_env_all_cli, + deploy_snowflake_oauth_env_var, + deploy_snowflake_oauth_file, + render_only_required, + render_all_cli_arg_names, + ], +) +@mock.patch("pathlib.Path.is_file", return_value=True) @mock.patch("schemachange.session.SnowflakeSession.snowflake.connector.connect") +@mock.patch("schemachange.session.SnowflakeSession.get_snowflake_identifier_string") def test_main_deploy_subcommand_given_arguments_make_sure_arguments_set_on_call( _, + __, + ___, to_mock: str, cli_args: list[str], expected_config: dict, expected_script_path: Path | None, ): - with mock.patch.dict(os.environ, {"SNOWFLAKE_PASSWORD": "password"}, clear=True): - with mock.patch("sys.argv", cli_args): - with mock.patch(to_mock) as mock_command: - cli.main() - mock_command.assert_called_once() - _, call_kwargs = mock_command.call_args - for expected_arg, expected_value in expected_config.items(): - actual_value = getattr(call_kwargs["config"], expected_arg) - if hasattr(actual_value, "table_name"): - assert asdict(actual_value) == asdict(expected_value) - else: - assert actual_value == expected_value - if expected_script_path is not None: - assert call_kwargs["script_path"] == expected_script_path + with mock.patch("sys.argv", cli_args): + with mock.patch(to_mock) as mock_command: + cli.main() + mock_command.assert_called_once() + _, call_kwargs = mock_command.call_args + 
for expected_arg, expected_value in expected_config.items(): + actual_value = getattr(call_kwargs["config"], expected_arg) + if hasattr(actual_value, "table_name"): + assert asdict(actual_value) == asdict(expected_value) + else: + assert actual_value == expected_value + if expected_script_path is not None: + assert call_kwargs["script_path"] == expected_script_path @pytest.mark.parametrize( @@ -303,43 +480,45 @@ def test_main_deploy_subcommand_given_arguments_make_sure_arguments_set_on_call( ], ) @mock.patch("schemachange.session.SnowflakeSession.snowflake.connector.connect") +@mock.patch("schemachange.session.SnowflakeSession.get_snowflake_identifier_string") def test_main_deploy_config_folder( _, + __, to_mock: str, args: list[str], expected_config: dict, expected_script_path: Path | None, ): - with mock.patch.dict(os.environ, {"SNOWFLAKE_PASSWORD": "password"}, clear=True): - with tempfile.TemporaryDirectory() as d: - with open(os.path.join(d, "schemachange-config.yml"), "w") as f: - f.write( - dedent( - """ - snowflake_account: account - snowflake_user: user - snowflake_warehouse: warehouse - snowflake_role: role - """ - ) + with tempfile.TemporaryDirectory() as d: + with open(os.path.join(d, "schemachange-config.yml"), "w") as f: + f.write( + dedent( + """ + snowflake_account: account + snowflake_user: user + snowflake_warehouse: warehouse + snowflake_role: role + """ ) + ) - args[args.index("DUMMY")] = d - expected_config["config_file_path"] = Path(d) / "schemachange-config.yml" + # noinspection PyTypeChecker + args[args.index("DUMMY")] = d + expected_config["config_file_path"] = Path(d) / "schemachange-config.yml" - with mock.patch(to_mock) as mock_command: - with mock.patch("sys.argv", args): - cli.main() - mock_command.assert_called_once() - _, call_kwargs = mock_command.call_args - for expected_arg, expected_value in expected_config.items(): - actual_value = getattr(call_kwargs["config"], expected_arg) - if hasattr(actual_value, "table_name"): - assert 
asdict(actual_value) == asdict(expected_value) - else: - assert actual_value == expected_value - if expected_script_path is not None: - assert call_kwargs["script_path"] == expected_script_path + with mock.patch(to_mock) as mock_command: + with mock.patch("sys.argv", args): + cli.main() + mock_command.assert_called_once() + _, call_kwargs = mock_command.call_args + for expected_arg, expected_value in expected_config.items(): + actual_value = getattr(call_kwargs["config"], expected_arg) + if hasattr(actual_value, "table_name"): + assert asdict(actual_value) == asdict(expected_value) + else: + assert actual_value == expected_value + if expected_script_path is not None: + assert call_kwargs["script_path"] == expected_script_path @pytest.mark.parametrize( @@ -359,6 +538,8 @@ def test_main_deploy_config_folder( str(script_path), "--modules-folder", "DUMMY", + "--config-folder", + str(assets_path), ], {**default_base_config, "modules_folder": "DUMMY"}, script_path, @@ -366,28 +547,30 @@ def test_main_deploy_config_folder( ], ) @mock.patch("schemachange.session.SnowflakeSession.snowflake.connector.connect") +@mock.patch("schemachange.session.SnowflakeSession.get_snowflake_identifier_string") def test_main_deploy_modules_folder( _, + __, to_mock: str, args: list[str], expected_config: dict, expected_script_path: Path | None, ): - with mock.patch.dict(os.environ, {"SNOWFLAKE_PASSWORD": "password"}, clear=True): - with tempfile.TemporaryDirectory() as d: - args[args.index("DUMMY")] = d - expected_config["modules_folder"] = Path(d) + with tempfile.TemporaryDirectory() as d: + # noinspection PyTypeChecker + args[args.index("DUMMY")] = d + expected_config["modules_folder"] = Path(d) - with mock.patch(to_mock) as mock_command: - with mock.patch("sys.argv", args): - cli.main() - mock_command.assert_called_once() - _, call_kwargs = mock_command.call_args - for expected_arg, expected_value in expected_config.items(): - actual_value = getattr(call_kwargs["config"], expected_arg) - 
if hasattr(actual_value, "table_name"): - assert asdict(actual_value) == asdict(expected_value) - else: - assert actual_value == expected_value - if expected_script_path is not None: - assert call_kwargs["script_path"] == expected_script_path + with mock.patch(to_mock) as mock_command: + with mock.patch("sys.argv", args): + cli.main() + mock_command.assert_called_once() + _, call_kwargs = mock_command.call_args + for expected_arg, expected_value in expected_config.items(): + actual_value = getattr(call_kwargs["config"], expected_arg) + if hasattr(actual_value, "table_name"): + assert asdict(actual_value) == asdict(expected_value) + else: + assert actual_value == expected_value + if expected_script_path is not None: + assert call_kwargs["script_path"] == expected_script_path