do changes to zendesk #36

name: run-dbt-test
on:
  pull_request:
    types: [opened, synchronize, reopened]
jobs:
  run-dbt-tests:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - name: Install Python
        uses: actions/setup-python@v4
        with:
          python-version: "3.12"
      - name: Install poetry
        uses: abatilo/actions-poetry@v2
      - name: Install the project dependencies
        run: poetry install
      - name: Ensure .dbt directory exists
        run: mkdir -p ~/.dbt
      - name: Copy profiles
        run: cp .github/ci/sample.profiles.yml ~/.dbt/profiles.yml
      - name: Get what connector we are testing
        id: get_connector
        run: echo "connector=source_zendesk_support" >> "$GITHUB_OUTPUT"
      - name: Run Integration Tests for Postgres
        working-directory: connectors/${{ steps.get_connector.outputs.connector }}/integration_tests
        if: always()
        env:
          POSTGRES_DBT_HOST: ${{ secrets.POSTGRES_DBT_HOST }}
          POSTGRES_DBT_USER: ${{ secrets.POSTGRES_DBT_USER }}
          POSTGRES_DBT_PASSWORD: ${{ secrets.POSTGRES_DBT_PASSWORD }}
          POSTGRES_DBT_DBNAME: ${{ secrets.POSTGRES_DBT_DBNAME }}
          POSTGRES_DBT_SCHEMA: ${{ secrets.POSTGRES_DBT_SCHEMA }}
          AIRBYTE_DATABASE: "postgres"
        run: |
          poetry run dbt deps
          poetry run dbt run --target postgres --full-refresh --vars "{airbyte_database: ${AIRBYTE_DATABASE}, fivetran_database: ${AIRBYTE_DATABASE}}"
      - name: Run Integration Tests for Snowflake
        if: always()
        working-directory: connectors/${{ steps.get_connector.outputs.connector }}/integration_tests
        env:
          SNOWFLAKE_DBT_ACCOUNT: ${{ secrets.SNOWFLAKE_DBT_ACCOUNT }}
          SNOWFLAKE_DBT_USER: ${{ secrets.SNOWFLAKE_DBT_USER }}
          SNOWFLAKE_DBT_PASSWORD: ${{ secrets.SNOWFLAKE_DBT_PASSWORD }}
          SNOWFLAKE_DBT_ROLE: ${{ secrets.SNOWFLAKE_DBT_ROLE }}
          SNOWFLAKE_DBT_DATABASE: ${{ secrets.SNOWFLAKE_DBT_DATABASE }}
          SNOWFLAKE_DBT_WAREHOUSE: ${{ secrets.SNOWFLAKE_DBT_WAREHOUSE }}
          SNOWFLAKE_DBT_SCHEMA: ${{ secrets.SNOWFLAKE_DBT_SCHEMA }}
          AIRBYTE_DATABASE: "INTEGRATION_TEST_DESTINATION"
        run: |
          poetry run dbt deps
          poetry run dbt run --target snowflake --full-refresh --vars "{airbyte_database: ${AIRBYTE_DATABASE}, fivetran_database: ${AIRBYTE_DATABASE}}"
      - name: Run Integration Tests for BigQuery
        if: always()
        working-directory: connectors/${{ steps.get_connector.outputs.connector }}/integration_tests
        env:
          BIGQUERY_DBT_PROJECT: ${{ secrets.BIGQUERY_DBT_PROJECT }}
          BIGQUERY_DBT_DATASET: ${{ secrets.BIGQUERY_DBT_DATASET }}
          GCLOUD_SERVICE_KEY: ${{ secrets.GCLOUD_SERVICE_KEY }}
          AIRBYTE_DATABASE: "dataline-integration-testing"
        run: |
          poetry run dbt deps
          poetry run dbt run --target bigquery --full-refresh --vars "{airbyte_database: ${AIRBYTE_DATABASE}, fivetran_database: ${AIRBYTE_DATABASE}}"
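
For context, the "Copy profiles" step assumes .github/ci/sample.profiles.yml defines dbt targets named postgres, snowflake, and bigquery that read credentials from the environment variables exported above. A minimal sketch of what that file might look like, under those assumptions, is below; the profile name (integration_tests), the Postgres port, the thread counts, and the BigQuery keyfile path are illustrative guesses, not taken from this repository.

integration_tests:  # assumed profile name; must match "profile" in dbt_project.yml
  target: postgres
  outputs:
    postgres:
      type: postgres
      host: "{{ env_var('POSTGRES_DBT_HOST') }}"
      user: "{{ env_var('POSTGRES_DBT_USER') }}"
      password: "{{ env_var('POSTGRES_DBT_PASSWORD') }}"
      port: 5432  # assumed default port
      dbname: "{{ env_var('POSTGRES_DBT_DBNAME') }}"
      schema: "{{ env_var('POSTGRES_DBT_SCHEMA') }}"
      threads: 4  # assumed
    snowflake:
      type: snowflake
      account: "{{ env_var('SNOWFLAKE_DBT_ACCOUNT') }}"
      user: "{{ env_var('SNOWFLAKE_DBT_USER') }}"
      password: "{{ env_var('SNOWFLAKE_DBT_PASSWORD') }}"
      role: "{{ env_var('SNOWFLAKE_DBT_ROLE') }}"
      database: "{{ env_var('SNOWFLAKE_DBT_DATABASE') }}"
      warehouse: "{{ env_var('SNOWFLAKE_DBT_WAREHOUSE') }}"
      schema: "{{ env_var('SNOWFLAKE_DBT_SCHEMA') }}"
      threads: 4  # assumed
    bigquery:
      type: bigquery
      method: service-account
      project: "{{ env_var('BIGQUERY_DBT_PROJECT') }}"
      dataset: "{{ env_var('BIGQUERY_DBT_DATASET') }}"
      keyfile: /tmp/gcloud-service-key.json  # assumed path; the workflow would also need a step writing GCLOUD_SERVICE_KEY to this file
      threads: 4  # assumed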