diff --git a/.github/workflows/test.yml b/.github/workflows/test.yml
index 0963dba8e..3e3f0efc0 100644
--- a/.github/workflows/test.yml
+++ b/.github/workflows/test.yml
@@ -170,7 +170,7 @@ jobs:
       - name: Test Cosmos against Airflow ${{ matrix.airflow-version }} and Python ${{ matrix.python-version }}
         run: |
-          hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-setup
+          hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-expensive-setup
           DATABRICKS_UNIQUE_ID="${{github.run_id}}" hatch run tests.py${{ matrix.python-version }}-${{ matrix.airflow-version }}:test-integration-expensive
         env:
           AIRFLOW_HOME: /home/runner/work/astronomer-cosmos/astronomer-cosmos/
diff --git a/dev/dags/example_cosmos_sources.py b/dev/dags/example_cosmos_sources.py
index 40f63c53a..27acdf0ba 100644
--- a/dev/dags/example_cosmos_sources.py
+++ b/dev/dags/example_cosmos_sources.py
@@ -3,25 +3,19 @@
 from pathlib import Path
 
 from airflow.operators.dummy import DummyOperator
+from airflow.models.dag import DAG
+from airflow.utils.task_group import TaskGroup
 
 from cosmos import DbtDag, ProjectConfig, ProfileConfig, RenderConfig
 from cosmos.constants import DbtResourceType
+from cosmos.dbt.graph import DbtNode
 
 DEFAULT_DBT_ROOT_PATH = Path(__file__).parent / "dbt"
 DBT_ROOT_PATH = Path(os.getenv("DBT_ROOT_PATH", DEFAULT_DBT_ROOT_PATH))
 
 os.environ["DBT_SQLITE_PATH"] = str(DEFAULT_DBT_ROOT_PATH / "simple")
 
-"""
-profile_config = ProfileConfig(
-    profile_name="default",
-    target_name="dev",
-    profile_mapping=PostgresUserPasswordProfileMapping(
-        conn_id="airflow_db",
-        profile_args={"schema": "public"},
-    ),
-)
-"""
+
 profile_config = ProfileConfig(
     profile_name="simple",
     target_name="dev",
@@ -29,13 +23,13 @@
 )
 
 
-def convert_source(dag, task_group, node, **kwargs):
+def convert_source(dag: DAG, task_group: TaskGroup, node: DbtNode, **kwargs):
     return DummyOperator(dag=dag, task_group=task_group, task_id=f"{node.name}_source")
 
 
 render_config = RenderConfig(dbt_resource_converter={DbtResourceType.SOURCE: convert_source})
 
-# [START local_example]
+
 example_cosmos_sources = DbtDag(
     # dbt/cosmos-specific parameters
     project_config=ProjectConfig(
@@ -50,4 +44,3 @@ def convert_source(dag, task_group, node, **kwargs):
     catchup=False,
     dag_id="example_cosmos_sources",
 )
-# [END local_example]
diff --git a/pyproject.toml b/pyproject.toml
index c3fbe63a4..2330f047f 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -155,6 +155,10 @@ test-cov = 'pytest -vv --cov=cosmos --cov-report=term-missing --cov-report=xml -
 test-integration-setup = """pip uninstall dbt-postgres dbt-databricks; \
 rm -rf airflow.*; \
 airflow db init; \
+pip install 'dbt-postgres<=1.5' 'dbt-databricks<=1.5'"""
+test-integration-expensive-setup = """pip uninstall dbt-postgres dbt-databricks; \
+rm -rf airflow.*; \
+airflow db init; \
 pip install 'dbt-postgres<=1.4' 'dbt-databricks<=1.4' 'dbt-sqlite<=1.4'"""
 test-integration = """rm -rf dbt/jaffle_shop/dbt_packages;
 pytest -vv \
@@ -163,7 +167,7 @@ pytest -vv \
 --cov-report=xml \
 --durations=0 \
 -m integration \
--k 'not (test_example_dags_no_connections[example_cosmos_python_models] or test_example_dag[example_cosmos_python_models] or test_example_dag[example_virtualenv])'
+-k 'not (test_example_dags[example_cosmos_sources] or test_example_dags_no_connections[example_cosmos_python_models] or test_example_dag[example_cosmos_python_models] or test_example_dag[example_virtualenv])'
 """
 test-integration-expensive = """rm -rf dbt/jaffle_shop/dbt_packages;
 pytest -vv \
@@ -172,7 +176,7 @@ pytest -vv \
 --cov-report=xml \
 --durations=0 \
 -m integration \
--k 'test_example_dags_no_connections[example_cosmos_python_models] or test_example_dag[example_cosmos_python_models] or test_example_dag[example_virtualenv]'"""
+-k 'test_example_dags_no_connections[example_cosmos_python_models] or test_example_dag[example_cosmos_python_models] or test_example_dag[example_virtualenv] or test_example_dags[example_cosmos_sources]'"""
 
 [tool.pytest.ini_options]
 filterwarnings = [
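
For readers skimming the diff, here is a minimal sketch of how the new pieces in dev/dags/example_cosmos_sources.py fit together: a converter callback keyed by DbtResourceType.SOURCE in RenderConfig turns each dbt source node into a DummyOperator. The ProjectConfig path and the profiles_yml_filepath below are assumptions for illustration only; everything else mirrors what the diff shows.

```python
# Illustrative sketch based on the example_cosmos_sources.py changes above.
# Values not visible in the diff (project path, profiles.yml location) are
# assumptions, not the PR's actual configuration.
import os
from pathlib import Path

from airflow.models.dag import DAG
from airflow.operators.dummy import DummyOperator
from airflow.utils.task_group import TaskGroup

from cosmos import DbtDag, ProfileConfig, ProjectConfig, RenderConfig
from cosmos.constants import DbtResourceType
from cosmos.dbt.graph import DbtNode

DBT_ROOT_PATH = Path(os.getenv("DBT_ROOT_PATH", Path(__file__).parent / "dbt"))


def convert_source(dag: DAG, task_group: TaskGroup, node: DbtNode, **kwargs):
    # Render every dbt source node as a placeholder task named "<source>_source".
    return DummyOperator(dag=dag, task_group=task_group, task_id=f"{node.name}_source")


example_cosmos_sources = DbtDag(
    project_config=ProjectConfig(DBT_ROOT_PATH / "simple"),  # assumed project path
    profile_config=ProfileConfig(
        profile_name="simple",
        target_name="dev",
        profiles_yml_filepath=DBT_ROOT_PATH / "simple" / "profiles.yml",  # assumed location
    ),
    # The converter mapping is taken verbatim from the diff: SOURCE nodes go
    # through convert_source instead of the default operator mapping.
    render_config=RenderConfig(dbt_resource_converter={DbtResourceType.SOURCE: convert_source}),
    catchup=False,
    dag_id="example_cosmos_sources",
)
```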