From 214813bbeff6f6a63d307ccc712c0615c9c098c6 Mon Sep 17 00:00:00 2001
From: Pankaj Singh <98807258+pankajastro@users.noreply.github.com>
Date: Mon, 20 May 2024 01:13:23 +0530
Subject: [PATCH] Enable ruff F linting (#985)

I've noticed there are a few unused imports in this library. This PR activates the "[F](https://docs.astral.sh/ruff/rules/#pyflakes-f)" (Pyflakes) rules in ruff to clean them up.
---
 cosmos/dbt/project.py          | 1 -
 cosmos/operators/base.py       | 2 --
 cosmos/operators/kubernetes.py | 2 +-
 cosmos/operators/local.py      | 6 ++----
 pyproject.toml                 | 3 ++-
 tests/dbt/test_project.py      | 2 --
 tests/operators/test_local.py  | 3 ---
 tests/plugin/test_plugin.py    | 1 -
 8 files changed, 5 insertions(+), 15 deletions(-)

diff --git a/cosmos/dbt/project.py b/cosmos/dbt/project.py
index 4a3b036b3..c1c7aa080 100644
--- a/cosmos/dbt/project.py
+++ b/cosmos/dbt/project.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import os
-import shutil
 from contextlib import contextmanager
 from pathlib import Path
 from typing import Generator
diff --git a/cosmos/operators/base.py b/cosmos/operators/base.py
index b8112a73f..e22703fb5 100644
--- a/cosmos/operators/base.py
+++ b/cosmos/operators/base.py
@@ -2,7 +2,6 @@
 
 import os
 from abc import ABCMeta, abstractmethod
-from functools import cached_property
 from pathlib import Path
 from typing import Any, Sequence, Tuple
 
@@ -12,7 +11,6 @@
 from airflow.utils.operator_helpers import context_to_airflow_vars
 from airflow.utils.strings import to_boolean
 
-from cosmos import cache
 from cosmos.dbt.executable import get_system_dbt
 from cosmos.log import get_logger
 
diff --git a/cosmos/operators/kubernetes.py b/cosmos/operators/kubernetes.py
index 14bcbcb84..f84219199 100644
--- a/cosmos/operators/kubernetes.py
+++ b/cosmos/operators/kubernetes.py
@@ -36,7 +36,7 @@
     try:
         # apache-airflow-providers-cncf-kubernetes < 7.4.0
         from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
-    except ImportError as error:
+    except ImportError:
         raise ImportError(
             "Could not import KubernetesPodOperator. Ensure you've installed the Kubernetes provider "
             "separately or with with `pip install astronomer-cosmos[...,kubernetes]`."
diff --git a/cosmos/operators/local.py b/cosmos/operators/local.py
index a9b95baf7..5a3757e6b 100644
--- a/cosmos/operators/local.py
+++ b/cosmos/operators/local.py
@@ -41,8 +41,6 @@
 
 from cosmos.config import ProfileConfig
 from cosmos.constants import (
-    DBT_PARTIAL_PARSE_FILE_NAME,
-    DBT_TARGET_DIR_NAME,
     DEFAULT_OPENLINEAGE_NAMESPACE,
     OPENLINEAGE_PRODUCER,
 )
@@ -176,7 +174,7 @@ def _discover_invocation_mode(self) -> None:
         This method is called at runtime to work in the environment where the operator is running.
         """
         try:
-            from dbt.cli.main import dbtRunner
+            from dbt.cli.main import dbtRunner  # noqa
         except ImportError:
             self.invocation_mode = InvocationMode.SUBPROCESS
             logger.info("Could not import dbtRunner. Falling back to subprocess for invoking dbt.")
@@ -426,7 +424,7 @@ def get_datasets(self, source: Literal["inputs", "outputs"]) -> list[Dataset]:
         datasets = []
         try:
             datasets = [Dataset(uri) for uri in uris]
-        except ValueError as e:
+        except ValueError:
             raise AirflowCompatibilityError(
                 """
                 Apache Airflow 2.9.0 & 2.9.1 introduced a breaking change in Dataset URIs, to be fixed in newer versions:
diff --git a/pyproject.toml b/pyproject.toml
index 97788aee5..8c29c6827 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -199,7 +199,8 @@ no_warn_unused_ignores = true
 [tool.ruff]
 line-length = 120
 [tool.ruff.lint]
-select = ["C901", "I"]
+select = ["C901", "I", "F"]
+ignore = ["F541"]
 
 [tool.ruff.lint.mccabe]
 max-complexity = 10
diff --git a/tests/dbt/test_project.py b/tests/dbt/test_project.py
index 09ab1a735..f55525a43 100644
--- a/tests/dbt/test_project.py
+++ b/tests/dbt/test_project.py
@@ -2,8 +2,6 @@
 from pathlib import Path
 from unittest.mock import patch
 
-import pytest
-
 from cosmos.dbt.project import change_working_directory, create_symlinks, environ
 
 DBT_PROJECTS_ROOT_DIR = Path(__file__).parent.parent.parent / "dev/dags/dbt"
diff --git a/tests/operators/test_local.py b/tests/operators/test_local.py
index 11652e10d..0f35705b6 100644
--- a/tests/operators/test_local.py
+++ b/tests/operators/test_local.py
@@ -3,7 +3,6 @@
 import shutil
 import sys
 import tempfile
-from datetime import datetime
 from pathlib import Path
 from unittest.mock import MagicMock, call, patch
 
@@ -451,7 +450,6 @@ def test_run_operator_dataset_inlets_and_outlets(caplog):
 )
 @pytest.mark.integration
 def test_run_operator_dataset_emission_fails(caplog):
-    from airflow.datasets import Dataset
 
     with DAG("test-id-1", start_date=datetime(2022, 1, 1)) as dag:
         seed_operator = DbtSeedLocalOperator(
@@ -494,7 +492,6 @@
 )
 @pytest.mark.integration
 def test_run_operator_dataset_emission_is_skipped(caplog):
-    from airflow.datasets import Dataset
 
     with DAG("test-id-1", start_date=datetime(2022, 1, 1)) as dag:
         seed_operator = DbtSeedLocalOperator(
diff --git a/tests/plugin/test_plugin.py b/tests/plugin/test_plugin.py
index 8d0e3742f..25fcacbda 100644
--- a/tests/plugin/test_plugin.py
+++ b/tests/plugin/test_plugin.py
@@ -18,7 +18,6 @@
 
 import pytest
 from airflow.configuration import conf
-from airflow.exceptions import AirflowConfigException
 from airflow.utils.db import initdb, resetdb
 from airflow.www.app import cached_app
 from airflow.www.extensions.init_appbuilder import AirflowAppBuilder
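
For context, the "F" (Pyflakes) rules now selected in `[tool.ruff.lint]` cover issues such as unused imports (F401), which is what the removals above address, while F541 (f-strings without placeholders) is explicitly ignored. A minimal, hypothetical sketch of what each rule reports (not taken from the cosmos codebase):

```python
# Hypothetical module illustrating the newly enabled "F" rules; not part of this patch.

import os  # F401: `os` imported but unused -- reported once "F" is in `select`
import sys


def greet(name: str) -> str:
    banner = f"hello"  # F541: f-string without placeholders -- suppressed by `ignore = ["F541"]`
    return f"{banner}, {name} (Python {sys.version_info.major})"


if __name__ == "__main__":
    print(greet("cosmos"))
```

With the updated configuration, `ruff check .` would flag the unused `os` import but stay silent about the placeholder-less f-string.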