Enable ruff F linting (#985)
I've noticed a few unused imports in this library. This PR activates Ruff's
"[F](https://docs.astral.sh/ruff/rules/#pyflakes-f)" (Pyflakes) rules to
clean things up.
pankajastro authored May 19, 2024
1 parent 39b4b7d commit 214813b
Showing 8 changed files with 5 additions and 15 deletions.
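For context, the `F` rule set is Ruff's port of Pyflakes. The snippet below is an illustration (not code from this repository) of the three kinds of findings the commit fixes; running `ruff check --select F` over a file like this reproduces them:

import shutil  # F401: module imported but never used

BANNER = f"cosmos"  # F541: f-string without any placeholders (the one rule ignored below)


def dbt_runner_available() -> bool:
    try:
        from dbt.cli.main import dbtRunner  # noqa: F401  (probe import, intentionally unused)
    except ImportError:  # binding `as error` here would trip F841 (assigned but never used)
        return False
    return True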
1 change: 0 additions & 1 deletion cosmos/dbt/project.py
@@ -1,7 +1,6 @@
 from __future__ import annotations
 
 import os
-import shutil
 from contextlib import contextmanager
 from pathlib import Path
 from typing import Generator
2 changes: 0 additions & 2 deletions cosmos/operators/base.py
@@ -2,7 +2,6 @@
 
 import os
 from abc import ABCMeta, abstractmethod
-from functools import cached_property
 from pathlib import Path
 from typing import Any, Sequence, Tuple
 
@@ -12,7 +11,6 @@
 from airflow.utils.operator_helpers import context_to_airflow_vars
 from airflow.utils.strings import to_boolean
 
-from cosmos import cache
 from cosmos.dbt.executable import get_system_dbt
 from cosmos.log import get_logger
 
2 changes: 1 addition & 1 deletion cosmos/operators/kubernetes.py
@@ -36,7 +36,7 @@
 try:
     # apache-airflow-providers-cncf-kubernetes < 7.4.0
     from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
-except ImportError as error:
+except ImportError:
     raise ImportError(
         "Could not import KubernetesPodOperator. Ensure you've installed the Kubernetes provider "
         "separately or with with `pip install astronomer-cosmos[...,kubernetes]`."
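The `as error` binding was never referenced inside the `except` block, which F841 flags (local variable assigned but never used). If the original exception were wanted for its traceback, a sketch of the usual alternative — not what this commit does — would be to keep the binding and chain it:

try:
    from airflow.providers.cncf.kubernetes.operators.kubernetes_pod import KubernetesPodOperator
except ImportError as error:
    # `error` is now used, so F841 no longer fires and the original traceback is preserved
    raise ImportError("Could not import KubernetesPodOperator.") from error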
6 changes: 2 additions & 4 deletions cosmos/operators/local.py
@@ -41,8 +41,6 @@
 
 from cosmos.config import ProfileConfig
 from cosmos.constants import (
-    DBT_PARTIAL_PARSE_FILE_NAME,
-    DBT_TARGET_DIR_NAME,
     DEFAULT_OPENLINEAGE_NAMESPACE,
     OPENLINEAGE_PRODUCER,
 )
@@ -176,7 +174,7 @@ def _discover_invocation_mode(self) -> None:
         This method is called at runtime to work in the environment where the operator is running.
         """
         try:
-            from dbt.cli.main import dbtRunner
+            from dbt.cli.main import dbtRunner  # noqa
         except ImportError:
             self.invocation_mode = InvocationMode.SUBPROCESS
             logger.info("Could not import dbtRunner. Falling back to subprocess for invoking dbt.")
@@ -426,7 +424,7 @@ def get_datasets(self, source: Literal["inputs", "outputs"]) -> list[Dataset]:
         datasets = []
         try:
             datasets = [Dataset(uri) for uri in uris]
-        except ValueError as e:
+        except ValueError:
             raise AirflowCompatibilityError(
                 """
                 Apache Airflow 2.9.0 & 2.9.1 introduced a breaking change in Dataset URIs, to be fixed in newer versions:
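The added `# noqa` matters because the `dbtRunner` import exists only to probe whether dbt's programmatic runner is installed; the name itself is never used, which the newly enabled F401 would otherwise report. A minimal sketch of the pattern (illustrative, with a made-up function name):

def detect_invocation_mode() -> str:
    try:
        from dbt.cli.main import dbtRunner  # noqa: F401  (only checking importability)
    except ImportError:
        return "subprocess"  # fall back to invoking the dbt CLI
    return "dbt_runner"  # dbt can be driven in-process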
3 changes: 2 additions & 1 deletion pyproject.toml
@@ -199,7 +199,8 @@ no_warn_unused_ignores = true
 [tool.ruff]
 line-length = 120
 [tool.ruff.lint]
-select = ["C901", "I"]
+select = ["C901", "I", "F"]
+ignore = ["F541"]
 [tool.ruff.lint.mccabe]
 max-complexity = 10
 
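`F541` (f-string without any placeholders) is the one Pyflakes rule the project opts out of here; the rest of the F set now runs. For illustration, this is all F541 complains about:

name = f"cosmos"           # F541: no placeholders, so the f prefix is redundant
name = "cosmos"            # the fix is simply to drop the prefix
label = f"project={name}"  # fine: contains a placeholder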
2 changes: 0 additions & 2 deletions tests/dbt/test_project.py
@@ -2,8 +2,6 @@
 from pathlib import Path
 from unittest.mock import patch
 
-import pytest
-
 from cosmos.dbt.project import change_working_directory, create_symlinks, environ
 
 DBT_PROJECTS_ROOT_DIR = Path(__file__).parent.parent.parent / "dev/dags/dbt"
3 changes: 0 additions & 3 deletions tests/operators/test_local.py
@@ -3,7 +3,6 @@
 import shutil
 import sys
 import tempfile
-from datetime import datetime
 from pathlib import Path
 from unittest.mock import MagicMock, call, patch
 
@@ -451,7 +450,6 @@ def test_run_operator_dataset_inlets_and_outlets(caplog):
 )
 @pytest.mark.integration
 def test_run_operator_dataset_emission_fails(caplog):
-    from airflow.datasets import Dataset
 
     with DAG("test-id-1", start_date=datetime(2022, 1, 1)) as dag:
         seed_operator = DbtSeedLocalOperator(
@@ -494,7 +492,6 @@ def test_run_operator_dataset_emission_fails(caplog):
 )
 @pytest.mark.integration
 def test_run_operator_dataset_emission_is_skipped(caplog):
-    from airflow.datasets import Dataset
 
     with DAG("test-id-1", start_date=datetime(2022, 1, 1)) as dag:
         seed_operator = DbtSeedLocalOperator(
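The deleted `Dataset` imports were function-scoped and apparently unused in the test bodies, so F401 applies inside functions just as it does at module level. An illustrative reduction (not the actual test):

def test_emission_is_skipped():
    from airflow.datasets import Dataset  # F401: imported here but never used in the function

    assert True  # the body never references `Dataset`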
1 change: 0 additions & 1 deletion tests/plugin/test_plugin.py
@@ -18,7 +18,6 @@
 
 import pytest
 from airflow.configuration import conf
-from airflow.exceptions import AirflowConfigException
 from airflow.utils.db import initdb, resetdb
 from airflow.www.app import cached_app
 from airflow.www.extensions.init_appbuilder import AirflowAppBuilder
