Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

[chore] add UP rule to ruff #25703

Open
wants to merge 1 commit into
base: master
Choose a base branch
from
Open
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
The table of contents is too big for display.
Diff view
Diff view
  •  
  •  
  •  
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def build_tox_step(
None,
[
"tox",
"-c %s " % tox_file if tox_file else None,
f"-c {tox_file} " if tox_file else None,
"-vv", # extra-verbose
"-e",
tox_env,
Expand Down
59 changes: 28 additions & 31 deletions .buildkite/dagster-buildkite/dagster_buildkite/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,38 +63,35 @@ class GroupStep(TypedDict):

WaitStep: TypeAlias = Literal["wait"]

# Typed payloads for Buildkite block-step input fields, declared with the
# functional TypedDict syntax (the class-based syntax is equivalent here,
# since every key is a valid identifier).
#
# NOTE(review): in a TypedDict, Optional[X] means "key is required, value may
# be None" — it does NOT make the key omittable. If callers omit these keys,
# NotRequired[X] (or total=False) is the intended annotation; confirm against
# how these dicts are constructed.
InputSelectOption = TypedDict("InputSelectOption", {"label": str, "value": str})
InputSelectField = TypedDict(
    "InputSelectField",
    {
        "select": str,  # the field's label text
        "key": str,  # meta-data key the submitted value is stored under
        "options": List[InputSelectOption],  # the selectable choices
        "hint": Optional[str],
        "default": Optional[str],
        "required": Optional[bool],
        "multiple": Optional[bool],  # whether more than one option may be picked
    },
)
InputTextField = TypedDict(
    "InputTextField",
    {
        "text": str,  # the field's label text
        "key": str,  # meta-data key the submitted value is stored under
        "hint": Optional[str],
        "default": Optional[str],
        "required": Optional[bool],
    },
)

# A Buildkite "block" step: pauses the pipeline and prompts for the fields.
BlockStep = TypedDict(
    "BlockStep",
    {
        "block": str,  # the block step's label
        "prompt": Optional[str],
        "fields": List[Union[InputSelectField, InputTextField]],
    },
)
class InputSelectOption(TypedDict):
    """One choice in a Buildkite block-step "select" field."""

    # The option's display label.
    label: str
    # The value submitted when this option is chosen.
    value: str


class InputSelectField(TypedDict):
    """Typed payload for a Buildkite block-step "select" input field.

    NOTE(review): Optional[X] in a TypedDict means "key required, value may be
    None" — it does not make the key omittable. If callers build these dicts
    without e.g. ``hint``, NotRequired[X] is the intended annotation; confirm
    against the call sites.
    """

    # The field's label text.
    select: str
    # Meta-data key the submitted value is stored under.
    key: str
    # The selectable choices.
    options: List[InputSelectOption]
    hint: Optional[str]
    default: Optional[str]
    required: Optional[bool]
    # Whether more than one option may be picked.
    multiple: Optional[bool]


class InputTextField(TypedDict):
    """Typed payload for a Buildkite block-step free-text input field.

    NOTE(review): Optional[X] keys are still *required* keys whose value may
    be None; if these keys can be omitted entirely, NotRequired[X] is the
    intended annotation — confirm against the call sites.
    """

    # The field's label text.
    text: str
    # Meta-data key the submitted value is stored under.
    key: str
    hint: Optional[str]
    default: Optional[str]
    required: Optional[bool]


class BlockStep(TypedDict):
    """Typed payload for a Buildkite "block" step, which pauses the pipeline
    and prompts the user for the listed input fields."""

    # The block step's label.
    block: str
    prompt: Optional[str]
    # Input fields presented when the step is unblocked.
    fields: List[Union[InputSelectField, InputTextField]]


# Any step that may appear in a Buildkite pipeline definition.
BuildkiteStep: TypeAlias = Union[CommandStep, GroupStep, TriggerStep, WaitStep, BlockStep]
# The subset of steps without GroupStep/BlockStep — presumably the steps
# allowed *inside* a group; confirm against where this alias is used.
BuildkiteLeafStep = Union[CommandStep, TriggerStep, WaitStep]
Expand Down
Binary file modified docs/content/api/modules.json.gz
Binary file not shown.
Binary file modified docs/content/api/searchindex.json.gz
Binary file not shown.
Binary file modified docs/content/api/sections.json.gz
Binary file not shown.
Original file line number Diff line number Diff line change
Expand Up @@ -143,8 +143,7 @@ To define multiple, similar asset checks, use a factory pattern. In the followin

```python file=/concepts/assets/asset_checks/factory.py
from typing import Any, Mapping, Sequence

from mock import MagicMock
from unittest.mock import MagicMock

from dagster import (
AssetCheckResult,
Expand Down
2 changes: 1 addition & 1 deletion docs/content/concepts/assets/software-defined-assets.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -130,7 +130,7 @@ from dagster import AssetKey, Definitions, asset

@asset(deps=["code_location_1_asset"])
def code_location_2_asset():
with open("/data/code_location_1_asset.json", "r") as f:
with open("/data/code_location_1_asset.json") as f:
x = json.load(f)

with open("/data/code_location_2_asset.json", "w+") as f:
Expand Down
2 changes: 1 addition & 1 deletion docs/content/concepts/metadata-tags/asset-metadata.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -111,7 +111,7 @@ from dagster import AssetExecutionContext, MetadataValue, asset, MaterializeResu

@asset(deps=[topstory_ids])
def topstories(context: AssetExecutionContext) -> MaterializeResult:
with open("data/topstory_ids.json", "r") as f:
with open("data/topstory_ids.json") as f:
topstory_ids = json.load(f)

results = []
Expand Down
2 changes: 1 addition & 1 deletion docs/content/concepts/ops-jobs-graphs/op-hooks.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -215,7 +215,7 @@ Then, we can execute a job with the config through Python API, CLI, or the Dagst
```python file=/concepts/ops_jobs_graphs/op_hooks.py startafter=start_repo_main endbefore=end_repo_main
if __name__ == "__main__":
prod_op_hooks_run_config_yaml = file_relative_path(__file__, "prod_op_hooks.yaml")
with open(prod_op_hooks_run_config_yaml, "r", encoding="utf8") as fd:
with open(prod_op_hooks_run_config_yaml, encoding="utf8") as fd:
run_config = yaml.safe_load(fd.read())

notif_all_prod.execute_in_process(run_config=run_config, raise_on_error=False)
Expand Down
4 changes: 2 additions & 2 deletions docs/content/concepts/testing.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -245,7 +245,7 @@ Mock resources can be provided directly using `materialize_to_memory`:

```python file=/concepts/ops_jobs_graphs/unit_tests.py startafter=start_materialize_resources endbefore=end_materialize_resources
from dagster import asset, materialize_to_memory, ConfigurableResource
import mock
from unittest import mock


class MyServiceResource(ConfigurableResource): ...
Expand Down Expand Up @@ -384,7 +384,7 @@ If your ops rely on more complex resources, such as those that build separate cl

```python file=/concepts/resources/pythonic_resources.py startafter=start_new_resource_testing_with_state_ops endbefore=end_new_resource_testing_with_state_ops dedent=4
from dagster import ConfigurableResource, op
import mock
from unittest import mock

class MyClient:
...
Expand Down
2 changes: 1 addition & 1 deletion docs/content/getting-started/quickstart.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -156,7 +156,7 @@ def hackernews_top_story_ids(config: HNStoriesConfig):
@asset(deps=[hackernews_top_story_ids])
def hackernews_top_stories(config: HNStoriesConfig) -> MaterializeResult:
"""Get items based on story ids from the HackerNews items endpoint."""
with open(config.hn_top_story_ids_path, "r") as f:
with open(config.hn_top_story_ids_path) as f:
hackernews_top_story_ids = json.load(f)

results = []
Expand Down
4 changes: 1 addition & 3 deletions docs/content/integrations/pandas.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -133,9 +133,7 @@ To tie this back to our example, let's say that we want to validate that the amo
class DivisibleByFiveConstraint(ColumnConstraint):
def __init__(self):
message = "Value must be divisible by 5"
super(DivisibleByFiveConstraint, self).__init__(
error_description=message, markdown_description=message
)
super().__init__(error_description=message, markdown_description=message)

def validate(self, dataframe, column_name):
rows_with_unexpected_buckets = dataframe[
Expand Down
6 changes: 3 additions & 3 deletions docs/content/tutorial/building-an-asset-graph.mdx
Original file line number Diff line number Diff line change
Expand Up @@ -40,7 +40,7 @@ from dagster import asset

@asset(deps=[topstory_ids]) # this asset is dependent on topstory_ids
def topstories() -> None:
with open("data/topstory_ids.json", "r") as f:
with open("data/topstory_ids.json") as f:
topstory_ids = json.load(f)

results = []
Expand Down Expand Up @@ -79,7 +79,7 @@ from dagster import asset, AssetExecutionContext

@asset(deps=[topstory_ids])
def topstories(context: AssetExecutionContext) -> None:
with open("data/topstory_ids.json", "r") as f:
with open("data/topstory_ids.json") as f:
topstory_ids = json.load(f)

results = []
Expand Down Expand Up @@ -160,7 +160,7 @@ from dagster import AssetExecutionContext, MetadataValue, asset, MaterializeResu

@asset(deps=[topstory_ids])
def topstories(context: AssetExecutionContext) -> MaterializeResult:
with open("data/topstory_ids.json", "r") as f:
with open("data/topstory_ids.json") as f:
topstory_ids = json.load(f)

results = []
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -40,13 +40,13 @@

SVG_FONT_DATA_FILE = os.path.join(os.path.dirname(__file__), "..", "static", "font_info.svg")

with open(SVG_FONT_DATA_FILE, "r", encoding="utf-8") as f:
with open(SVG_FONT_DATA_FILE, encoding="utf-8") as f:
SVG_FONT_DATA = f.read()


def _add_font_info_to_svg(svg_filepath: str):
"""Adds embedded Dagster font information to an SVG file downloaded from Dagit."""
with open(svg_filepath, "r", encoding="utf-8") as f:
with open(svg_filepath, encoding="utf-8") as f:
svg = f.read()
with open(svg_filepath, "w", encoding="utf-8") as f:
f.write(svg.replace('<style xmlns="http://www.w3.org/1999/xhtml"></style>', SVG_FONT_DATA))
Expand All @@ -66,7 +66,7 @@ def _setup_snippet_file(code_path: str, snippet_fn: Optional[str]):
setting up the given snippet function as a repository if specified.
"""
with TemporaryDirectory() as temp_dir:
with open(code_path, "r", encoding="utf-8") as f:
with open(code_path, encoding="utf-8") as f:
code = f.read()

if snippet_fn:
Expand Down Expand Up @@ -139,7 +139,7 @@ def parse_params(param_str: str) -> Dict[str, str]:

def generate_svg(target_mdx_file: str):
# Parse all code blocks in the MD file
with open(target_mdx_file, "r", encoding="utf-8") as f:
with open(target_mdx_file, encoding="utf-8") as f:
snippets = [
parse_params(x) for x in re.findall(r"```python([^\n]+dagimage[^\n]+)", f.read())
]
Expand All @@ -165,7 +165,7 @@ def generate_svg(target_mdx_file: str):
}
)

with open(target_mdx_file, "r", encoding="utf-8") as f:
with open(target_mdx_file, encoding="utf-8") as f:
pattern = re.compile(r"(```python)([^\n]+dagimage[^\n]+)", re.S)

# Find and replace the code block params with our updated params
Expand Down
2 changes: 1 addition & 1 deletion docs/dagster-ui-screenshot/dagster_ui_screenshot/utils.py
Original file line number Diff line number Diff line change
Expand Up @@ -101,7 +101,7 @@ def _is_single_file_spec_db(spec_db_path: str) -> bool:


def _load_yaml(path: str):
with open(path, "r", encoding="utf8") as f:
with open(path, encoding="utf8") as f:
return yaml.safe_load(f)


Expand Down
Binary file modified docs/next/public/objects.inv
Binary file not shown.
2 changes: 1 addition & 1 deletion docs/scripts/validate-pyobjects.py
Original file line number Diff line number Diff line change
Expand Up @@ -29,7 +29,7 @@ def extract_pyobject_object_and_module() -> Dict[str, List[dict]]:
mdx_files = glob("**/*.mdx", recursive=True)
references = {}
for f in mdx_files:
content = open(f, "r").read().replace("\n", " ")
content = open(f).read().replace("\n", " ")
matches = [
{"object": m.group(1), "module": m.group(2)}
for m in re.finditer(PYOBJECT_PATTERN, content)
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -104,7 +104,7 @@ class flag(nodes.Element):


def visit_flag(self, node: flag):
flag_type, message = [node.attributes[k] for k in FLAG_ATTRS]
flag_type, message = (node.attributes[k] for k in FLAG_ATTRS)
# We are currently not parsing the content of the message, so manually sub
# all `references` with `<cite>` tags, which is what the HTML writer does
# for parsed RST.
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -69,7 +69,7 @@ def write_doc(self, docname: str, doctree: nodes.document) -> None:
try:
with open(outfilename, "w", encoding="utf-8") as f:
f.write(self.writer.output)
except (IOError, OSError) as err:
except OSError as err:
logger.warning(f"error writing file {outfilename}: {err}")
raise err

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -41,7 +41,7 @@ def _wrap_chunks(self, chunks: list[str]) -> list[str]:
"""
lines: list[str] = []
if self.width <= 0:
raise ValueError("invalid width %r (must be > 0)" % self.width)
raise ValueError(f"invalid width {self.width!r} (must be > 0)")

chunks.reverse()

Expand Down Expand Up @@ -237,11 +237,11 @@ def starttag(self, node, tagname, suffix="\n", empty=False, **attributes):
# not all have a "href" attribute).
if empty or isinstance(node, (nodes.Sequential, nodes.docinfo, nodes.table)):
# Insert target right in front of element.
prefix.append('<Link id="%s"></Link>' % id)
prefix.append(f'<Link id="{id}"></Link>')
else:
# Non-empty tag. Place the auxiliary <span> tag
# *inside* the element, as the first child.
suffix += '<Link id="%s"></Link>' % id
suffix += f'<Link id="{id}"></Link>'
attlist = sorted(atts.items())
parts = [tagname]
for name, value in attlist:
Expand All @@ -250,14 +250,14 @@ def starttag(self, node, tagname, suffix="\n", empty=False, **attributes):
assert value is not None
if isinstance(value, list):
values = [str(v) for v in value]
parts.append('%s="%s"' % (name.lower(), self.attval(" ".join(values))))
parts.append('{}="{}"'.format(name.lower(), self.attval(" ".join(values))))
else:
parts.append('%s="%s"' % (name.lower(), self.attval(str(value))))
parts.append(f'{name.lower()}="{self.attval(str(value))}"')
if empty:
infix = " /"
else:
infix = ""
return "".join(prefix) + "<%s%s>" % (" ".join(parts), infix) + suffix
return "".join(prefix) + "<{}{}>".format(" ".join(parts), infix) + suffix

def end_state(
self,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -38,6 +38,6 @@ def test_mdx_builder(sphinx_build):
assert expected_file.exists(), f"{expected_file} was not generated"

# Optionally, check the content of the generated files
with open(sphinx_build / "datasets" / "index.mdx", "r") as f:
with open(sphinx_build / "datasets" / "index.mdx") as f:
content = f.read()
assert "This folder contains test sources." in content
Original file line number Diff line number Diff line change
Expand Up @@ -70,7 +70,7 @@ def has_output(self, context) -> bool:
version_fpath = fpath + ".version"
if not os.path.exists(version_fpath):
return False
with open(version_fpath, "r", encoding="utf8") as f:
with open(version_fpath, encoding="utf8") as f:
version = f.read()

return version == context.version
Expand Down
4 changes: 2 additions & 2 deletions examples/deploy_ecs/tests/test_deploy.py
Original file line number Diff line number Diff line change
Expand Up @@ -80,7 +80,7 @@ def source_code(reference_deployment, tmpdir):
@pytest.fixture
def overridden_dockerfile(source_code):
# Override Dockerfile to copy our source code into the container
with open("Dockerfile", "r", encoding="utf8") as f:
with open("Dockerfile", encoding="utf8") as f:
dockerfile = f.readlines()
# Copy the files in directly after we set the WORKDIR
index = dockerfile.index("WORKDIR $DAGSTER_HOME\n") + 1
Expand All @@ -96,7 +96,7 @@ def overridden_dagster_yaml(reference_deployment):
# run on a real ECS cluster whereas DefaultRunLauncher can successfully run
# end-to-end on a local ECS simulation. This is because the local ECS
# simulation doesn't mock out the ECS API in its entirety.
with open("dagster.yaml", "r", encoding="utf8") as f:
with open("dagster.yaml", encoding="utf8") as f:
dagster_yaml = yaml.safe_load(f)
dagster_yaml["run_launcher"] = {
"module": "dagster.core.launcher",
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import mock
from unittest import mock

from dagster_aws.s3 import S3FileHandle, S3FileManager

import dagster as dg
Expand Down
Original file line number Diff line number Diff line change
@@ -1,4 +1,5 @@
import mock
from unittest import mock

from dagster_aws.s3 import S3FileHandle, S3FileManager

import dagster as dg
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -360,7 +360,7 @@ def adhoc_request_sensor(context: dg.SensorEvaluationContext):
filename not in previous_state
or previous_state[filename] != last_modified
):
with open(file_path, "r") as f:
with open(file_path) as f:
request_config = json.load(f)

runs_to_request.append(
Expand Down
Original file line number Diff line number Diff line change
@@ -1,6 +1,5 @@
from typing import Any, Mapping, Sequence

from mock import MagicMock
from unittest.mock import MagicMock

from dagster import (
AssetCheckResult,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -7,7 +7,7 @@

@asset(deps=["code_location_1_asset"])
def code_location_2_asset():
with open("/data/code_location_1_asset.json", "r") as f:
with open("/data/code_location_1_asset.json") as f:
x = json.load(f)

with open("/data/code_location_2_asset.json", "w+") as f:
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -9,7 +9,7 @@
Definitions,
OpExecutionContext,
)
from mock import MagicMock
from unittest.mock import MagicMock


def create_db_connection():
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -103,7 +103,7 @@ def repo():
# start_repo_main
if __name__ == "__main__":
prod_op_hooks_run_config_yaml = file_relative_path(__file__, "prod_op_hooks.yaml")
with open(prod_op_hooks_run_config_yaml, "r", encoding="utf8") as fd:
with open(prod_op_hooks_run_config_yaml, encoding="utf8") as fd:
run_config = yaml.safe_load(fd.read())

notif_all_prod.execute_in_process(run_config=run_config, raise_on_error=False)
Expand Down
Loading