Commit
pyupgrade --py3-plus
JB Lovland committed Jan 8, 2024
1 parent 32576f2 commit 956dcc2
Showing 15 changed files with 63 additions and 64 deletions.
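The hunks below all apply the same few mechanical rewrites: dropping the redundant "r" mode from open(), replacing the IOError alias with the builtin OSError, and moving typing.Optional/Union/Dict/List/Tuple annotations to the PEP 604/585 forms. A hedged before/after sketch of those idioms in one place (illustrative code, not lines taken from the diff):

    from __future__ import annotations  # lets X | Y annotations run on pre-3.10 interpreters

    from pathlib import Path

    # Old idioms removed by this commit:
    #   def load(path: Union[str, Path], default: Optional[Dict[str, int]] = None) -> List[str]:
    #       with open(path, "r") as stream:  # "r" is already the default mode
    #           return stream.readlines()
    #   raise IOError(...)  # IOError has been an alias of OSError since Python 3.3
    #
    # Modern equivalents added by this commit:
    def load(path: str | Path, default: dict[str, int] | None = None) -> list[str]:
        with open(path) as stream:  # the default open() mode is "r"
            return stream.readlines()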
1 change: 0 additions & 1 deletion .dmypy.json

This file was deleted.

3 changes: 3 additions & 0 deletions .gitignore
@@ -96,3 +96,6 @@ venv.bak/

# setuptools_scm version
src/fmu/dataio/version.py
+
+ # mypy
+ .dmypy.json
2 changes: 1 addition & 1 deletion examples/s/d/nn/_project/aggregate_surfaces.py
@@ -127,7 +127,7 @@ def _parse_yaml(fname):
dict
"""

with open(fname, "r") as stream:
with open(fname) as stream:
data = yaml.safe_load(stream)
return data

4 changes: 1 addition & 3 deletions src/fmu/dataio/_design_kw.py
@@ -37,7 +37,7 @@ def run(

key_vals.update(rm_genkw_prefix(key_vals))

with open(template_file_name, "r") as template_file:
with open(template_file_name) as template_file:
template = template_file.readlines()

with open(result_file_name, "w") as result_file:
@@ -60,10 +60,8 @@ def all_matched(line: str, template_file_name: str, template: list[str]) -> bool
for unmatched in unmatched_templates(line):
if is_perl(template_file_name, template):
_logger.warning( # pylint: disable=logging-fstring-interpolation
- (
f"{unmatched} not found in design matrix, "
f"but this is probably a Perl file"
- )
)
else:
_logger.error( # pylint: disable=logging-fstring-interpolation
2 changes: 1 addition & 1 deletion src/fmu/dataio/_filedata_provider.py
@@ -234,6 +234,6 @@ def _get_path_generic(

# check that destination actually exists if verifyfolder is True
if self.dataio.verifyfolder and not dest.exists():
raise IOError(f"Folder {str(dest)} is not present.")
raise OSError(f"Folder {str(dest)} is not present.")

return dest
2 changes: 1 addition & 1 deletion src/fmu/dataio/_fmu_provider.py
@@ -237,7 +237,7 @@ def get_ert2_information(self) -> None:
if self.dataio.include_ert2jobs:
jobs_file = self.iter_path / "jobs.json"
if jobs_file.is_file():
with open(jobs_file, "r") as stream:
with open(jobs_file) as stream:
self.ert2["jobs"] = json.load(stream)
logger.debug("jobs.json parsed.")
logger.debug("jobs.json was not found")
4 changes: 2 additions & 2 deletions src/fmu/dataio/_objectdata_provider.py
@@ -128,7 +128,7 @@ class _ObjectDataProvider:
# input fields
obj: Any
dataio: Any
- meta_existing: Optional[dict] = None
+ meta_existing: dict | None = None

# result properties; the most important is metadata which IS the 'data' part in
# the resulting metadata. But other variables needed later are also given
@@ -687,7 +687,7 @@ def _derive_from_existing(self) -> None:

self.time0, self.time1 = parse_timedata(self.meta_existing["data"])

- def _process_content(self) -> Tuple[str, Optional[dict]]:
+ def _process_content(self) -> tuple[str, dict | None]:
"""Work with the `content` metadata"""

# content == "unset" is not wanted, but in case metadata has been produced while
2 changes: 1 addition & 1 deletion src/fmu/dataio/_oyaml.py
@@ -49,4 +49,4 @@ def map_constructor(loader, node):
# Merge PyYAML namespace into ours.
# This allows users a drop-in replacement:
# import oyaml as yaml
- from yaml import *
+ from yaml import *
(whitespace-only change; the visible text of the line is identical)
30 changes: 15 additions & 15 deletions src/fmu/dataio/_utils.py
@@ -190,17 +190,17 @@ def create_symlink(source: str, target: str) -> None:

thesource = Path(source)
if not thesource.exists():
raise IOError(f"Cannot symlink: Source file {thesource} does not exist.")
raise OSError(f"Cannot symlink: Source file {thesource} does not exist.")

thetarget = Path(target)

if thetarget.exists() and not thetarget.is_symlink():
raise IOError(f"Target file {thetarget} exists already as a normal file.")
raise OSError(f"Target file {thetarget} exists already as a normal file.")

os.symlink(source, target)

if not (thetarget.exists() and thetarget.is_symlink()):
raise IOError(f"Target file {thesource} does not exist or is not a symlink.")
raise OSError(f"Target file {thesource} does not exist or is not a symlink.")


def size(fname: str) -> int:
@@ -213,8 +213,8 @@ def uuid_from_string(string: str) -> str:


def read_parameters_txt(
- pfile: Union[Path, str]
- ) -> Dict[str, Union[str, float, int | None]]:
+ pfile: Path | str
+ ) -> dict[str, str | float | int | None]:
"""Read the parameters.txt file and convert to a dict.
The parameters.txt file has this structure::
SENSNAME rms_seed
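A hedged sketch of what this function yields for a file with that structure (hypothetical values; the parsing body sits in the folded part of the hunk):

    from fmu.dataio._utils import read_parameters_txt

    # parameters.txt, following the structure quoted in the docstring above:
    #   SENSNAME rms_seed
    #   RMS_SEED 1006
    params = read_parameters_txt("parameters.txt")
    # -> roughly {"SENSNAME": "rms_seed", "RMS_SEED": 1006},
    #    consistent with the new dict[str, str | float | int | None] return annotation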
@@ -258,8 +258,8 @@ def read_parameters_txt(


def nested_parameters_dict(
- paramdict: Dict[str, Union[str, int, float]]
- ) -> Dict[str, Union[str, int, float, Dict[str, Union[str, int, float]]]]:
+ paramdict: dict[str, str | int | float]
+ ) -> dict[str, str | int | float | dict[str, str | int | float]]:
"""Interpret a flat parameters dictionary into a nested dictionary, based on
presence of colons in keys.
@@ -268,10 +268,10 @@ def nested_parameters_dict(
In design_kw (semeio) this namespace identifier is actively ignored, meaning that
the keys without the namespace must be unique.
"""
- nested_dict: Dict[
-     str, Union[str, int, float, Dict[str, Union[str, int, float]]]
+ nested_dict: dict[
+     str, str | int | float | dict[str, str | int | float]
] = {}
- unique_keys: List[str] = []
+ unique_keys: list[str] = []
for key, value in paramdict.items():
if ":" in key:
subdict, newkey = key.split(":", 1)
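A quick illustration of the colon-splitting described in the docstring above (hypothetical input, inferred from the split(":", 1) logic shown here):

    from fmu.dataio._utils import nested_parameters_dict

    flat = {"GLOBVAR:RMS_SEED": 1006, "SENSNAME": "rms_seed"}
    nested = nested_parameters_dict(flat)
    # -> {"GLOBVAR": {"RMS_SEED": 1006}, "SENSNAME": "rms_seed"}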
@@ -391,7 +391,7 @@ def filter_validate_metadata(metadata_in: dict) -> dict:
return metadata


- def generate_description(desc: Optional[Union[str, list]] = None) -> Union[list, None]:
+ def generate_description(desc: str | list | None = None) -> list | None:
"""Parse desciption input (generic)."""
if not desc:
return None
@@ -404,7 +404,7 @@ def generate_description(desc: Optional[Union[str, list]] = None) -> Union[list,
raise ValueError("Description of wrong type, must be list of strings or string")


- def read_metadata(filename: Union[str, Path]) -> dict:
+ def read_metadata(filename: str | Path) -> dict:
"""Read the metadata as a dictionary given a filename.
If the filename is e.g. /some/path/mymap.gri, the assosiated metafile
@@ -418,13 +418,13 @@ def read_metadata(filename: Union[str, Path]) -> dict:
"""
fname = Path(filename)
if fname.stem.startswith("."):
raise IOError(f"The input is a hidden file, cannot continue: {fname.stem}")
raise OSError(f"The input is a hidden file, cannot continue: {fname.stem}")

metafile = str(fname.parent) + "/." + fname.stem + fname.suffix + ".yml"
metafilepath = Path(metafile)
if not metafilepath.exists():
raise IOError(f"Cannot find requested metafile: {metafile}")
with open(metafilepath, "r") as stream:
raise OSError(f"Cannot find requested metafile: {metafile}")
with open(metafilepath) as stream:
metacfg = yaml.safe_load(stream)

return metacfg
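The dot-prefixed sibling-file convention built in this hunk implies usage like the following sketch (hypothetical path):

    from fmu.dataio._utils import read_metadata

    # For /some/path/mymap.gri the metafile looked up is /some/path/.mymap.gri.yml
    meta = read_metadata("/some/path/mymap.gri")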
50 changes: 25 additions & 25 deletions src/fmu/dataio/dataio.py
@@ -185,7 +185,7 @@ def _check_global_config(


# the two next content key related function may require refactoring/simplification
- def _check_content(proposed: Union[str, dict]) -> Any:
+ def _check_content(proposed: str | dict) -> Any:
"""Check content and return a validated version."""
logger.info("Evaluate content")

@@ -297,7 +297,7 @@ def _content_validate(name: str, fields: dict[str, type]) -> None:
# ======================================================================================


- def read_metadata(filename: Union[str, Path]) -> dict:
+ def read_metadata(filename: str | Path) -> dict:
"""Read the metadata as a dictionary given a filename.
If the filename is e.g. /some/path/mymap.gri, the assosiated metafile
@@ -562,31 +562,31 @@ class ExportData:
# input keys (alphabetic)
access_ssdl: dict = field(default_factory=dict)
aggregation: bool = False
- casepath: Union[str, Path, None] = None
+ casepath: str | Path | None = None
config: dict = field(default_factory=dict)
- content: Union[dict, str, None] = None
+ content: dict | str | None = None
depth_reference: str = "msl"
- description: Union[str, list] = ""
- display_name: Optional[str] = None
+ description: str | list = ""
+ display_name: str | None = None
fmu_context: str = "realization"
forcefolder: str = ""
- grid_model: Optional[str] = None
+ grid_model: str | None = None
is_observation: bool = False
is_prediction: bool = True
name: str = ""
undef_is_zero: bool = False
parent: str = ""
realization: int = -999
- reuse_metadata_rule: Optional[str] = None
- runpath: Union[str, Path, None] = None
+ reuse_metadata_rule: str | None = None
+ runpath: str | Path | None = None
subfolder: str = ""
tagname: str = ""
- timedata: Optional[List[list]] = None
+ timedata: list[list] | None = None
unit: str = ""
verbosity: str = "CRITICAL"
vertical_domain: dict = field(default_factory=dict)
workflow: str = ""
- table_index: Optional[list] = None
+ table_index: list | None = None

# some keys that are modified version of input, prepended with _use
_usecontent: dict = field(default_factory=dict, init=False)
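For orientation, a hedged sketch of how a few of these ExportData keys are typically passed (hypothetical names and values; the fmu-dataio documentation is authoritative):

    from fmu.dataio import ExportData

    global_cfg: dict = {}  # hypothetical; a real global-variables config dict goes here

    exd = ExportData(
        config=global_cfg,
        name="topvolantis",  # hypothetical horizon name
        content="depth",
        unit="m",
        tagname="ds_extract",  # hypothetical tagname
        timedata=[[20201028, "monitor"], [20180101, "base"]],  # list[list] | None after this change
    )
    # metadata_path = exd.export(surface_object)  # e.g. an xtgeo surface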
@@ -900,7 +900,7 @@ def export(
outfile = Path(metadata["file"]["absolute_path"])
metafile = outfile.parent / ("." + str(outfile.name) + ".yml")

- useflag: Union[bool, str]
+ useflag: bool | str
if isinstance(obj, pd.DataFrame):
useflag = self.table_include_index
else:
@@ -976,11 +976,11 @@ class InitializeCase: # pylint: disable=too-few-public-methods

# instance
config: dict
- rootfolder: Union[str, Path, None] = None
- casename: Optional[str] = None
- caseuser: Optional[str] = None
- restart_from: Optional[str] = None
- description: Union[str, list, None] = None
+ rootfolder: str | Path | None = None
+ casename: str | None = None
+ caseuser: str | None = None
+ restart_from: str | None = None
+ description: str | list | None = None
verbosity: str = "CRITICAL"

_metadata: dict = field(default_factory=dict, init=False)
@@ -1069,7 +1069,7 @@ def generate_metadata(
force: bool = False,
skip_null: bool = True,
**kwargs: object,
- ) -> Union[dict, None]:
+ ) -> dict | None:
"""Generate case metadata.
Args:
@@ -1138,7 +1138,7 @@ def export(
force: bool = False,
skip_null: bool = True,
**kwargs: dict[str, object],
- ) -> Union[str, None]:
+ ) -> str | None:
"""Export case metadata to file.
Args:
@@ -1206,8 +1206,8 @@ class AggregatedData:
meta_format: ClassVar[str] = "yaml"

# instance
- aggregation_id: Optional[str] = None
- casepath: Union[str, Path, None] = None
+ aggregation_id: str | None = None
+ casepath: str | Path | None = None
source_metadata: list = field(default_factory=list)
name: str = ""
operation: str = ""
@@ -1245,7 +1245,7 @@ def _update_settings(self, newsettings: dict) -> None:
logger.setLevel(level=self.verbosity)
logger.info("New setting OK for %s", setting)

- def _construct_filename(self, template: dict) -> Tuple[Path, Path | None]:
+ def _construct_filename(self, template: dict) -> tuple[Path, Path | None]:
"""Construct the paths/filenames for aggregated data.
These filenames are constructed a bit different than in a forward job, since we
@@ -1350,8 +1350,8 @@ def _construct_filename(self, template: dict) -> Tuple[Path, Path | None]:
def _generate_aggrd_metadata(
self,
obj: object,
- real_ids: List[int],
- uuids: List[str],
+ real_ids: list[int],
+ uuids: list[str],
compute_md5: bool = True,
) -> None:
logger.info(
@@ -1497,7 +1497,7 @@ def export(self, obj: object, **kwargs: object) -> str:
abspath = metadata["file"].get("absolute_path", None)

if not abspath:
- raise IOError(
+ raise OSError(
"The absolute_path is None, hence no export is possible. "
"Use the ``casepath`` key to provide a valid absolute path."
)
2 changes: 1 addition & 1 deletion src/fmu/dataio/scripts/create_case_metadata.py
@@ -150,7 +150,7 @@ def register_on_sumo(
def _parse_yaml(path: Path) -> dict:
"""Parse the global variables, return as dict"""

with open(path, "r") as stream:
with open(path) as stream:
data = yaml.safe_load(stream)

return data
7 changes: 3 additions & 4 deletions tests/conftest.py
@@ -147,7 +147,7 @@ def fixture_rmsglobalconfig(rmssetup):
# read the global config
os.chdir(rmssetup)
logger.info("Global config is %s", str(rmssetup / "global_variables.yml"))
with open("global_variables.yml", "r", encoding="utf8") as stream:
with open("global_variables.yml", encoding="utf8") as stream:
global_cfg = yaml.safe_load(stream)

logger.info("Ran setup for %s", "rmsglobalconfig")
@@ -284,7 +284,6 @@ def fixture_globalconfig2() -> dict:
globvar = {}
with open(
ROOTPWD / "tests/data/drogon/global_config2/global_variables.yml",
"r",
encoding="utf-8",
) as stream:
globvar = yaml.safe_load(stream)
@@ -576,15 +575,15 @@ def fixture_drogon_volumes():

def _parse_json(schema_path):
"""Parse the schema, return JSON"""
with open(schema_path, "r", encoding="utf-8") as stream:
with open(schema_path, encoding="utf-8") as stream:
data = json.load(stream)

return data


def _parse_yaml(yaml_path):
"""Parse the filename as json, return data"""
with open(yaml_path, "r", encoding="utf-8") as stream:
with open(yaml_path, encoding="utf-8") as stream:
data = yaml.safe_load(stream)

data = _isoformat_all_datetimes(data)
4 changes: 2 additions & 2 deletions tests/test_units/test_dataio.py
@@ -426,7 +426,7 @@ def test_norwegian_letters_globalconfig(globalvars_norw_letters, regsurf):
# export to file and reread as raw
result = pathlib.Path(edata.export(regsurf))
metafile = result.parent / ("." + str(result.stem) + ".gri.yml")
with open(metafile, "r", encoding="utf-8") as stream:
with open(metafile, encoding="utf-8") as stream:
stuff = stream.read()
assert "DRÅGØN" in stuff

@@ -453,7 +453,7 @@ def test_norwegian_letters_globalconfig_as_json(globalvars_norw_letters, regsurf

result = pathlib.Path(edata.export(regsurf))
metafile = result.parent / ("." + str(result.stem) + ".gri.json")
with open(metafile, "r", encoding="utf-8") as stream:
with open(metafile, encoding="utf-8") as stream:
stuff = stream.read()
assert "DRÅGØN" in stuff

6 changes: 3 additions & 3 deletions tests/test_units/test_dictionary.py
@@ -32,7 +32,7 @@ def _fixture_json(fmurun_w_casemetadata):
os.chdir(fmurun_w_casemetadata)
print(fmurun_w_casemetadata)
with open(
fmurun_w_casemetadata / "parameters.json", "r", encoding="utf-8"
fmurun_w_casemetadata / "parameters.json", encoding="utf-8"
) as stream:
json_dict = json.load(stream)
return json_dict
@@ -89,10 +89,10 @@ def read_dict_and_meta(path):
tuple: the dictionary produced with corresponding metadata
"""
result_dict = None
with open(path, "r", encoding="utf-8") as stream:
with open(path, encoding="utf-8") as stream:
result_dict = json.load(stream)
path = Path(path)
with open(path.parent / f".{path.name}.yml", "r", encoding="utf-8") as meta_stream:
with open(path.parent / f".{path.name}.yml", encoding="utf-8") as meta_stream:
meta = yaml.load(meta_stream, Loader=yaml.Loader)
return result_dict, meta

(The diff for the 15th changed file did not load and is not shown.)