Skip to content

Commit

Permalink
Black, mypy and pyupgrade --py3-plus
Browse files Browse the repository at this point in the history
  • Loading branch information
JB Lovland committed Jan 8, 2024
1 parent 975c21c commit 1a94336
Show file tree
Hide file tree
Showing 26 changed files with 391 additions and 318 deletions.
3 changes: 3 additions & 0 deletions .gitignore
Original file line number Diff line number Diff line change
Expand Up @@ -96,3 +96,6 @@ venv.bak/

# setuptools_scm version
src/fmu/dataio/version.py

# mypy
.dmypy.json
9 changes: 4 additions & 5 deletions examples/s/d/nn/_project/aggregate_surfaces.py
Original file line number Diff line number Diff line change
@@ -1,12 +1,12 @@
"""Use fmu-dataio for aggregated surfaces created by an aggregation service."""

from pathlib import Path
import logging
from pathlib import Path

import yaml
import numpy as np

import xtgeo
import yaml

import fmu.dataio


Expand Down Expand Up @@ -67,7 +67,6 @@ def main():
)

for operation in operations:

print(f"Running aggregation: {operation}")

# Call the aggregation machine and create an aggregated surface
Expand Down Expand Up @@ -128,7 +127,7 @@ def _parse_yaml(fname):
dict
"""

with open(fname, "r") as stream:
with open(fname) as stream:
data = yaml.safe_load(stream)
return data

Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -6,10 +6,12 @@
For the file case, CSV files are read from disk. The dataio function is the same.
"""
import pathlib

import pandas as pd
import fmu.dataio
from fmu.config import utilities as ut

import fmu.dataio

CFG = ut.yaml_load("../../fmuconfig/output/global_variables.yml")

IN_ROXAR = False
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -34,7 +34,6 @@ def export_faultlines():
)

for hname in HORISONNAMES:

# RMS version for reading polygons from a project:
# poly = xtgeo.polygons_from_roxar(project, hname, RMS_POL_CATEGORY)

Expand Down
8 changes: 8 additions & 0 deletions mypy.ini
Original file line number Diff line number Diff line change
@@ -1,4 +1,12 @@
[mypy]
disallow_untyped_defs = True
extra_checks = True
ignore_missing_imports = True
strict_equality = True
warn_redundant_casts = True
warn_unused_configs = True
warn_unused_ignores = True
exclude = ^((tests|docs|examples|bin)/|conftest.py?)

[mypy-numpy.*]
# Applies to Python 3.6:
Expand Down
27 changes: 14 additions & 13 deletions pyproject.toml
Original file line number Diff line number Diff line change
Expand Up @@ -30,12 +30,12 @@ classifiers = [
]
dynamic = ["version"]
dependencies = [
"xtgeo>=2.16",
"PyYAML",
"pyarrow",
"fmu-config>=1.1.0",
"pandas",
"numpy",
"pandas",
"pyarrow",
"PyYAML",
"xtgeo>=2.16",
]

[project.urls]
Expand All @@ -49,27 +49,28 @@ dev = [
"black",
"coverage>=4.1",
"flake8",
"hypothesis",
"isort",
"jsonschema",
"hypothesis",
"mypy",
"pydocstyle",
"pylint",
"pytest",
"pytest-cov",
"pydocstyle",
"pytest-runner",
"pytest-mock",
"termcolor",
"pytest-runner",
"pytest",
"rstcheck",
"termcolor",
"types-PyYAML",
]
docs = [
"pydocstyle",
"Sphinx<7",
"autoapi",
"sphinx-rtd-theme",
"pydocstyle",
"sphinx-autodoc-typehints<1.23",
"sphinxcontrib-apidoc",
"sphinx-rtd-theme",
"sphinx-togglebutton",
"Sphinx<7",
"sphinxcontrib-apidoc",
"urllib3<1.27",
]

Expand Down
1 change: 0 additions & 1 deletion src/fmu/dataio/__init__.py
Original file line number Diff line number Diff line change
Expand Up @@ -4,7 +4,6 @@
from fmu.dataio.dataio import AggregatedData # noqa # type: ignore
from fmu.dataio.dataio import ExportData # noqa # type: ignore
from fmu.dataio.dataio import InitializeCase # noqa # type: ignore

from fmu.dataio.dataio import read_metadata # noqa

try:
Expand Down
2 changes: 1 addition & 1 deletion src/fmu/dataio/_definitions.py
Original file line number Diff line number Diff line change
Expand Up @@ -18,7 +18,7 @@ class _ValidFormats:
points: dict = field(default_factory=dict)
dictionary: dict = field(default_factory=dict)

def __post_init__(self):
def __post_init__(self) -> None:
self.surface = {"irap_binary": ".gri"}
self.grid = {"hdf": ".hdf", "roff": ".roff"}
self.cube = {"segy": ".segy"}
Expand Down
71 changes: 34 additions & 37 deletions src/fmu/dataio/_design_kw.py
Original file line number Diff line number Diff line change
Expand Up @@ -7,66 +7,60 @@

# pylint: disable=logging-fstring-interpolation
import logging
import shlex
import re

import shlex
from typing import Any, Iterable

_STATUS_FILE_NAME = "DESIGN_KW.OK"

_logger = logging.getLogger(__name__)


def run(
template_file_name,
result_file_name,
log_level,
parameters_file_name="parameters.txt",
):
template_file_name: str,
result_file_name: str,
log_level: str,
parameters_file_name: str = "parameters.txt",
) -> None:
# Get all key, value pairs
# If FWL key is having multiple entries in the parameters file
# KeyError is raised. This will be logged, and no OK
# file is written

_logger.setLevel(log_level)

valid = True

with open(parameters_file_name) as parameters_file:
parameters = parameters_file.readlines()

key_vals = extract_key_value(parameters)

key_vals.update(rm_genkw_prefix(key_vals))

with open(template_file_name, "r") as template_file:
with open(template_file_name) as template_file:
template = template_file.readlines()

if valid:
with open(result_file_name, "w") as result_file:
for line in template:
if not is_comment(line):
for key, value in key_vals.items():
line = line.replace(f"<{key}>", str(value))
with open(result_file_name, "w") as result_file:
for line in template:
if not is_comment(line):
for key, value in key_vals.items():
line = line.replace(f"<{key}>", str(value))

if not all_matched(line, template_file_name, template):
valid = False
if not all_matched(line, template_file_name, template):
valid = False

result_file.write(line)
result_file.write(line)

if valid:
with open(_STATUS_FILE_NAME, "w") as status_file:
status_file.write("DESIGN_KW OK\n")
with open(_STATUS_FILE_NAME, "w") as status_file:
status_file.write("DESIGN_KW OK\n")


def all_matched(line, template_file_name, template):
def all_matched(line: str, template_file_name: str, template: list[str]) -> bool:
valid = True
for unmatched in unmatched_templates(line):
if is_perl(template_file_name, template):
_logger.warning( # pylint: disable=logging-fstring-interpolation
(
f"{unmatched} not found in design matrix, "
f"but this is probably a Perl file"
)
f"{unmatched} not found in design matrix, "
f"but this is probably a Perl file"
)
else:
_logger.error( # pylint: disable=logging-fstring-interpolation
Expand All @@ -76,25 +70,24 @@ def all_matched(line, template_file_name, template):
return valid


def is_perl(file_name, template):
return file_name.endswith(".pl") or template[0].find("perl") != -1
def is_perl(file_name: str, template: list[str]) -> bool:
    """Guess whether a template file is a Perl script.

    A file is considered Perl if its name ends in ``.pl``, or if the
    first line of the template mentions ``perl`` (e.g. a shebang line).
    """
    if file_name.endswith(".pl"):
        return True
    # substring membership is equivalent to str.find(...) != -1
    return "perl" in template[0]


def unmatched_templates(line):
def unmatched_templates(line: str) -> list[str]:
    """Return every unresolved ``<...>`` template key remaining in *line*.

    An empty list means all template keys on the line were substituted.
    """
    # re.findall already yields [] when nothing matches, so no separate
    # search() guard is required.
    pattern = re.compile(r"<.+?>")
    return pattern.findall(line)


def is_comment(line):
def is_comment(line: str) -> bool:
    """Return True when *line* is a comment.

    Recognizes Eclipse-style (``--``) and shell/std-style (``#``) comment
    lines. The original regexes were anchored with ``^``, which for a
    non-MULTILINE search is exactly a string-prefix test.
    """
    return line.startswith(("--", "#"))


def extract_key_value(parameters):
def extract_key_value(parameters: Iterable[str]) -> dict[str, str]:
"""Parses a list of strings, looking for key-value pairs pr. line
separated by whitespace, into a dictionary.
Expand Down Expand Up @@ -132,7 +125,10 @@ def extract_key_value(parameters):
return res


def rm_genkw_prefix(paramsdict, ignoreprefixes="LOG10_"):
def rm_genkw_prefix(
paramsdict: dict[str, Any],
ignoreprefixes: str | list[str] | None = "LOG10_",
) -> dict[str, Any]:
"""Strip prefixes from keys in a dictionary.
Prefix is any string before a colon. No colon means no prefix.
Expand All @@ -156,7 +152,8 @@ def rm_genkw_prefix(paramsdict, ignoreprefixes="LOG10_"):
ignoreprefixes = []
if isinstance(ignoreprefixes, str):
ignoreprefixes = [ignoreprefixes]
ignoreprefixes = filter(None, ignoreprefixes)

ignoreprefixes = list(filter(None, ignoreprefixes))

for ignore_str in ignoreprefixes:
paramsdict = {
Expand Down
25 changes: 14 additions & 11 deletions src/fmu/dataio/_filedata_provider.py
Original file line number Diff line number Diff line change
Expand Up @@ -3,6 +3,7 @@
Populate and verify stuff in the 'file' block in fmu (partial exception is checksum_md5
as this is convenient to populate later, on demand)
"""
from __future__ import annotations

import logging
from copy import deepcopy
Expand Down Expand Up @@ -41,7 +42,7 @@ class _FileDataProvider:
absolute_path_symlink: Optional[str] = field(default="", init=False)
checksum_md5: Optional[str] = field(default="", init=False)

def __post_init__(self):
def __post_init__(self) -> None:
logger.setLevel(level=self.verbosity)

if self.dataio.name:
Expand All @@ -63,10 +64,11 @@ def __post_init__(self):

self.fmu_context = self.dataio._usecontext # may be None!

logger.info("Initialize %s", __class__)
logger.info("Initialize %s", self.__class__)

def derive_filedata(self):
def derive_filedata(self) -> None:
relpath, symrelpath = self._get_path()
assert relpath is not None
relative, absolute = self._derive_filedata_generic(relpath)
self.relative_path = relative
self.absolute_path = absolute
Expand All @@ -78,7 +80,7 @@ def derive_filedata(self):

logger.info("Derived filedata")

def _derive_filedata_generic(self, inrelpath):
def _derive_filedata_generic(self, inrelpath: Path) -> tuple[str, str]:
"""This works with both normal data and symlinks."""
stem = self._get_filestem()

Expand Down Expand Up @@ -116,7 +118,7 @@ def _derive_filedata_generic(self, inrelpath):
logger.info("Derived filedata")
return str(relpath), str(abspath)

def _get_filestem(self):
def _get_filestem(self) -> str:
"""Construct the file"""

if not self.name:
Expand Down Expand Up @@ -153,15 +155,15 @@ def _get_filestem(self):
stem = stem.replace("__", "_")

# treat norwegian special letters
# BUG(?): What about German letters like "Ü"?
stem = stem.replace("æ", "ae")
stem = stem.replace("ø", "oe")
stem = stem.replace("å", "aa")

return stem

def _get_path(self):
def _get_path(self) -> tuple[Path, Path | None]:
"""Construct and get the folder path(s)."""
dest = None
linkdest = None

dest = self._get_path_generic(mode=self.fmu_context, allow_forcefolder=True)
Expand All @@ -173,7 +175,9 @@ def _get_path(self):

return dest, linkdest

def _get_path_generic(self, mode="realization", allow_forcefolder=True, info=""):
def _get_path_generic(
self, mode: str = "realization", allow_forcefolder: bool = True, info: str = ""
) -> Path:
"""Generically construct and get the folder path and verify."""
dest = None

Expand Down Expand Up @@ -215,8 +219,7 @@ def _get_path_generic(self, mode="realization", allow_forcefolder=True, info="")
warn("Using absolute paths in forcefolder is not recommended!")

# absolute if starts with "/", otherwise relative to outroot
dest = Path(self.dataio.forcefolder)
dest = dest.absolute()
dest = Path(self.dataio.forcefolder).absolute()
self.forcefolder_is_absolute = True

if not allow_forcefolder:
Expand All @@ -232,6 +235,6 @@ def _get_path_generic(self, mode="realization", allow_forcefolder=True, info="")

# check that destination actually exists if verifyfolder is True
if self.dataio.verifyfolder and not dest.exists():
raise IOError(f"Folder {str(dest)} is not present.")
raise OSError(f"Folder {str(dest)} is not present.")

return dest
Loading

0 comments on commit 1a94336

Please sign in to comment.