JB Lovland committed Jan 10, 2024
1 parent 2e1eb9b commit 66e91ce
Showing 12 changed files with 48 additions and 56 deletions.
Binary file added .DS_Store
4 changes: 1 addition & 3 deletions .github/workflows/ci-fmudataio.yml
@@ -18,11 +18,9 @@ jobs:

    steps:
      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0

      - name: Set up Python
-        uses: actions/setup-python@v2
+        uses: actions/setup-python@v4
        with:
          python-version: ${{ matrix.python-version }}
2 changes: 0 additions & 2 deletions .github/workflows/fmudataio-documention.yml
@@ -20,8 +20,6 @@ jobs:

    steps:
      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0

      - name: Set up Python
        uses: actions/setup-python@v4
6 changes: 4 additions & 2 deletions .github/workflows/fmudataio-publish-pypi.yml
@@ -10,16 +10,18 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0

+      - name: Set up Python 3.10
+        uses: actions/setup-python@v4
+        with:
+          python-version: "3.10"

      - name: Install pypa/build
        run: python -m pip install build twine

      - name: Build package
        run: python -m build . --sdist --wheel --outdir dist/

      - name: Upload deploy
        env:
          TWINE_USERNAME: __token__
2 changes: 0 additions & 2 deletions .github/workflows/linting.yml
@@ -10,8 +10,6 @@ jobs:
        python-version: ["3.10"]
    steps:
      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0
      - name: Set up python
        uses: actions/setup-python@v4
      - name: Install dev-env.
14 changes: 4 additions & 10 deletions .github/workflows/mypy.yml
@@ -7,23 +7,17 @@ jobs:
    runs-on: ubuntu-latest
    strategy:
      matrix:
-        python-version: ["3.10"]
+        python-version: ["3.8", "3.10"]
    steps:
      - uses: actions/checkout@v4
-        with:
-          fetch-depth: 0

      - name: Set up python
        uses: actions/setup-python@v4
-      - name: Cache pip
-        uses: actions/cache@v3
-        with:
-          path: ~/.cache/pip
-          key: ${{ runner.os }}-pip-${{ hashFiles('**/pyproject.toml') }}
-          restore-keys: |
-            ${{ runner.os }}-pip-

      - name: Install dev-env.
        run: |
          pip install -U pip
          pip install ".[dev]"
      - name: Mypy
        run: mypy .
10 changes: 5 additions & 5 deletions src/fmu/dataio/_definitions.py
@@ -59,7 +59,7 @@ class _ValidFormats:
)


-ALLOWED_CONTENTS = {
+ALLOWED_CONTENTS: Final = {
    "depth": None,
    "time": None,
    "thickness": None,
@@ -92,14 +92,14 @@ class _ValidFormats:
    "transmissibilities": None,
}

-STANDARD_TABLE_INDEX_COLUMNS = {
+STANDARD_TABLE_INDEX_COLUMNS: Final = {
    "inplace_volumes": ["ZONE", "REGION", "FACIES", "LICENCE"],
    "timeseries": ["DATE"],  # summary
    "rft": ["measured_depth", "well", "time"],
    "wellpicks": ["WELL", "HORIZON"],
}

-DEPRECATED_CONTENTS = {
+DEPRECATED_CONTENTS: Final = {
    "seismic": {
        "offset": {
            "replaced_by": "stacking_offset",
@@ -109,15 +109,15 @@ class _ValidFormats:

# This setting will set if subkeys is required or not. If not found in list then
# assume False.
-CONTENTS_REQUIRED = {
+CONTENTS_REQUIRED: Final = {
    "fluid_contact": {"contact": True},
    "field_outline": {"contact": False},
    "field_region": {"id": True},
}

# This setting sets the FMU context for the output. If detected as a non-fmu run,
# the code will internally set actual_context=None
-ALLOWED_FMU_CONTEXTS = {
+ALLOWED_FMU_CONTEXTS: Final = {
    "realization": "To realization-N/iter_M/share",
    "case": "To casename/share, but will also work on project disk",
    "case_symlink_realization": "To case/share, with symlinks on realizations level",
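The `Final` annotations above mark these module-level dicts as constants for mypy: rebinding the name anywhere later becomes a type error. A minimal sketch of the effect (standalone, not fmu-dataio code):

```python
from typing import Final

ALLOWED_CONTENTS: Final = {"depth": None, "time": None}

# mypy reports: Cannot assign to final name "ALLOWED_CONTENTS"
ALLOWED_CONTENTS = {}
```

Note that `Final` only guards against rebinding; it is not enforced at runtime, and the dict's contents stay mutable.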
9 changes: 5 additions & 4 deletions src/fmu/dataio/_design_kw.py
@@ -27,7 +27,6 @@ def run(
    # If FWL key is having multiple entries in the parameters file
    # KeyError is raised. This will be logged, and no OK
    # file is written
-
    _logger.setLevel(log_level)

    with open(parameters_file_name) as parameters_file:
@@ -40,19 +39,21 @@
    with open(template_file_name) as template_file:
        template = template_file.readlines()

+    valid = True
    with open(result_file_name, "w") as result_file:
        for line in template:
            if not is_comment(line):
                for key, value in key_vals.items():
                    line = line.replace(f"<{key}>", str(value))

                if not all_matched(line, template_file_name, template):
-                    pass
+                    valid = False

            result_file.write(line)

-    with open(_STATUS_FILE_NAME, "w") as status_file:
-        status_file.write("DESIGN_KW OK\n")
+    if valid:
+        with open(_STATUS_FILE_NAME, "w") as status_file:
+            status_file.write("DESIGN_KW OK\n")


def all_matched(line: str, template_file_name: str, template: list[str]) -> bool:
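This is the one behavioral fix in the commit: previously the unmatched-key branch was a bare `pass`, so the `DESIGN_KW OK` status file was written even when the template still contained unresolved `<key>` placeholders. With the `valid` flag, the OK file is only written when every placeholder matched. A toy illustration of the pattern (sample data and the regex check are mine, not the module's):

```python
import re

template = ["depth = <DEPTH>\n", "fwl = <FWL>\n"]
key_vals = {"DEPTH": 1700}

valid = True
result = []
for line in template:
    for key, value in key_vals.items():
        line = line.replace(f"<{key}>", str(value))
    if re.search(r"<\w+>", line):  # an unresolved placeholder remains
        valid = False
    result.append(line)

if valid:
    print("DESIGN_KW OK")  # with <FWL> unresolved, this is NOT printed
```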
1 change: 0 additions & 1 deletion src/fmu/dataio/_filedata_provider.py
@@ -68,7 +68,6 @@ def __post_init__(self) -> None:

    def derive_filedata(self) -> None:
        relpath, symrelpath = self._get_path()
-        assert relpath is not None
        relative, absolute = self._derive_filedata_generic(relpath)
        self.relative_path = relative
        self.absolute_path = absolute
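The dropped `assert` was presumably there to narrow an `Optional` return for mypy. A hedged sketch (hypothetical signatures, not the actual `_get_path` API) of why tightening a return annotation makes such asserts redundant:

```python
from typing import Optional


def get_path_old() -> Optional[str]:
    return "share/results/maps"


def get_path_new() -> str:  # annotated as never returning None
    return "share/results/maps"


relpath = get_path_old()
assert relpath is not None  # narrows Optional[str] to str for mypy
print(relpath.upper())

print(get_path_new().upper())  # already str: no assert needed
```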
40 changes: 20 additions & 20 deletions src/fmu/dataio/_objectdata_provider.py
@@ -89,7 +89,7 @@
from dataclasses import dataclass, field
from datetime import datetime as dt
from pathlib import Path
-from typing import Any, Dict, Final, Optional
+from typing import Any, Final, Optional
from warnings import warn

import numpy as np
@@ -159,7 +159,7 @@ def _derive_name_stratigraphy(self) -> dict:
        """
        logger.info("Evaluate data:name attribute and stratigraphy")
-        result: Dict[str, Any] = {}
+        result: dict[str, Any] = {}

        name = self.dataio.name

@@ -209,7 +209,7 @@ def _validate_get_ext(
    def _derive_objectdata(self) -> dict:
        """Derive object spesific data."""
        logger.info("Evaluate data settings for object")
-        result: Dict[str, Any] = {}
+        result: dict[str, Any] = {}

        if isinstance(self.obj, xtgeo.RegularSurface):
            result["subtype"] = "RegularSurface"
@@ -393,14 +393,14 @@ def _derive_spec_bbox_polygons(self) -> tuple[dict, dict]:
        bbox["zmax"] = float(zmax)
        return specs, bbox

-    def _derive_spec_bbox_points(self) -> tuple[Dict[str, Any], Dict[str, Any]]:
+    def _derive_spec_bbox_points(self) -> tuple[dict[str, Any], dict[str, Any]]:
        """Process/collect the data.spec and data.bbox for Points"""
        logger.info("Derive bbox and specs for Points")
        pnts = self.obj

-        specs: Dict[str, Any] = {}
+        specs: dict[str, Any] = {}

-        bbox: Dict[str, Any] = {}
+        bbox: dict[str, Any] = {}

        if len(pnts.dataframe.columns) > 3:
            attrnames = pnts.dataframe.columns[3:]
@@ -482,8 +482,8 @@ def _derive_spec_bbox_cpgridproperty(self) -> tuple[dict, dict]:
        logger.info("Derive bbox and specs for GridProperty")
        gridprop = self.obj

-        specs: Dict[str, Any] = {}
-        bbox: Dict[str, Any] = {}
+        specs: dict[str, Any] = {}
+        bbox: dict[str, Any] = {}

        specs["ncol"] = gridprop.ncol
        specs["nrow"] = gridprop.nrow
@@ -493,15 +493,15 @@ def _derive_spec_bbox_cpgridproperty(self) -> tuple[dict, dict]:
    def _derive_spec_bbox_dataframe(
        self,
    ) -> tuple[
-        Dict[str, Any],
-        Dict[str, Any],
+        dict[str, Any],
+        dict[str, Any],
    ]:
        """Process/collect the data items for DataFrame."""
        logger.info("Process data metadata for DataFrame (tables)")
        dfr = self.obj

-        specs: Dict[str, Any] = {}
-        bbox: Dict[str, Any] = {}
+        specs: dict[str, Any] = {}
+        bbox: dict[str, Any] = {}

        specs["columns"] = list(dfr.columns)
        specs["size"] = int(dfr.size)
@@ -511,22 +511,22 @@ def _derive_spec_bbox_dataframe(
    def _derive_spec_bbox_arrowtable(
        self,
    ) -> tuple[
-        Dict[str, Any],
-        Dict[str, Any],
+        dict[str, Any],
+        dict[str, Any],
    ]:
        """Process/collect the data items for Arrow table."""
        logger.info("Process data metadata for arrow (tables)")
        table = self.obj

-        specs: Dict[str, Any] = {}
-        bbox: Dict[str, Any] = {}
+        specs: dict[str, Any] = {}
+        bbox: dict[str, Any] = {}

        specs["columns"] = list(table.column_names)
        specs["size"] = table.num_columns * table.num_rows

        return specs, bbox

-    def _derive_spec_bbox_dict(self) -> tuple[Dict[str, Any], Dict[str, Any]]:
+    def _derive_spec_bbox_dict(self) -> tuple[dict[str, Any], dict[str, Any]]:
        """Process/collect the data items for dictionary."""
        logger.info("Process data metadata for dictionary")
        return {}, {}
@@ -591,12 +591,12 @@ def _derive_timedata(self) -> dict:
        timedata = self._derive_timedata_newformat()
        return timedata

-    def _derive_timedata_legacy(self) -> Dict[str, Any]:
+    def _derive_timedata_legacy(self) -> dict[str, Any]:
        """Format input timedata to metadata. legacy version."""
        # TODO(JB): Covnert tresult to TypedDict or Dataclass.
        tdata = self.dataio.timedata

-        tresult: Dict[str, Any] = {}
+        tresult: dict[str, Any] = {}
        tresult["time"] = []
        if len(tdata) == 1:
            elem = tdata[0]
@@ -640,7 +640,7 @@ def _derive_timedata_newformat(self) -> dict[str, Any]:
        set for those who wants it turned around).
        """
        tdata = self.dataio.timedata
-        tresult: Dict[str, Any] = {}
+        tresult: dict[str, Any] = {}

        if len(tdata) == 1:
            elem = tdata[0]
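Every change in this file is the same PEP 585 swap: `typing.Dict[str, Any]` becomes the builtin `dict[str, Any]`, which is subscriptable at runtime from Python 3.9 and legal in annotations on older interpreters when `from __future__ import annotations` is in effect (presumably the case here, given the 3.8 entry added to the mypy matrix). A minimal sketch of the idiom:

```python
from __future__ import annotations  # defers evaluation: legal even on 3.8

from typing import Any


def derive_specs(columns: list[str]) -> dict[str, Any]:
    """Builtin generics (PEP 585) replace typing.Dict and typing.List."""
    specs: dict[str, Any] = {"columns": columns, "size": len(columns)}
    return specs


print(derive_specs(["ZONE", "REGION"]))
```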
7 changes: 5 additions & 2 deletions src/fmu/dataio/_utils.py
@@ -13,7 +13,7 @@
from copy import deepcopy
from datetime import datetime
from pathlib import Path
-from typing import Any, Final
+from typing import Any, Final, Literal

import pandas as pd
import yaml
@@ -83,7 +83,10 @@ def drop_nones(dinput: dict) -> dict:


def export_metadata_file(
-    yfile: Path, metadata: dict, savefmt: str = "yaml", verbosity: str = "WARNING"
+    yfile: Path,
+    metadata: dict,
+    savefmt: Literal["yaml", "json"] = "yaml",
+    verbosity: str = "WARNING",
) -> None:
    """Export genericly and ordered to the complementary metadata file."""
    logger.setLevel(level=verbosity)
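Typing `savefmt` as `Literal["yaml", "json"]` instead of plain `str` lets mypy reject unsupported formats at the call site rather than leaving them to fail (or silently misbehave) at runtime. A minimal standalone sketch, not the real function:

```python
from typing import Literal


def export(savefmt: Literal["yaml", "json"] = "yaml") -> str:
    return f"exporting as {savefmt}"


print(export("json"))  # accepted
print(export("xml"))   # still runs, but mypy flags the argument as
                       # incompatible with Literal["yaml", "json"]
```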
9 changes: 4 additions & 5 deletions src/fmu/dataio/dataio.py
@@ -11,7 +11,7 @@
from copy import deepcopy
from dataclasses import dataclass, field
from pathlib import Path
-from typing import Any, ClassVar, Final, List, Optional, Union
+from typing import Any, ClassVar, Final, List, Literal, Optional, Union
from warnings import warn

import pandas as pd
@@ -74,7 +74,6 @@ def _validate_variable(key: str, value: type, legals: dict[str, str | type]) ->
        validcheck = valid_type

    if "typing." not in str(validcheck):
-        print(f"{value=}, {validcheck=}, {type(value)=}, {type(validcheck)=}")
        if not isinstance(value, validcheck):
            logger.warning("Wrong type of value, raise an error")
            raise ValidationError(
@@ -550,7 +549,7 @@ class ExportData:
    grid_fformat: ClassVar[str] = "roff"
    include_ert2jobs: ClassVar[bool] = False  # if True, include jobs.json from ERT2
    legacy_time_format: ClassVar[bool] = False
-    meta_format: ClassVar[str] = "yaml"
+    meta_format: ClassVar[Literal["yaml", "json"]] = "yaml"
    polygons_fformat: ClassVar[str] = "csv"  # or use "csv|xtgeo"
    points_fformat: ClassVar[str] = "csv"  # or use "csv|xtgeo"
    surface_fformat: ClassVar[str] = "irap_binary"
@@ -976,7 +975,7 @@ class InitializeCase:  # pylint: disable=too-few-public-methods
    """

    # class variables
    meta_format: ClassVar[Literal["yaml", "json"]] = "yaml"

    # instance
    config: dict
@@ -1209,7 +1208,7 @@ class AggregatedData:
    """

    # class variable(s)
-    meta_format: ClassVar[str] = "yaml"
+    meta_format: ClassVar[Literal["yaml", "json"]] = "yaml"

    # instance
    aggregation_id: Optional[str] = None
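The same `Literal["yaml", "json"]` union is applied to the `meta_format` class variable in `ExportData`, `InitializeCase`, and `AggregatedData`, keeping those settings consistent with the `savefmt` parameter of `export_metadata_file` in `_utils.py`. A small sketch of how `ClassVar` and `Literal` combine on a dataclass (illustrative names only):

```python
from dataclasses import dataclass
from typing import ClassVar, Literal


@dataclass
class Exporter:
    # class-level setting: shared across instances, excluded from __init__
    meta_format: ClassVar[Literal["yaml", "json"]] = "yaml"
    name: str = "mycase"


Exporter.meta_format = "json"   # fine: "json" is in the Literal union
# Exporter.meta_format = "toml" # mypy would reject this assignment
print(Exporter(name="demo").name, Exporter.meta_format)
```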
