fix(xpansion): catch Exception when no sensitivity folder in xpansion #1961

Merged
merged 6 commits on Mar 11, 2024
2 changes: 1 addition & 1 deletion antarest/study/business/xpansion_management.py
@@ -365,7 +365,7 @@ def get_xpansion_settings(self, study: Study) -> GetXpansionSettings:
logger.info(f"Getting xpansion settings for study '{study.id}'")
file_study = self.study_storage_service.get_storage(study).get_raw(study)
config_obj = file_study.tree.get(["user", "expansion", "settings"])
with contextlib.suppress(KeyError):
with contextlib.suppress(ChildNotFoundError):
config_obj["sensitivity_config"] = file_study.tree.get(
["user", "expansion", "sensitivity", "sensitivity_in"]
)
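This one-line change is the heart of the PR: the study tree's `get` raises `ChildNotFoundError` (the same error raised in `folder_node.py` below), not `KeyError`, when a path such as `user/expansion/sensitivity` does not exist, so the previous `suppress(KeyError)` did not cover the missing-folder case. Below is a minimal, self-contained sketch of the idea; the `FakeTree` and the local `ChildNotFoundError` are stand-ins for illustration, not the real antarest classes.

```python
import contextlib
from typing import Any, Dict, List, Tuple


class ChildNotFoundError(Exception):
    """Stand-in for the study-tree error raised when a node is missing
    (the real class lives in the filesystem layer, not shown here)."""


class FakeTree:
    """Tiny stand-in for `file_study.tree`: only the settings node exists."""

    def __init__(self) -> None:
        self._nodes: Dict[Tuple[str, ...], Any] = {
            ("user", "expansion", "settings"): {"master": "integer"},
        }

    def get(self, url: List[str]) -> Any:
        if tuple(url) not in self._nodes:
            raise ChildNotFoundError(f"{'/'.join(url)} not found")
        return self._nodes[tuple(url)]


tree = FakeTree()
config_obj = tree.get(["user", "expansion", "settings"])

# The sensitivity folder is optional: when it is absent, keep the default
# settings instead of letting the error propagate to the API caller.
with contextlib.suppress(ChildNotFoundError):
    config_obj["sensitivity_config"] = tree.get(["user", "expansion", "sensitivity", "sensitivity_in"])

print(config_obj)  # {'master': 'integer'} (no crash when the folder is missing)
```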
2 changes: 1 addition & 1 deletion antarest/study/storage/rawstudy/ini_reader.py
@@ -109,7 +109,7 @@ def read(self, path: t.Any) -> JSON:
sections = self._parse_ini_file(f)
except FileNotFoundError:
# If the file is missing, an empty dictionary is returned.
# This is required tp mimic the behavior of `configparser.ConfigParser`.
# This is required to mimic the behavior of `configparser.ConfigParser`.
return {}

elif hasattr(path, "read"):
46 changes: 20 additions & 26 deletions antarest/study/storage/rawstudy/model/filesystem/bucket_node.py
@@ -1,4 +1,4 @@
from typing import Any, Callable, Dict, List, Optional
import typing as t

from antarest.core.model import JSON, SUB_JSON
from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig
@@ -12,7 +12,7 @@ class RegisteredFile:
def __init__(
self,
key: str,
node: Optional[Callable[[ContextServer, FileStudyTreeConfig], INode[Any, Any, Any]]],
node: t.Optional[t.Callable[[ContextServer, FileStudyTreeConfig], INode[t.Any, t.Any, t.Any]]],
filename: str = "",
):
self.key = key
@@ -29,42 +29,36 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
registered_files: Optional[List[RegisteredFile]] = None,
default_file_node: Callable[..., INode[Any, Any, Any]] = RawFileNode,
registered_files: t.Optional[t.List[RegisteredFile]] = None,
default_file_node: t.Callable[..., INode[t.Any, t.Any, t.Any]] = RawFileNode,
):
super().__init__(context, config)
self.registered_files: List[RegisteredFile] = registered_files or []
self.default_file_node: Callable[..., INode[Any, Any, Any]] = default_file_node
self.registered_files: t.List[RegisteredFile] = registered_files or []
self.default_file_node: t.Callable[..., INode[t.Any, t.Any, t.Any]] = default_file_node

def _get_registered_file(self, key: str) -> Optional[RegisteredFile]:
for registered_file in self.registered_files:
if registered_file.key == key:
return registered_file
return None
def _get_registered_file_by_key(self, key: str) -> t.Optional[RegisteredFile]:
return next((rf for rf in self.registered_files if rf.key == key), None)

def _get_registered_file_from_filename(self, filename: str) -> Optional[RegisteredFile]:
for registered_file in self.registered_files:
if registered_file.filename == filename:
return registered_file
return None
def _get_registered_file_by_filename(self, filename: str) -> t.Optional[RegisteredFile]:
return next((rf for rf in self.registered_files if rf.filename == filename), None)

def save(
self,
data: SUB_JSON,
url: Optional[List[str]] = None,
url: t.Optional[t.List[str]] = None,
) -> None:
self._assert_not_in_zipped_file()
if not self.config.path.exists():
self.config.path.mkdir()

if url is None or len(url) == 0:
assert isinstance(data, Dict)
if not url:
assert isinstance(data, dict)
for key, value in data.items():
self._save(value, key)
else:
key = url[0]
if len(url) > 1:
registered_file = self._get_registered_file(key)
registered_file = self._get_registered_file_by_key(key)
if registered_file:
node = registered_file.node or self.default_file_node
node(self.context, self.config.next_file(key)).save(data, url[1:])
@@ -74,7 +68,7 @@ def save(
self._save(data, key)

def _save(self, data: SUB_JSON, key: str) -> None:
registered_file = self._get_registered_file(key)
registered_file = self._get_registered_file_by_key(key)
if registered_file:
node, filename = (
registered_file.node or self.default_file_node,
@@ -88,12 +82,12 @@ def _save(self, data: SUB_JSON, key: str) -> None:
BucketNode(self.context, self.config.next_file(key)).save(data)

def build(self) -> TREE:
if not self.config.path.exists():
return dict()
if not self.config.path.is_dir():
return {}

children: TREE = {}
for item in sorted(self.config.path.iterdir()):
registered_file = self._get_registered_file_from_filename(item.name)
registered_file = self._get_registered_file_by_filename(item.name)
if registered_file:
node = registered_file.node or self.default_file_node
children[registered_file.key] = node(self.context, self.config.next_file(item.name))
@@ -107,7 +101,7 @@ def build(self) -> TREE:
def check_errors(
self,
data: JSON,
url: Optional[List[str]] = None,
url: t.Optional[t.List[str]] = None,
raising: bool = False,
) -> List[str]:
) -> t.List[str]:
return []
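Two refactorings in `BucketNode` accompany the fix: the lookup helpers are renamed to `_get_registered_file_by_key` / `_get_registered_file_by_filename` and rewritten with the `next(generator, default)` idiom, and `build()` now checks `is_dir()` instead of `exists()`, returning an empty tree when the bucket path is not a directory. A small sketch of the `next(..., None)` idiom, with a placeholder `RegisteredFile` defined locally for the example:

```python
from typing import NamedTuple, Optional, Sequence


class RegisteredFile(NamedTuple):
    """Placeholder with just the fields the sketch needs."""
    key: str
    filename: str


def find_by_key(files: Sequence[RegisteredFile], key: str) -> Optional[RegisteredFile]:
    # next() consumes the generator until the first match; the second
    # argument is returned when nothing matches, so no StopIteration is raised.
    return next((rf for rf in files if rf.key == key), None)


files = [
    RegisteredFile("candidates", "candidates.ini"),
    RegisteredFile("settings", "settings.ini"),
]
print(find_by_key(files, "settings"))  # RegisteredFile(key='settings', filename='settings.ini')
print(find_by_key(files, "missing"))   # None
```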
32 changes: 16 additions & 16 deletions antarest/study/storage/rawstudy/model/filesystem/folder_node.py
@@ -1,7 +1,7 @@
import shutil
import typing as t
from abc import ABC, abstractmethod
from http import HTTPStatus
from typing import Dict, List, Optional, Tuple, Union

from fastapi import HTTPException

@@ -38,7 +38,7 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
children_glob_exceptions: Optional[List[str]] = None,
children_glob_exceptions: t.Optional[t.List[str]] = None,
) -> None:
super().__init__(config)
self.context = context
@@ -50,11 +50,11 @@ def build(self) -> TREE:

def _forward_get(
self,
url: List[str],
url: t.List[str],
depth: int = -1,
formatted: bool = True,
get_node: bool = False,
) -> Union[JSON, INode[JSON, SUB_JSON, JSON]]:
) -> t.Union[JSON, INode[JSON, SUB_JSON, JSON]]:
children = self.build()
names, sub_url = self.extract_child(children, url)

@@ -84,7 +84,7 @@ def _forward_get(

def _expand_get(
self, depth: int = -1, formatted: bool = True, get_node: bool = False
) -> Union[JSON, INode[JSON, SUB_JSON, JSON]]:
) -> t.Union[JSON, INode[JSON, SUB_JSON, JSON]]:
if get_node:
return self

@@ -99,19 +99,19 @@ def _get(

def _get(
self,
url: Optional[List[str]] = None,
url: t.Optional[t.List[str]] = None,
depth: int = -1,
formatted: bool = True,
get_node: bool = False,
) -> Union[JSON, INode[JSON, SUB_JSON, JSON]]:
) -> t.Union[JSON, INode[JSON, SUB_JSON, JSON]]:
if url and url != [""]:
return self._forward_get(url, depth, formatted, get_node)
else:
return self._expand_get(depth, formatted, get_node)

def get(
self,
url: Optional[List[str]] = None,
url: t.Optional[t.List[str]] = None,
depth: int = -1,
expanded: bool = False,
formatted: bool = True,
@@ -122,7 +122,7 @@ def get(

def get_node(
self,
url: Optional[List[str]] = None,
url: t.Optional[t.List[str]] = None,
) -> INode[JSON, SUB_JSON, JSON]:
output = self._get(url=url, get_node=True)
assert isinstance(output, INode)
@@ -131,7 +131,7 @@ def save(
def save(
self,
data: SUB_JSON,
url: Optional[List[str]] = None,
url: t.Optional[t.List[str]] = None,
) -> None:
self._assert_not_in_zipped_file()
children = self.build()
@@ -146,7 +146,7 @@ def save(
for key in data:
children[key].save(data[key])

def delete(self, url: Optional[List[str]] = None) -> None:
def delete(self, url: t.Optional[t.List[str]] = None) -> None:
if url and url != [""]:
children = self.build()
names, sub_url = self.extract_child(children, url)
@@ -158,16 +158,16 @@ def delete(self, url: Optional[List[str]] = None) -> None:
def check_errors(
self,
data: JSON,
url: Optional[List[str]] = None,
url: t.Optional[t.List[str]] = None,
raising: bool = False,
) -> List[str]:
) -> t.List[str]:
children = self.build()

if url and url != [""]:
(name,), sub_url = self.extract_child(children, url)
return children[name].check_errors(data, sub_url, raising)
else:
errors: List[str] = []
errors: t.List[str] = []
for key in data:
if key not in children:
msg = f"key={key} not in {list(children.keys())} for {self.__class__.__name__}"
@@ -186,7 +186,7 @@ def denormalize(self) -> None:
for child in self.build().values():
child.denormalize()

def extract_child(self, children: TREE, url: List[str]) -> Tuple[List[str], List[str]]:
def extract_child(self, children: TREE, url: t.List[str]) -> t.Tuple[t.List[str], t.List[str]]:
names, sub_url = url[0].split(","), url[1:]
names = (
list(
@@ -208,6 +208,6 @@ def extract_child(self, children: TREE, url: List[str]) -> Tuple[List[str], List
for name in names:
if name not in children:
raise ChildNotFoundError(f"'{name}' not a child of {self.__class__.__name__}")
if type(children[name]) != child_class:
if not isinstance(children[name], child_class):
raise FilterError("Filter selection has different classes")
return names, sub_url
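The last change in `extract_child` swaps the strict `type(...) != child_class` comparison for `isinstance`, so a child that subclasses the first selected child's class is accepted by comma-filtered URLs instead of raising `FilterError`. A tiny illustration with placeholder node classes (names invented for the sketch):

```python
class FolderNode:
    """Placeholder base node class for the sketch."""


class SpecialFolderNode(FolderNode):
    """Placeholder subclass (e.g. a folder with extra registered files)."""


child_class = type(FolderNode())          # class of the first selected child
special = SpecialFolderNode()

print(type(special) != child_class)       # True  -> old check would raise FilterError
print(isinstance(special, child_class))   # True  -> new check lets the subclass through
```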
40 changes: 34 additions & 6 deletions antarest/study/storage/rawstudy/model/filesystem/json_file_node.py
@@ -1,6 +1,6 @@
import json
import typing as t
from pathlib import Path
from typing import Any, Dict, Optional, cast

from antarest.core.model import JSON
from antarest.study.storage.rawstudy.ini_reader import IReader
@@ -11,13 +11,41 @@


class JsonReader(IReader):
def read(self, path: Any) -> JSON:
if isinstance(path, Path):
return cast(JSON, json.loads(path.read_text(encoding="utf-8")))
return cast(JSON, json.loads(path))
"""
JSON file reader.
"""

def read(self, path: t.Any) -> JSON:
content: t.Union[str, bytes]

if isinstance(path, (Path, str)):
try:
with open(path, mode="r", encoding="utf-8") as f:
content = f.read()
except FileNotFoundError:
# If the file is missing, an empty dictionary is returned,
# to mimic the behavior of `configparser.ConfigParser`.
return {}

elif hasattr(path, "read"):
with path:
content = path.read()

else: # pragma: no cover
raise TypeError(repr(type(path)))

try:
return t.cast(JSON, json.loads(content))
except json.JSONDecodeError as exc:
err_msg = f"Failed to parse JSON file '{path}'"
raise ValueError(err_msg) from exc


class JsonWriter(IniWriter):
"""
JSON file writer.
"""

def write(self, data: JSON, path: Path) -> None:
with open(path, "w") as fh:
json.dump(data, fh)
@@ -28,6 +56,6 @@ def __init__(
self,
context: ContextServer,
config: FileStudyTreeConfig,
types: Optional[Dict[str, Any]] = None,
types: t.Optional[t.Dict[str, t.Any]] = None,
) -> None:
super().__init__(context, config, types, JsonReader(), JsonWriter())
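`JsonReader.read` is rewritten to mirror `IniReader.read`: it accepts a `Path` or `str` as well as a file-like object, returns `{}` when the file is missing (to match `configparser.ConfigParser`), and wraps malformed JSON in a `ValueError` that names the source. A simplified, standalone re-implementation to show the expected behaviour; this is a sketch, not the class added by the PR.

```python
import io
import json
from pathlib import Path
from typing import Any, Dict, Union


def read_json(path: Any) -> Dict[str, Any]:
    """Simplified version of the new JsonReader.read, for illustration only."""
    content: Union[str, bytes]
    if isinstance(path, (Path, str)):
        try:
            content = Path(path).read_text(encoding="utf-8")
        except FileNotFoundError:
            # Missing file -> empty dict, to mimic configparser.ConfigParser.
            return {}
    elif hasattr(path, "read"):
        with path:
            content = path.read()
    else:
        raise TypeError(repr(type(path)))
    try:
        return json.loads(content)
    except json.JSONDecodeError as exc:
        raise ValueError(f"Failed to parse JSON file '{path}'") from exc


print(read_json(Path("does_not_exist.json")))        # {}
print(read_json(io.StringIO('{"enabled": true}')))   # {'enabled': True}
try:
    read_json(io.StringIO("{not json"))
except ValueError as err:
    print(err)                                        # Failed to parse JSON file '<StringIO ...>'
```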
@@ -14,11 +14,7 @@

class Expansion(BucketNode):
registered_files = [
RegisteredFile(
key="candidates",
node=ExpansionCandidates,
filename="candidates.ini",
),
RegisteredFile(key="candidates", node=ExpansionCandidates, filename="candidates.ini"),
RegisteredFile(key="settings", node=ExpansionSettings, filename="settings.ini"),
RegisteredFile(key="capa", node=ExpansionMatrixResources),
RegisteredFile(key="weights", node=ExpansionMatrixResources),