diff --git a/antarest/__init__.py b/antarest/__init__.py index 75af56d75a..258ee329c3 100644 --- a/antarest/__init__.py +++ b/antarest/__init__.py @@ -7,9 +7,9 @@ # Standard project metadata -__version__ = "2.17.3" +__version__ = "2.17.4" __author__ = "RTE, Antares Web Team" -__date__ = "2024-07-18" +__date__ = "2024-07-29" # noinspection SpellCheckingInspection __credits__ = "(c) Réseau de Transport de l’Électricité (RTE)" diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index 66859234fc..6fce8f0213 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -9,6 +9,12 @@ class ShouldNotHappenException(Exception): pass +class MustNotModifyOutputException(Exception): + def __init__(self, file_name: str) -> None: + msg = f"Should not modify output file {file_name}" + super().__init__(msg) + + # ============================================================ # Exceptions related to the study configuration (`.ini` files) # ============================================================ @@ -597,3 +603,8 @@ def __init__(self) -> None: HTTPStatus.BAD_REQUEST, "You cannot scan the default internal workspace", ) + + +class ChildNotFoundError(HTTPException): + def __init__(self, message: str) -> None: + super().__init__(HTTPStatus.NOT_FOUND, message) diff --git a/antarest/study/business/areas/properties_management.py b/antarest/study/business/areas/properties_management.py index 2014c554dc..0bccdad784 100644 --- a/antarest/study/business/areas/properties_management.py +++ b/antarest/study/business/areas/properties_management.py @@ -1,13 +1,13 @@ import re +import typing as t from builtins import sorted -from typing import Any, Dict, Iterable, List, Optional, Set, cast from pydantic import root_validator +from antarest.core.exceptions import ChildNotFoundError from antarest.study.business.utils import FieldInfo, FormFieldsBaseModel, execute_or_add_commands from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig @@ -21,35 +21,35 @@ DEFAULT_FILTER_VALUE = FILTER_OPTIONS -def sort_filter_options(options: Iterable[str]) -> List[str]: +def sort_filter_options(options: t.Iterable[str]) -> t.List[str]: return sorted( options, key=lambda x: FILTER_OPTIONS.index(x), ) -def encode_filter(value: str) -> Set[str]: +def encode_filter(value: str) -> t.Set[str]: stripped = value.strip() return set(re.split(r"\s*,\s*", stripped) if stripped else []) -def decode_filter(encoded_value: Set[str], current_filter: Optional[str] = None) -> str: +def decode_filter(encoded_value: t.Set[str], current_filter: t.Optional[str] = None) -> str: return ", ".join(sort_filter_options(encoded_value)) class PropertiesFormFields(FormFieldsBaseModel): - energy_cost_unsupplied: Optional[float] - energy_cost_spilled: Optional[float] - non_dispatch_power: Optional[bool] - dispatch_hydro_power: Optional[bool] - other_dispatch_power: Optional[bool] - filter_synthesis: Optional[Set[str]] - filter_by_year: Optional[Set[str]] + energy_cost_unsupplied: t.Optional[float] + energy_cost_spilled: t.Optional[float] + non_dispatch_power: t.Optional[bool] + dispatch_hydro_power: t.Optional[bool] + other_dispatch_power: t.Optional[bool] + filter_synthesis: t.Optional[t.Set[str]] + filter_by_year: 
t.Optional[t.Set[str]] # version 830 - adequacy_patch_mode: Optional[AdequacyPatchMode] + adequacy_patch_mode: t.Optional[AdequacyPatchMode] @root_validator - def validation(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def validation(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: filters = { "filter_synthesis": values.get("filter_synthesis"), "filter_by_year": values.get("filter_by_year"), @@ -63,7 +63,7 @@ def validation(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values -FIELDS_INFO: Dict[str, FieldInfo] = { +FIELDS_INFO: t.Dict[str, FieldInfo] = { "energy_cost_unsupplied": { "path": THERMAL_PATH.format(field="unserverdenergycost"), "default_value": 0.0, @@ -116,9 +116,9 @@ def get_field_values( file_study = self.storage_service.get_storage(study).get_raw(study) study_ver = file_study.config.version - def get_value(field_info: FieldInfo) -> Any: - start_ver = cast(int, field_info.get("start_version", 0)) - end_ver = cast(int, field_info.get("end_version", study_ver)) + def get_value(field_info: FieldInfo) -> t.Any: + start_ver = t.cast(int, field_info.get("start_version", 0)) + end_ver = t.cast(int, field_info.get("end_version", study_ver)) is_in_version = start_ver <= study_ver <= end_ver if not is_in_version: return None @@ -139,7 +139,7 @@ def set_field_values( area_id: str, field_values: PropertiesFormFields, ) -> None: - commands: List[UpdateConfig] = [] + commands: t.List[UpdateConfig] = [] file_study = self.storage_service.get_storage(study).get_raw(study) context = self.storage_service.variant_study_service.command_factory.command_context diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index 373f8c3ea4..776f57a039 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -11,6 +11,7 @@ from antarest.core.exceptions import ( AreaNotFound, + ChildNotFoundError, DuplicateSTStorage, STStorageConfigNotFound, STStorageMatrixNotFound, @@ -29,7 +30,6 @@ create_st_storage_config, ) from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.create_st_storage import CreateSTStorage from antarest.study.storage.variantstudy.model.command.remove_st_storage import RemoveSTStorage diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 7f42bb7f59..50220da54a 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -1,5 +1,4 @@ import collections -import itertools import json import logging import typing as t @@ -46,11 +45,11 @@ from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( DEFAULT_GROUP, EXPECTED_MATRIX_SHAPES, - TERM_MATRICES, BindingConstraintMatrices, BindingConstraintPropertiesBase, CreateBindingConstraint, OptionalProperties, + TermMatrices, ) from antarest.study.storage.variantstudy.model.command.remove_binding_constraint import RemoveBindingConstraint from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint @@ -59,6 +58,14 @@ logger = logging.getLogger(__name__) +OPERATOR_CONFLICT_MAP = { + BindingConstraintOperator.EQUAL: 
[TermMatrices.LESS.value, TermMatrices.GREATER.value], + BindingConstraintOperator.GREATER: [TermMatrices.LESS.value, TermMatrices.EQUAL.value], + BindingConstraintOperator.LESS: [TermMatrices.EQUAL.value, TermMatrices.GREATER.value], + BindingConstraintOperator.BOTH: [TermMatrices.EQUAL.value], +} + + class LinkTerm(BaseModel): """ DTO for a constraint term on a link between two areas. @@ -246,7 +253,7 @@ class ConstraintCreation(ConstraintInput): @root_validator(pre=True) def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: - for _key in ["time_step"] + TERM_MATRICES: + for _key in ["time_step"] + [m.value for m in TermMatrices]: _camel = to_camel_case(_key) values[_key] = values.pop(_camel, values.get(_key)) @@ -264,7 +271,7 @@ def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t. # Collect the matrix shapes matrix_shapes = {} - for _field_name in ["values"] + TERM_MATRICES: + for _field_name in ["values"] + [m.value for m in TermMatrices]: if _matrix := values.get(_field_name): _array = np.array(_matrix) # We only store the shape if the array is not empty @@ -330,32 +337,35 @@ def _get_references_by_widths( The height of the matrices may vary depending on the time step, but the width should be consistent within a group of binding constraints. """ - if int(file_study.config.version) < 870: - matrix_id_fmts = {"{bc_id}"} - else: - matrix_id_fmts = {"{bc_id}_eq", "{bc_id}_lt", "{bc_id}_gt"} + operator_matrix_file_map = { + BindingConstraintOperator.EQUAL: ["{bc_id}_eq"], + BindingConstraintOperator.GREATER: ["{bc_id}_gt"], + BindingConstraintOperator.LESS: ["{bc_id}_lt"], + BindingConstraintOperator.BOTH: ["{bc_id}_lt", "{bc_id}_gt"], + } references_by_width: t.Dict[int, t.List[t.Tuple[str, str]]] = {} - _total = len(bcs) * len(matrix_id_fmts) - for _index, (bc, fmt) in enumerate(itertools.product(bcs, matrix_id_fmts), 1): - bc_id = bc.id - matrix_id = fmt.format(bc_id=bc.id) - logger.info(f"⏲ Validating BC '{bc_id}': {matrix_id=} [{_index}/{_total}]") - obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id]) - matrix = np.array(obj["data"], dtype=float) - # We ignore empty matrices as there are default matrices for the simulator. - if not matrix.size: - continue - - matrix_height = matrix.shape[0] - expected_height = EXPECTED_MATRIX_SHAPES[bc.time_step][0] - if matrix_height != expected_height: - raise WrongMatrixHeightError( - f"The binding constraint '{bc.name}' should have {expected_height} rows, currently: {matrix_height}" - ) - matrix_width = matrix.shape[1] - if matrix_width > 1: - references_by_width.setdefault(matrix_width, []).append((bc_id, matrix_id)) + _total = len(bcs) + for _index, bc in enumerate(bcs): + matrices_name = operator_matrix_file_map[bc.operator] if file_study.config.version >= 870 else ["{bc_id}"] + for matrix_name in matrices_name: + matrix_id = matrix_name.format(bc_id=bc.id) + logger.info(f"⏲ Validating BC '{bc.id}': {matrix_id=} [{_index+1}/{_total}]") + obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id]) + matrix = np.array(obj["data"], dtype=float) + # We ignore empty matrices as there are default matrices for the simulator. 
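+ # (For studies >= v8.7 each bound of a constraint has its own matrix file: "{bc_id}_lt", "{bc_id}_eq" and "{bc_id}_gt", + # so only the matrices implied by the constraint's operator are fetched above; earlier versions keep a single "{bc_id}" matrix.)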
+ if not matrix.size: + continue + + matrix_height = matrix.shape[0] + expected_height = EXPECTED_MATRIX_SHAPES[bc.time_step][0] + if matrix_height != expected_height: + raise WrongMatrixHeightError( + f"The binding constraint '{bc.name}' should have {expected_height} rows, currently: {matrix_height}" + ) + matrix_width = matrix.shape[1] + if matrix_width > 1: + references_by_width.setdefault(matrix_width, []).append((bc.id, matrix_id)) return references_by_width @@ -684,7 +694,8 @@ def create_binding_constraint( if bc_id in {bc.id for bc in self.get_binding_constraints(study)}: raise DuplicateConstraintName(f"A binding constraint with the same name already exists: {bc_id}.") - check_attributes_coherence(data, version) + # TODO: the default operator should be fixed somewhere so this condition can be consistent + check_attributes_coherence(data, version, data.operator or BindingConstraintOperator.EQUAL) new_constraint = {"name": data.name, **json.loads(data.json(exclude={"terms", "name"}, exclude_none=True))} args = { @@ -718,8 +729,9 @@ def update_binding_constraint( ) -> ConstraintOutput: file_study = self.storage_service.get_storage(study).get_raw(study) existing_constraint = self.get_binding_constraint(study, binding_constraint_id) + study_version = int(study.version) - check_attributes_coherence(data, study_version) + check_attributes_coherence(data, study_version, data.operator or existing_constraint.operator) upd_constraint = { "id": binding_constraint_id, @@ -740,7 +752,7 @@ def update_binding_constraint( # Validates the matrices. Needed when the study is a variant because we only append the command to the list if isinstance(study, VariantStudy): - updated_matrices = [term for term in TERM_MATRICES if getattr(data, term)] + updated_matrices = [term for term in [m.value for m in TermMatrices] if getattr(data, term)] time_step = data.time_step or existing_constraint.time_step command.validates_and_fills_matrices( time_step=time_step, specific_matrices=updated_matrices, version=study_version, create=False @@ -912,13 +924,17 @@ def _replace_matrices_according_to_frequency_and_version( BindingConstraintFrequency.DAILY.value: default_bc_weekly_daily_87, BindingConstraintFrequency.WEEKLY.value: default_bc_weekly_daily_87, }[data.time_step].tolist() - for term in TERM_MATRICES: + for term in [m.value for m in TermMatrices]: if term not in args: args[term] = matrix return args -def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput], study_version: int) -> None: +def check_attributes_coherence( + data: t.Union[ConstraintCreation, ConstraintInput], + study_version: int, + operator: BindingConstraintOperator, +) -> None: if study_version < 870: if data.group: raise InvalidFieldForVersionError( @@ -928,3 +944,10 @@ def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput raise InvalidFieldForVersionError("You cannot fill a 'matrix_term' as these values refer to v8.7+ studies") elif data.values: raise InvalidFieldForVersionError("You cannot fill 'values' as it refers to the matrix before v8.7") + conflicting_matrices = [ + getattr(data, matrix) for matrix in OPERATOR_CONFLICT_MAP[operator] if getattr(data, matrix) + ] + if conflicting_matrices: + raise InvalidFieldForVersionError( + f"You cannot fill matrices '{OPERATOR_CONFLICT_MAP[operator]}' while using the operator '{operator}'" + ) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 65687af9c9..bc31683139 100644 --- 
a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -4,6 +4,7 @@ import numpy as np import pandas as pd +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.study.business.area_management import AreaManager, AreaOutput from antarest.study.business.areas.renewable_management import RenewableClusterInput, RenewableManager @@ -13,7 +14,6 @@ from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.link_management import LinkManager, LinkOutput from antarest.study.model import RawStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError _TableIndex = str # row name _TableColumn = str # column name diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py index 22c612af9a..66d25860dd 100644 --- a/antarest/study/business/xpansion_management.py +++ b/antarest/study/business/xpansion_management.py @@ -9,14 +9,14 @@ from fastapi import HTTPException, UploadFile from pydantic import BaseModel, Extra, Field, ValidationError, root_validator, validator -from antarest.core.exceptions import BadZipBinary +from antarest.core.exceptions import BadZipBinary, ChildNotFoundError from antarest.core.model import JSON from antarest.study.business.all_optional_meta import AllOptionalMetaclass from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.bucket_node import BucketNode from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError, FolderNode +from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.root.user.expansion.expansion import Expansion from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.utils import fix_study_root diff --git a/antarest/study/service.py b/antarest/study/service.py index 494e7d2f2f..5a2bfcda2a 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -21,6 +21,7 @@ from antarest.core.config import Config from antarest.core.exceptions import ( BadEditInstructionException, + ChildNotFoundError, CommandApplicationError, IncorrectPathError, NotAManagedStudyException, @@ -106,7 +107,6 @@ ) from antarest.study.storage.matrix_profile import adjust_matrix_columns_index from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix diff --git a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py index 3ea51c098d..ba1d859ce3 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py +++ b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py @@ -1,10 +1,8 @@ import shutil import typing as t from abc import ABC, abstractmethod -from http import HTTPStatus - -from fastapi import HTTPException +from 
antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON, SUB_JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer @@ -15,11 +13,6 @@ class FilterError(Exception): pass -class ChildNotFoundError(HTTPException): - def __init__(self, message: str) -> None: - super().__init__(HTTPStatus.NOT_FOUND, message) - - class FolderNode(INode[JSON, SUB_JSON, JSON], ABC): # noinspection SpellCheckingInspection """ diff --git a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py index 98f0c74a40..7e47affbc9 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py +++ b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py @@ -1,10 +1,12 @@ +import shutil +import typing as t from abc import ABC, abstractmethod from dataclasses import dataclass from datetime import datetime, timedelta from pathlib import Path -from typing import Any, Dict, Generic, List, Optional, Tuple, Union, cast from zipfile import ZipFile +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.inode import G, INode, S, V @@ -12,16 +14,16 @@ @dataclass class SimpleCache: - value: Any + value: t.Any expiration_date: datetime -class LazyNode(INode, ABC, Generic[G, S, V]): # type: ignore +class LazyNode(INode, ABC, t.Generic[G, S, V]): # type: ignore """ Abstract class that implements lazy loading for its child implementations.
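A lazy node stores its content either directly at config.path or behind a companion "<name>.link" file resolved through the context resolver; _infer_path (below) picks whichever of the two actually exists.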
""" - ZIP_FILELIST_CACHE: Dict[str, SimpleCache] = {} + ZIP_FILELIST_CACHE: t.Dict[str, SimpleCache] = {} def __init__( self, @@ -33,7 +35,7 @@ def __init__( def _get_real_file_path( self, - ) -> Tuple[Path, Any]: + ) -> t.Tuple[Path, t.Any]: tmp_dir = None if self.config.zip_path: path, tmp_dir = self._extract_file_to_tmp_dir() @@ -58,12 +60,12 @@ def file_exists(self) -> bool: def _get( self, - url: Optional[List[str]] = None, + url: t.Optional[t.List[str]] = None, depth: int = -1, expanded: bool = False, formatted: bool = True, get_node: bool = False, - ) -> Union[Union[str, G], INode[G, S, V]]: + ) -> t.Union[t.Union[str, G], INode[G, S, V]]: self._assert_url_end(url) if get_node: @@ -74,7 +76,7 @@ def _get( if expanded: return link else: - return cast(G, self.context.resolver.resolve(link, formatted)) + return t.cast(G, self.context.resolver.resolve(link, formatted)) if expanded: return self.get_lazy_content() @@ -83,35 +85,51 @@ def _get( def get( self, - url: Optional[List[str]] = None, + url: t.Optional[t.List[str]] = None, depth: int = -1, expanded: bool = False, formatted: bool = True, - ) -> Union[str, G]: + ) -> t.Union[str, G]: output = self._get(url, depth, expanded, formatted, get_node=False) assert not isinstance(output, INode) return output def get_node( self, - url: Optional[List[str]] = None, + url: t.Optional[t.List[str]] = None, ) -> INode[G, S, V]: output = self._get(url, get_node=True) assert isinstance(output, INode) return output - def delete(self, url: Optional[List[str]] = None) -> None: + def delete(self, url: t.Optional[t.List[str]] = None) -> None: self._assert_url_end(url) if self.get_link_path().exists(): self.get_link_path().unlink() elif self.config.path.exists(): self.config.path.unlink() + def _infer_path(self) -> Path: + if self.get_link_path().exists(): + return self.get_link_path() + elif self.config.path.exists(): + return self.config.path + else: + raise ChildNotFoundError( + f"Neither link file {self.get_link_path} nor matrix file {self.config.path} exists" + ) + + def _infer_target_path(self, is_link: bool) -> Path: + if is_link: + return self.get_link_path() + else: + return self.config.path + def get_link_path(self) -> Path: path = self.config.path.parent / (self.config.path.name + ".link") return path - def save(self, data: Union[str, bytes, S], url: Optional[List[str]] = None) -> None: + def save(self, data: t.Union[str, bytes, S], url: t.Optional[t.List[str]] = None) -> None: self._assert_not_in_zipped_file() self._assert_url_end(url) @@ -121,14 +139,24 @@ def save(self, data: Union[str, bytes, S], url: Optional[List[str]] = None) -> N self.config.path.unlink() return None - self.dump(cast(S, data), url) + self.dump(t.cast(S, data), url) if self.get_link_path().exists(): self.get_link_path().unlink() return None + def rename_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None: + target_path = target._infer_target_path(self.get_link_path().exists()) + target_path.unlink(missing_ok=True) + self._infer_path().rename(target_path) + + def copy_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None: + target_path = target._infer_target_path(self.get_link_path().exists()) + target_path.unlink(missing_ok=True) + shutil.copy(self._infer_path(), target_path) + def get_lazy_content( self, - url: Optional[List[str]] = None, + url: t.Optional[t.List[str]] = None, depth: int = -1, expanded: bool = False, ) -> str: @@ -137,7 +165,7 @@ def get_lazy_content( @abstractmethod def load( self, - url: Optional[List[str]] = None, + url: 
t.Optional[t.List[str]] = None, depth: int = -1, expanded: bool = False, formatted: bool = True, @@ -148,7 +176,7 @@ def load( Args: url: data path to retrieve depth: after url is reached, node expand tree until matches depth asked - expanded: context parameter to determine if current node become from a expansion + expanded: context parameter to determine if current node comes from an expansion formatted: ask for raw file transformation Returns: @@ -157,7 +185,7 @@ def load( raise NotImplementedError() @abstractmethod - def dump(self, data: S, url: Optional[List[str]] = None) -> None: + def dump(self, data: S, url: t.Optional[t.List[str]] = None) -> None: """ Store data on tree. diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py index 4cda0b4027..a68b0f521e 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py @@ -7,11 +7,11 @@ from numpy import typing as npt from pandas.errors import EmptyDataError +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.core.utils.utils import StopWatch from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency, MatrixNode logger = logging.getLogger(__name__) diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py index dbc3c4385e..70317c6255 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py @@ -5,10 +5,10 @@ import pandas as pd from pandas import DataFrame +from antarest.core.exceptions import ChildNotFoundError, MustNotModifyOutputException from antarest.core.model import JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode from antarest.study.storage.rawstudy.model.filesystem.matrix.date_serializer import ( FactoryDateSerializer, @@ -94,29 +94,6 @@ def parse( matrix = self.parse_dataframe(file_path, tmp_dir) return cast(JSON, matrix.to_dict(orient="split")) - def _dump_json(self, data: JSON) -> None: - df = pd.DataFrame(**data) - - headers = pd.DataFrame(df.columns.values.tolist()).T - matrix = pd.concat([headers, pd.DataFrame(df.values)], axis=0) - - time = self.date_serializer.build_date(df.index) - matrix.index = time.index - - matrix = pd.concat([time, matrix], axis=1) - - head = self.head_writer.build(var=df.columns.size, end=df.index.size) - with self.config.path.open(mode="w", newline="\n") as fd: - fd.write(head) - if not matrix.empty: - matrix.to_csv( - fd, - sep="\t", - header=False, - index=False, - float_format="%.6f", - ) - def check_errors( self, data: JSON, @@ -160,11 +137,7 @@ def load( ) from e def dump(self, 
data: Union[bytes, JSON], url: Optional[List[str]] = None) -> None: - if isinstance(data, bytes): - self.config.path.parent.mkdir(exist_ok=True, parents=True) - self.config.path.write_bytes(data) - else: - self._dump_json(data) + raise MustNotModifyOutputException(self.config.path.name) def normalize(self) -> None: pass # no external store in this node diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py index 6778e09b7d..9a542e0c97 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/mcall/grid.py @@ -1,6 +1,14 @@ +import typing as t + +import pandas as pd + +from antarest.core.exceptions import MustNotModifyOutputException +from antarest.core.model import JSON +from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import TREE -from antarest.study.storage.rawstudy.model.filesystem.raw_file_node import RawFileNode +from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode class OutputSimulationModeMcAllGrid(FolderNode): @@ -8,5 +16,93 @@ def build(self) -> TREE: files = [d.stem for d in self.config.path.iterdir()] children: TREE = {} for file in files: - children[file] = RawFileNode(self.context, self.config.next_file(f"{file}.txt")) + synthesis_class = DigestSynthesis if file == "digest" else OutputSynthesis + children[file] = synthesis_class(self.context, self.config.next_file(f"{file}.txt")) return children + + +class OutputSynthesis(LazyNode[JSON, bytes, bytes]): + def __init__(self, context: ContextServer, config: FileStudyTreeConfig): + super().__init__(context, config) + + def get_lazy_content( + self, + url: t.Optional[t.List[str]] = None, + depth: int = -1, + expanded: bool = False, + ) -> str: + return f"matrix://{self.config.path.name}" # prefix used by the front to parse the back-end response + + def load( + self, + url: t.Optional[t.List[str]] = None, + depth: int = -1, + expanded: bool = False, + formatted: bool = True, + ) -> JSON: + file_path = self.config.path + df = pd.read_csv(file_path, sep="\t") + df.fillna("", inplace=True) # replace NaN values for the front-end + output = df.to_dict(orient="split") + del output["index"] + return t.cast(JSON, output) + + def dump(self, data: bytes, url: t.Optional[t.List[str]] = None) -> None: + raise MustNotModifyOutputException(self.config.path.name) + + def check_errors(self, data: str, url: t.Optional[t.List[str]] = None, raising: bool = False) -> t.List[str]: + if not self.config.path.exists(): + msg = f"{self.config.path} does not exist" + if raising: + raise ValueError(msg) + return [msg] + return [] + + def normalize(self) -> None: + pass # shouldn't be normalized as it's an output file + + def denormalize(self) -> None: + pass # shouldn't be denormalized as it's an output file + + +class DigestSynthesis(OutputSynthesis): + def __init__(self, context: ContextServer, config: FileStudyTreeConfig): + super().__init__(context, config) + + def load( + self, + url: t.Optional[t.List[str]] = None, + depth: int = -1, + expanded: bool = False, + formatted: bool = True, + ) -> JSON: + 
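# digest.txt interleaves several tables whose rows may have ragged widths, so a plain pd.read_csv would fail here; + # _parse_digest_file (below) pads every row to the widest one, e.g. rows "a\tb\tc" and "d\te" yield a 2x3 frame padded with "". +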
file_path = self.config.path + with open(file_path, "r") as f: + df = _parse_digest_file(f) + + df.fillna("", inplace=True) # replace NaN values for the front-end + output = df.to_dict(orient="split") + del output["index"] + return t.cast(JSON, output) + + +def _parse_digest_file(digest_file: t.TextIO) -> pd.DataFrame: + """ + Parse a digest file as a whole and return a single DataFrame. + + The `digest.txt` file is a TSV file containing synthetic results of the simulation. + This file contains several data tables, each being separated by empty lines + and preceded by a header describing the nature and dimensions of the table. + + Note that rows in the file may have a different number of columns. + """ + + # Read the file and find the maximum number of columns in any row + data = [row.split("\t") for row in digest_file.read().splitlines()] + max_cols = max(len(row) for row in data) + + # Adjust the number of columns in each row + data = [row + [""] * (max_cols - len(row)) for row in data] + + # Return a DataFrame from the data (do not convert values to float) + return pd.DataFrame(data=data, columns=[str(i) for i in range(max_cols)], dtype=object) diff --git a/antarest/study/storage/study_download_utils.py b/antarest/study/storage/study_download_utils.py index 6ca846ca30..4c08b0c14b 100644 --- a/antarest/study/storage/study_download_utils.py +++ b/antarest/study/storage/study_download_utils.py @@ -13,6 +13,7 @@ from fastapi import HTTPException +from antarest.core.exceptions import ChildNotFoundError from antarest.study.model import ( ExportFormat, MatrixAggregationResult, @@ -24,7 +25,7 @@ ) from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, EnrModelling, FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError, FilterError, FolderNode +from antarest.study.storage.rawstudy.model.filesystem.folder_node import FilterError, FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import OutputSeriesMatrix diff --git a/antarest/study/storage/variantstudy/business/command_reverter.py b/antarest/study/storage/variantstudy/business/command_reverter.py index 089589576f..c60cfad601 100644 --- a/antarest/study/storage/variantstudy/business/command_reverter.py +++ b/antarest/study/storage/variantstudy/business/command_reverter.py @@ -2,14 +2,14 @@ import typing as t from pathlib import Path +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.variantstudy.model.command.common import CommandName from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( - TERM_MATRICES, CreateBindingConstraint, + TermMatrices, ) from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster from antarest.study.storage.variantstudy.model.command.create_district import CreateDistrict @@ -115,7 +115,7 @@ def 
_revert_update_binding_constraint( } matrix_service = command.command_context.matrix_service - for matrix_name in ["values"] + TERM_MATRICES: + for matrix_name in ["values"] + [m.value for m in TermMatrices]: matrix = getattr(command, matrix_name) if matrix is not None: args[matrix_name] = matrix_service.get_matrix_id(matrix) diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index ee9162241d..0e34b5f867 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -1,6 +1,7 @@ import json import typing as t from abc import ABCMeta +from enum import Enum import numpy as np from pydantic import BaseModel, Extra, Field, root_validator, validator @@ -23,7 +24,6 @@ from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO -TERM_MATRICES = ["less_term_matrix", "equal_term_matrix", "greater_term_matrix"] DEFAULT_GROUP = "default" MatrixType = t.List[t.List[MatrixData]] @@ -35,6 +35,12 @@ } +class TermMatrices(Enum): + LESS = "less_term_matrix" + GREATER = "greater_term_matrix" + EQUAL = "equal_term_matrix" + + def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixType, version: int) -> None: """ Check the binding constraint's matrix values for the specified time step. @@ -216,7 +222,7 @@ def to_dto(self) -> CommandDTO: args["group"] = self.group matrix_service = self.command_context.matrix_service - for matrix_name in TERM_MATRICES + ["values"]: + for matrix_name in [m.value for m in TermMatrices] + ["values"]: matrix_attr = getattr(self, matrix_name, None) if matrix_attr is not None: args[matrix_name] = matrix_service.get_matrix_id(matrix_attr) @@ -356,11 +362,16 @@ def apply_binding_constraint( if version < 870: study_data.tree.save(self.values, ["input", "bindingconstraints", bd_id]) - for matrix_term, matrix_name, matrix_alias in zip( - [self.less_term_matrix, self.equal_term_matrix, self.greater_term_matrix], - TERM_MATRICES, - ["lt", "eq", "gt"], - ): + operator_matrices_map = { + BindingConstraintOperator.EQUAL: [(self.equal_term_matrix, "eq")], + BindingConstraintOperator.GREATER: [(self.greater_term_matrix, "gt")], + BindingConstraintOperator.LESS: [(self.less_term_matrix, "lt")], + BindingConstraintOperator.BOTH: [(self.less_term_matrix, "lt"), (self.greater_term_matrix, "gt")], + } + + current_operator = self.operator or BindingConstraintOperator(binding_constraints[new_key]["operator"]) + + for matrix_term, matrix_alias in operator_matrices_map[current_operator]: if matrix_term: if not isinstance(matrix_term, str): # pragma: no cover raise TypeError(repr(matrix_term)) @@ -442,7 +453,7 @@ def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: args[prop] = other_command[prop] matrix_service = self.command_context.matrix_service - for matrix_name in ["values"] + TERM_MATRICES: + for matrix_name in ["values"] + [m.value for m in TermMatrices]: self_matrix = getattr(self, matrix_name) # matrix, ID or `None` other_matrix = getattr(other, matrix_name) # matrix, ID or `None` self_matrix_id = None if self_matrix is None else matrix_service.get_matrix_id(self_matrix) diff --git a/antarest/study/storage/variantstudy/model/command/remove_area.py 
b/antarest/study/storage/variantstudy/model/command/remove_area.py index 39b1058a01..f39c8aac9c 100644 --- a/antarest/study/storage/variantstudy/model/command/remove_area.py +++ b/antarest/study/storage/variantstudy/model/command/remove_area.py @@ -1,11 +1,11 @@ import contextlib import logging -from typing import Any, Dict, List, Tuple +import typing as t +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.variantstudy.business.utils_binding_constraint import ( remove_area_cluster_from_binding_constraints, ) @@ -44,7 +44,7 @@ def _remove_area_from_sets_in_config(self, study_data_config: FileStudyTreeConfi set_.areas.remove(self.id) study_data_config.sets[id_] = set_ - def _apply_config(self, study_data_config: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: + def _apply_config(self, study_data_config: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: del study_data_config.areas[self.id] self._remove_area_from_links_in_config(study_data_config) @@ -237,8 +237,10 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: study_data.tree.delete(["input", "hydro", "common", "capacity", f"waterValues_{self.id}"]) if study_data.config.version >= 810: - study_data.tree.delete(["input", "renewables", "clusters", self.id]) - study_data.tree.delete(["input", "renewables", "series", self.id]) + with contextlib.suppress(ChildNotFoundError): + # renewables folder only exists in the tree if study.renewable-generation-modelling is "clusters" + study_data.tree.delete(["input", "renewables", "clusters", self.id]) + study_data.tree.delete(["input", "renewables", "series", self.id]) if study_data.config.version >= 860: study_data.tree.delete(["input", "st-storage", "clusters", self.id]) @@ -278,8 +280,8 @@ def match_signature(self) -> str: def match(self, other: ICommand, equal: bool = False) -> bool: return isinstance(other, RemoveArea) and self.id == other.id - def _create_diff(self, other: "ICommand") -> List["ICommand"]: + def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: return [] - def get_inner_matrices(self) -> List[str]: + def get_inner_matrices(self) -> t.List[str]: return [] diff --git a/antarest/study/storage/variantstudy/model/command/replace_matrix.py b/antarest/study/storage/variantstudy/model/command/replace_matrix.py index 4b66584c39..6a51ca86b1 100644 --- a/antarest/study/storage/variantstudy/model/command/replace_matrix.py +++ b/antarest/study/storage/variantstudy/model/command/replace_matrix.py @@ -1,13 +1,13 @@ -from typing import Any, Dict, List, Tuple, Union +import typing as t from pydantic import validator +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.core.utils.utils import assert_this from antarest.matrixstore.model import MatrixData from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixNode from antarest.study.storage.variantstudy.business.utils import 
AliasDecoder, strip_matrix_protocol, validate_matrix from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput @@ -30,11 +30,11 @@ class ReplaceMatrix(ICommand): # ================== target: str - matrix: Union[List[List[MatrixData]], str] + matrix: t.Union[t.List[t.List[MatrixData]], str] _validate_matrix = validator("matrix", each_item=True, always=True, allow_reuse=True)(validate_matrix) - def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: + def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: return ( CommandOutput( status=True, @@ -93,9 +93,9 @@ def match(self, other: ICommand, equal: bool = False) -> bool: return self.target == other.target and self.matrix == other.matrix return self.target == other.target - def _create_diff(self, other: "ICommand") -> List["ICommand"]: + def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: return [other] - def get_inner_matrices(self) -> List[str]: + def get_inner_matrices(self) -> t.List[str]: assert_this(isinstance(self.matrix, str)) return [strip_matrix_protocol(self.matrix)] diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py index 3f84ecd334..6c1d9bafae 100644 --- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py @@ -1,22 +1,108 @@ import json -from typing import Any, Dict, List, Mapping, Optional, Tuple +import typing as t from antarest.core.model import JSON from antarest.matrixstore.model import MatrixData -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy +from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( DEFAULT_GROUP, - TERM_MATRICES, AbstractBindingConstraintCommand, + TermMatrices, create_binding_constraint_config, ) from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO -MatrixType = List[List[MatrixData]] +MatrixType = t.List[t.List[MatrixData]] + +ALIAS_OPERATOR_MAP = { + BindingConstraintOperator.EQUAL: "eq", + BindingConstraintOperator.LESS: "lt", + BindingConstraintOperator.GREATER: "gt", +} + + +def _update_matrices_names( + file_study: FileStudy, + binding_constraint_id: str, + existing_operator: BindingConstraintOperator, + new_operator: BindingConstraintOperator, +) -> None: + """ + Update the matrix file name according to the new operator. 
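+ + For example (a sketch of the file operations implemented below): + - LESS -> GREATER: "{bc_id}_lt" is renamed to "{bc_id}_gt" + - LESS -> BOTH: "{bc_id}_lt" is kept and copied to "{bc_id}_gt" + - BOTH -> EQUAL: "{bc_id}_lt" is renamed to "{bc_id}_eq" and "{bc_id}_gt" is deleted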
+ + Args: + file_study: the file study + binding_constraint_id: the binding constraint ID + existing_operator: the existing operator + new_operator: the new operator + + Raises: + NotImplementedError: if the case is not handled + """ + + parent_folder_node = file_study.tree.get_node(["input", "bindingconstraints"]) + matrix_lt = parent_folder_node.get_node([f"{binding_constraint_id}_lt"]) + assert isinstance(matrix_lt, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_lt)}" + matrix_eq = parent_folder_node.get_node([f"{binding_constraint_id}_eq"]) + assert isinstance(matrix_eq, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_eq)}" + matrix_gt = parent_folder_node.get_node([f"{binding_constraint_id}_gt"]) + assert isinstance(matrix_gt, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_gt)}" + + # Due to legacy matrices generation, we need to check if the new matrix file already exists + # and if it does, we need to first remove it before renaming the existing matrix file + + handled_operators = [ + BindingConstraintOperator.EQUAL, + BindingConstraintOperator.LESS, + BindingConstraintOperator.GREATER, + BindingConstraintOperator.BOTH, + ] + + if (existing_operator not in handled_operators) or (new_operator not in handled_operators): + raise NotImplementedError( + f"Case not handled yet: existing_operator={existing_operator}, new_operator={new_operator}" + ) + elif existing_operator == new_operator: + return # nothing to do + elif existing_operator != BindingConstraintOperator.BOTH and new_operator != BindingConstraintOperator.BOTH: + matrix_node = parent_folder_node.get_node([f"{binding_constraint_id}_{ALIAS_OPERATOR_MAP[existing_operator]}"]) + assert isinstance( + matrix_node, LazyNode + ), f"Node type not handled yet: LazyNode expected, got {type(matrix_node)}" + new_matrix_node = parent_folder_node.get_node([f"{binding_constraint_id}_{ALIAS_OPERATOR_MAP[new_operator]}"]) + assert isinstance( + new_matrix_node, LazyNode + ), f"Node type not handled yet: LazyNode expected, got {type(new_matrix_node)}" + matrix_node.rename_file(new_matrix_node) + elif new_operator == BindingConstraintOperator.BOTH: + if existing_operator == BindingConstraintOperator.EQUAL: + matrix_eq.rename_file(matrix_lt) + matrix_gt.delete() + # copy the matrix lt to gt + matrix_lt.copy_file(matrix_gt) + elif existing_operator == BindingConstraintOperator.LESS: + matrix_gt.delete() + matrix_lt.copy_file(matrix_gt) + else: + matrix_lt.delete() + matrix_gt.copy_file(matrix_lt) + else: + if new_operator == BindingConstraintOperator.EQUAL: + # we may retrieve the mean of the two matrices, but here we just copy the lt matrix + matrix_lt.rename_file(matrix_eq) + matrix_gt.delete() + elif new_operator == BindingConstraintOperator.LESS: + matrix_gt.delete() + else: + matrix_lt.delete() class UpdateBindingConstraint(AbstractBindingConstraintCommand): @@ -36,10 +122,10 @@ class UpdateBindingConstraint(AbstractBindingConstraintCommand): # Properties of the `UPDATE_BINDING_CONSTRAINT` command: id: str - def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: + def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: return CommandOutput(status=True), {} - def _find_binding_config(self, binding_constraints: Mapping[str, JSON]) -> Optional[Tuple[str, JSON]]: + def _find_binding_config(self, binding_constraints: t.Mapping[str, JSON]) -> t.Optional[t.Tuple[str, JSON]]: 
""" Find the binding constraint with the given ID in the list of binding constraints, and returns its index and configuration, or `None` if it does not exist. @@ -65,7 +151,15 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: index, actual_cfg = index_and_cfg - updated_matrices = [term for term in TERM_MATRICES if hasattr(self, term) and getattr(self, term)] + # rename matrices if the operator has changed for version >= 870 + if self.operator and study_data.config.version >= 870: + existing_operator = BindingConstraintOperator(actual_cfg.get("operator")) + new_operator = BindingConstraintOperator(self.operator) + _update_matrices_names(study_data, self.id, existing_operator, new_operator) + + updated_matrices = [ + term for term in [m.value for m in TermMatrices] if hasattr(self, term) and getattr(self, term) + ] study_version = study_data.config.version time_step = self.time_step or BindingConstraintFrequency(actual_cfg.get("type")) self.validates_and_fills_matrices( @@ -90,7 +184,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: return super().apply_binding_constraint(study_data, binding_constraints, index, self.id, old_groups=old_groups) def to_dto(self) -> CommandDTO: - matrices = ["values"] + TERM_MATRICES + matrices = ["values"] + [m.value for m in TermMatrices] matrix_service = self.command_context.matrix_service excluded_fields = frozenset(ICommand.__fields__) @@ -104,7 +198,7 @@ def to_dto(self) -> CommandDTO: def match_signature(self) -> str: return str(self.command_name.value + MATCH_SIGNATURE_SEPARATOR + self.id) - def _create_diff(self, other: "ICommand") -> List["ICommand"]: + def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: return [other] def match(self, other: "ICommand", equal: bool = False) -> bool: diff --git a/docker-compose.yml b/docker-compose.yml index a18e3bc817..9c0f877db2 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -61,5 +61,5 @@ services: - 80:80 volumes: - ./resources/deploy/nginx.conf:/etc/nginx/conf.d/default.conf:ro - - ./webapp/build:/www - - ./resources/deploy/web.config.json:/www/config.json:ro \ No newline at end of file + - ./webapp/dist:/www + - ./resources/deploy/web.config.json:/www/config.json:ro diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md index 031fddd8b1..52bcb0774f 100644 --- a/docs/CHANGELOG.md +++ b/docs/CHANGELOG.md @@ -1,28 +1,46 @@ Antares Web Changelog ===================== +v2.17.4 (2024-07-29) +-------------------- + +### Features + +* **output:** enhance output synthesis view [`2088`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2088) +* **ui-study:** add button to display 'digest' file on successful tasks in task list [`2101`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2101) +* **ui-bc:** increases constraint terms field size [`2102`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2102) +* **bc:** avoid unnecessary creation of RHS matrices for binding constraints [`2077`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2077) +* **ui-results:** add button to display 'digest' file in result list [`2103`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2103) + +### Bug Fixes + +* **area:** allow removal when aggregated mode used [`2094`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2094) +* **ui-map:** prevent name field to overflow dialog box and add more space [`2102`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2102) + +**Full Changelog**: https://github.com/AntaresSimulatorTeam/AntaREST/compare/v2.17.3...v2.17.4 + 
v2.17.3 (2024-07-18) -------------------- ### Features -* **api**: do not allow areas, links or thermals deletion when referenced in a binding constraint [`2061`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2061) -* **outputs**: build outputs tree based on filesystem [`2064`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2064) -* **api-raw**: raise a 404 Not Found error when a resource is missing in the study [`2078`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2078) +* **api:** do not allow areas, links or thermals deletion when referenced in a binding constraint [`2061`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2061) +* **outputs:** build outputs tree based on filesystem [`2064`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2064) +* **api-raw:** raise a 404 Not Found error when a resource is missing in the study [`2078`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2078) ### Bug Fixes -* **ui-clusters**: improve cell number values accuracy by using rounding instead of truncating [`2087`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2087) -* **ui-commons**: prompt from Form displayed on dialog validation [`2089`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2089) +* **ui-clusters:** improve cell number values accuracy by using rounding instead of truncating [`2087`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2087) +* **ui-commons:** prompt from Form displayed on dialog validation [`2089`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2089) ### Continuous integration -* **workflows**: update Actions in GitHub workflows [`2080`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2080) +* **workflows:** update Actions in GitHub workflows [`2080`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2080) ### Documentation -* user-guide: updating Binding Constraints Commands documentation and metadata for search [`2082`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2082) -* user-guide: improve the user guide and add "How to Create a New Study?" topic [`2081`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2081) +* **user-guide:** updating Binding Constraints Commands documentation and metadata for search [`2082`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2082) +* **user-guide:** improve the user guide and add "How to Create a New Study?" 
topic [`2081`](https://github.com/AntaresSimulatorTeam/AntaREST/pull/2081) **Full Changelog**: https://github.com/AntaresSimulatorTeam/AntaREST/compare/v2.17.2...v2.17.3 diff --git a/requirements.txt b/requirements.txt index 76c4f75c54..5a543c02fc 100644 --- a/requirements.txt +++ b/requirements.txt @@ -22,12 +22,14 @@ pydantic~=1.9.0 PyQt5~=5.15.6 python-json-logger~=2.0.7 python-multipart~=0.0.5 -PyYAML~=5.4.1 +PyYAML~=5.4.1; python_version <= '3.9' +PyYAML~=6.0.1; python_version > '3.9' redis~=4.1.2 requests~=2.27.1 SQLAlchemy~=1.4.46 starlette~=0.17.1 -tables==3.6.1 +tables==3.6.1; python_version <= '3.8' +tables==3.9.2; python_version > '3.8' typing_extensions~=4.7.1 uvicorn[standard]~=0.15.0 xlsxwriter~=3.2.0 diff --git a/resources/deploy/config.prod.yaml b/resources/deploy/config.prod.yaml index e69de29bb2..cf9087a2af 100644 --- a/resources/deploy/config.prod.yaml +++ b/resources/deploy/config.prod.yaml @@ -0,0 +1,87 @@ +security: + disabled: false + jwt: + key: secretkeytochange + login: + admin: + pwd: admin + external_auth: + url: "" + default_group_role: 10 + +db: + url: "postgresql://postgres:somepass@postgresql:5432/postgres" + admin_url: "postgresql://postgres:somepass@postgresql:5432/postgres" + pool_recycle: 3600 + +storage: + tmp_dir: /antarest_tmp_dir + archive_dir: /studies/archives + matrixstore: /matrixstore + matrix_gc_dry_run: true + workspaces: + default: # required, no filters applied, this folder is not watched + path: /workspaces/internal_studies/ + # other workspaces can be added + # if a directory is to be ignored by the watcher, place a file named AW_NO_SCAN inside + tmp: + path: /workspaces/studies/ + # filter_in: ['.*'] # default to '.*' + # filter_out: [] # default to empty + # groups: [] # default empty + +launcher: + default: local + + local: + binaries: + 800: /antares_simulator/antares-8.2-solver + enable_nb_cores_detection: true + +# slurm: +# local_workspace: path/to/workspace +# username: username +# hostname: 0.0.0.0 +# port: 22 +# private_key_file: path/to/key +# key_password: key_password +# password: password_is_optional_but_necessary_if_key_is_absent +# default_wait_time: 900 +# default_time_limit: 172800 +# enable_nb_cores_detection: False +# nb_cores: +# min: 1 +# default: 22 +# max: 24 +# default_json_db_name: launcher_db.json +# slurm_script_path: /path/to/launchantares_v1.1.3.sh +# db_primary_key: name +# antares_versions_on_remote_server : +# - "610" +# - "700" +# - "710" +# - "720" +# - "800" + + +debug: false + +root_path: "api" + +#tasks: +# max_workers: 5 +server: + worker_threadpool_size: 12 +# services: +# - watcher + +logging: + level: INFO +# logfile: /logs/antarest.log +# json: true + +# Uncomment these lines to use redis as a backend for the eventbus +# It is required to use redis when using this application on multiple workers in a preforked model like gunicorn for instance +redis: + host: redis + port: 6379 diff --git a/setup.py b/setup.py index 7e6da75281..4f1738d43f 100644 --- a/setup.py +++ b/setup.py @@ -6,7 +6,7 @@ setup( name="AntaREST", - version="2.17.3", + version="2.17.4", description="Antares Server", long_description=Path("README.md").read_text(encoding="utf-8"), long_description_content_type="text/markdown", diff --git a/sonar-project.properties b/sonar-project.properties index e7a18f5010..9d90f3cc19 100644 --- a/sonar-project.properties +++ b/sonar-project.properties @@ -6,5 +6,5 @@ sonar.exclusions=antarest/gui.py,antarest/main.py sonar.python.coverage.reportPaths=coverage.xml sonar.python.version=3.8 
sonar.javascript.lcov.reportPaths=webapp/coverage/lcov.info -sonar.projectVersion=2.17.3 +sonar.projectVersion=2.17.4 sonar.coverage.exclusions=antarest/gui.py,antarest/main.py,antarest/singleton_services.py,antarest/worker/archive_worker_service.py,webapp/**/* \ No newline at end of file diff --git a/tests/integration/prepare_proxy.py b/tests/integration/prepare_proxy.py new file mode 100644 index 0000000000..0556df8a87 --- /dev/null +++ b/tests/integration/prepare_proxy.py @@ -0,0 +1,343 @@ +import io +import typing as t + +import pandas as pd +from starlette.testclient import TestClient + +from antarest.core.tasks.model import TaskStatus +from tests.integration.utils import wait_task_completion + + +class PreparerProxy: + """ + Proxy to prepare the test environment for integration tests + + Attributes: + client: The client to be used for requests. + user_access_token: The access token of the user. + headers: The headers to be used for requests. + """ + + def __init__(self, client: TestClient, user_access_token: str): + """ + Initialize the proxy. + + Args: + client: The client to be used for requests. + user_access_token: The access token of the user. + """ + self.client = client + self.user_access_token = user_access_token + self.headers = {"Authorization": f"Bearer {user_access_token}"} + + def create_study(self, name: str, *, version: int = 870) -> str: + """ + Create a new study in the managed workspace. + + Args: + name: The name of the study. + version: The version of the study. Defaults to 870. + + Returns: + The ID of the created study. + """ + res = self.client.post( + "/v1/studies", + params={"name": name, "version": version}, # type: ignore + headers=self.headers, + ) + assert res.status_code == 201, res.json() + return t.cast(str, res.json()) + + def copy_study_and_upgrade(self, ref_study_id: str, target_version: int) -> str: + """ + Copy a study in the managed workspace and upgrade it to a specific version. + + Args: + ref_study_id: The ID of the study to copy. + target_version: The version to upgrade the copied study to. + + Returns: + The ID of the copied and upgraded study. + """ + # Prepare a managed study to test specific matrices for version 8.2 + res = self.client.post( + f"/v1/studies/{ref_study_id}/copy", + params={"dest": "copied-820", "use_task": False}, # type: ignore + headers=self.headers, + ) + res.raise_for_status() + study_id = t.cast(str, res.json()) + + res = self.client.put( + f"/v1/studies/{study_id}/upgrade", + params={"target_version": target_version}, + headers=self.headers, + ) + res.raise_for_status() + task_id = res.json() + assert task_id + + task = wait_task_completion(self.client, self.user_access_token, task_id, timeout=20) + assert task.status == TaskStatus.COMPLETED + return study_id + + def upload_matrix(self, study_id: str, matrix_path: str, df: pd.DataFrame) -> None: + """ + Upload a matrix to the study. + + Args: + study_id: The ID of the study to upload the matrix to. + matrix_path: The path to the matrix in the study. + df: The data to upload. 
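+ + Usage (a sketch; the matrix path and frame are illustrative, not prescriptive): + + df = pd.DataFrame(8760 * [[0.0]]) + preparer.upload_matrix(study_id, "input/load/series/load_fr", df)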
+ """ + tsv = io.BytesIO() + df.to_csv(tsv, sep="\t", index=False, header=False) + tsv.seek(0) + # noinspection SpellCheckingInspection + res = self.client.put( + f"/v1/studies/{study_id}/raw", + params={"path": matrix_path, "create_missing": True}, # type: ignore + headers=self.headers, + files={"file": tsv, "create_missing": "true"}, # type: ignore + ) + res.raise_for_status() + + def download_matrix(self, study_id: str, matrix_path: str) -> pd.DataFrame: + """ + Download a matrix from the study. + + Args: + study_id: The ID of the study to download the matrix from. + matrix_path: The path to the matrix in the study. + + Returns: + pd.DataFrame: The downloaded data. + """ + res = self.client.get( + f"/v1/studies/{study_id}/raw", + params={"depth": 1, "formatted": True, "path": matrix_path}, # type: ignore + headers=self.headers, + ) + res.raise_for_status() + return pd.DataFrame(**res.json()) + + def create_variant(self, parent_id: str, *, name: str) -> str: + """ + Create a variant of a study. + + Args: + parent_id: The ID of the parent study. + name: The name of the variant. + + Returns: + str: The ID of the created variant. + """ + res = self.client.post( + f"/v1/studies/{parent_id}/variants", + headers=self.headers, + params={"name": name}, + ) + res.raise_for_status() + variant_id = t.cast(str, res.json()) + return variant_id + + def generate_snapshot(self, variant_id: str, denormalize: bool = False, from_scratch: bool = True) -> None: + """ + Generate a snapshot for a variant. + + Args: + variant_id: The ID of the variant study. + denormalize: Whether to denormalize the snapshot (replace the matrix links by the actual data). + from_scratch: Whether to generate the snapshot from scratch (recompute the data). + """ + # Generate a snapshot for the variant + res = self.client.put( + f"/v1/studies/{variant_id}/generate", + headers=self.headers, + params={"denormalize": denormalize, "from_scratch": from_scratch}, + ) + res.raise_for_status() + task_id = res.json() + assert task_id + + task = wait_task_completion(self.client, self.user_access_token, task_id, timeout=20) + assert task.status == TaskStatus.COMPLETED + + def create_area(self, study_id: str, *, name: str, country: str = "FR") -> t.Dict[str, t.Any]: + """ + Create an area in a study. + + Args: + study_id: The ID of the parent study. + name: Name of the area. + country: Country of the area. + + Returns: + The area properties. + """ + res = self.client.post( + f"/v1/studies/{study_id}/areas", + headers=self.headers, + json={"name": name, "type": "AREA", "metadata": {"country": country}}, + ) + res.raise_for_status() + properties = t.cast(t.Dict[str, t.Any], res.json()) + return properties + + def update_general_data(self, study_id: str, **data: t.Any) -> None: + """ + Update the general data of a study. + + Args: + study_id: The ID of the study. + **data: The data to update. + """ + res = self.client.put( + f"/v1/studies/{study_id}/config/general/form", + json=data, + headers=self.headers, + ) + res.raise_for_status() + + def create_link(self, study_id: str, area1_id: str, area2_id: str) -> t.Dict[str, t.Any]: + """ + Create a link between two areas in a study. + + Args: + study_id: The ID of the study. + area1_id: The ID of the first area. + area2_id: The ID of the second area. + + Returns: + The link properties. 
+ """ + # Create a link between the two areas + res = self.client.post( + f"/v1/studies/{study_id}/links", + headers=self.headers, + json={"area1": area1_id, "area2": area2_id}, + ) + assert res.status_code == 200, res.json() + properties = t.cast(t.Dict[str, t.Any], res.json()) + properties["id"] = f"{area1_id}%{area2_id}" + return properties + + def create_thermal(self, study_id: str, area1_id: str, *, name: str, **kwargs: t.Any) -> t.Dict[str, t.Any]: + """ + Create a thermal cluster in an area. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + name: The name of the cluster. + **kwargs: Additional cluster data. + + Returns: + The cluster properties. + """ + res = self.client.post( + f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", + headers=self.headers, + json={"name": name, **kwargs}, + ) + res.raise_for_status() + properties = t.cast(t.Dict[str, t.Any], res.json()) + return properties + + def get_thermals(self, study_id: str, area1_id: str) -> t.List[t.Dict[str, t.Any]]: + """ + Get the thermal clusters of an area in a study. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + + Returns: + The list of cluster properties. + """ + res = self.client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", headers=self.headers) + res.raise_for_status() + clusters_list = t.cast(t.List[t.Dict[str, t.Any]], res.json()) + return clusters_list + + def create_renewable(self, study_id: str, area1_id: str, *, name: str, **kwargs: t.Any) -> str: + """ + Create a renewable cluster in an area. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + name: The name of the cluster. + **kwargs: Additional cluster data. + """ + res = self.client.post( + f"/v1/studies/{study_id}/areas/{area1_id}/clusters/renewable", + headers=self.headers, + json={"name": name, **kwargs}, + ) + res.raise_for_status() + cluster_id = t.cast(str, res.json()["id"]) + return cluster_id + + def get_renewables(self, study_id: str, area1_id: str) -> t.List[t.Dict[str, t.Any]]: + """ + Get the renewable clusters of an area in a study. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + + Returns: + The list of cluster properties. + """ + res = self.client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/renewable", headers=self.headers) + res.raise_for_status() + clusters_list = t.cast(t.List[t.Dict[str, t.Any]], res.json()) + return clusters_list + + def create_binding_constraint(self, study_id: str, *, name: str, **kwargs: t.Any) -> t.Dict[str, t.Any]: + """ + Create a binding constraint in a study. + + Args: + study_id: The ID of the study. + name: The name of the constraint. + **kwargs: Additional constraint data. + + Returns: + The binding constraint properties. + """ + res = self.client.post( + f"/v1/studies/{study_id}/bindingconstraints", + headers=self.headers, + json={"name": name, **kwargs}, + ) + res.raise_for_status() + properties = t.cast(t.Dict[str, t.Any], res.json()) + return properties + + def get_binding_constraints(self, study_id: str) -> t.List[t.Dict[str, t.Any]]: + """ + Get the binding constraints of a study. + + Args: + study_id: The ID of the study. + + Returns: + The list of constraint properties. 
+ """ + res = self.client.get(f"/v1/studies/{study_id}/bindingconstraints", headers=self.headers) + res.raise_for_status() + binding_constraints_list = t.cast(t.List[t.Dict[str, t.Any]], res.json()) + return binding_constraints_list + + def drop_all_commands(self, variant_id: str) -> None: + """ + Drop all commands of a variant. + + Args: + variant_id: The ID of the variant. + """ + res = self.client.delete(f"/v1/studies/{variant_id}/commands", headers=self.headers) + res.raise_for_status() diff --git a/tests/integration/raw_studies_blueprint/test_download_matrices.py b/tests/integration/raw_studies_blueprint/test_download_matrices.py index 0f4e764089..1eaab62cd6 100644 --- a/tests/integration/raw_studies_blueprint/test_download_matrices.py +++ b/tests/integration/raw_studies_blueprint/test_download_matrices.py @@ -101,6 +101,9 @@ def update_general_data(self, internal_study_id: str, **data: t.Any): res.raise_for_status() +from tests.integration.prepare_proxy import PreparerProxy + + @pytest.mark.integration_test class TestDownloadMatrices: """ @@ -116,13 +119,13 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, int preparer = PreparerProxy(client, user_access_token) - study_820_id = preparer.copy_upgrade_study(internal_study_id, target_version=820) + study_820_id = preparer.copy_study_and_upgrade(internal_study_id, target_version=820) # Create Variant variant_id = preparer.create_variant(study_820_id, name="New Variant") # Create a new area to implicitly create normalized matrices - area_id = preparer.create_area(variant_id, name="Mayenne", country="France") + area_id = preparer.create_area(variant_id, name="Mayenne", country="France")["id"] # Change study start_date preparer.update_general_data(variant_id, firstMonth="July") @@ -131,7 +134,7 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, int preparer.generate_snapshot(variant_id) # Prepare a managed study to test specific matrices for version 8.6 - study_860_id = preparer.copy_upgrade_study(internal_study_id, target_version=860) + study_860_id = preparer.copy_study_and_upgrade(internal_study_id, target_version=860) # Import a Min Gen. 
matrix: shape=(8760, 3), with random integers between 0 and 1000 generator = np.random.default_rng(11) diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 7e3c613e16..aba3d397ac 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -1,12 +1,23 @@ -import io import re import numpy as np import pandas as pd import pytest +from requests.exceptions import HTTPError from starlette.testclient import TestClient from antarest.study.business.binding_constraint_management import ClusterTerm, ConstraintTerm, LinkTerm +from tests.integration.prepare_proxy import PreparerProxy + +MATRIX_SIZES = {"hourly": 8784, "daily": 366, "weekly": 366} + + +REQUIRED_MATRICES = { + "less": {"lt"}, + "equal": {"eq"}, + "greater": {"gt"}, + "both": {"lt", "gt"}, +} class TestLinkTerm: @@ -68,14 +79,6 @@ def test_constraint_id__other(self) -> None: assert term.generate_id() == "foo" -def _upload_matrix(client: TestClient, study_id: str, matrix_path: str, df: pd.DataFrame) -> None: - tsv = io.BytesIO() - df.to_csv(tsv, sep="\t", index=False, header=False) - tsv.seek(0) - res = client.put(f"/v1/studies/{study_id}/raw", params={"path": matrix_path}, files={"file": tsv}) - res.raise_for_status() - - @pytest.mark.unit_test class TestBindingConstraints: """ @@ -90,49 +93,22 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # STUDY PREPARATION # ============================= - # Create a Study - res = client.post("/v1/studies", params={"name": "foo", "version": "860"}) - assert res.status_code == 201, res.json() - study_id = res.json() - - # Create Areas - res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 1", "type": "AREA"}) - assert res.status_code == 200, res.json() - area1_id = res.json()["id"] - assert area1_id == "area 1" - - res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 2", "type": "AREA"}) - assert res.status_code == 200, res.json() - area2_id = res.json()["id"] - assert area2_id == "area 2" - - # Create a link between the two areas - res = client.post(f"/v1/studies/{study_id}/links", json={"area1": area1_id, "area2": area2_id}) - assert res.status_code == 200, res.json() + preparer = PreparerProxy(client, user_access_token) + study_id = preparer.create_study("foo", version=860) + area1_id = preparer.create_area(study_id, name="Area 1")["id"] + area2_id = preparer.create_area(study_id, name="Area 2")["id"] + link_id = preparer.create_link(study_id, area1_id=area1_id, area2_id=area2_id)["id"] # Create a cluster in area1 - res = client.post( - f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", - json={"name": "Cluster 1", "group": "Nuclear"}, - ) - assert res.status_code == 200, res.json() - cluster_id = res.json()["id"] - assert cluster_id == "Cluster 1" - - # Get clusters list to check created cluster in area1 - res = client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal") - clusters_list = res.json() - assert res.status_code == 200, res.json() + cluster_id = preparer.create_thermal(study_id, area1_id, name="Cluster 1", group="Nuclear")["id"] + clusters_list = preparer.get_thermals(study_id, area1_id) assert len(clusters_list) == 1 assert clusters_list[0]["id"] == cluster_id assert clusters_list[0]["name"] == "Cluster 1" assert clusters_list[0]["group"] == "Nuclear" if study_type == "variant": - # Create Variant 
- res = client.post(f"/v1/studies/{study_id}/variants", params={"name": "Variant 1"}) - assert res.status_code in {200, 201}, res.json() - study_id = res.json() + study_id = preparer.create_variant(study_id, name="Variant 1") # ============================= # CREATION @@ -176,23 +152,18 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st assert res.status_code in {200, 201}, res.json() # Creates a binding constraint with the new API - res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", - json={ - "name": "binding_constraint_3", - "enabled": True, - "timeStep": "hourly", - "operator": "less", - "terms": [], - "comments": "New API", - }, + preparer.create_binding_constraint( + study_id, + name="binding_constraint_3", + enabled=True, + timeStep="hourly", + operator="less", + terms=[], + comments="New API", ) - assert res.status_code in {200, 201}, res.json() # Get Binding Constraint list - res = client.get(f"/v1/studies/{study_id}/bindingconstraints") - binding_constraints_list = res.json() - assert res.status_code == 200, res.json() + binding_constraints_list = preparer.get_binding_constraints(study_id) assert len(binding_constraints_list) == 3 # Group section should not exist as the study version is prior to 8.7 assert "group" not in binding_constraints_list[0] @@ -275,7 +246,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st expected = [ { "data": {"area1": area1_id, "area2": area2_id}, - "id": f"{area1_id}%{area2_id}", + "id": link_id, "offset": 2, "weight": 1.0, }, @@ -303,7 +274,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st expected = [ { "data": {"area1": area1_id, "area2": area2_id}, - "id": f"{area1_id}%{area2_id}", + "id": link_id, "offset": 2, "weight": 1.0, }, @@ -341,7 +312,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st } # Remove Constraint term - res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term/{area1_id}%{area2_id}") + res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term/{link_id}") assert res.status_code == 200, res.json() # Check updated terms, the deleted term should no longer exist. 
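Reviewer note: the hunks above replace long sequences of raw `client.post(...)` calls with the new `PreparerProxy` helper added in `tests/integration/prepare_proxy.py`. A minimal sketch of the consolidated setup flow, assuming the usual `client`/`user_access_token` integration-test fixtures (the `prepare_bc_study` wrapper is hypothetical; the `PreparerProxy` methods and signatures are the ones added in this diff):

```python
from starlette.testclient import TestClient

from tests.integration.prepare_proxy import PreparerProxy


def prepare_bc_study(client: TestClient, user_access_token: str) -> str:
    # Hypothetical helper: a v8.6 study with two linked areas, a thermal
    # cluster and a single binding constraint, built via PreparerProxy.
    preparer = PreparerProxy(client, user_access_token)
    study_id = preparer.create_study("foo", version=860)
    area1_id = preparer.create_area(study_id, name="Area 1")["id"]  # id is "area 1"
    area2_id = preparer.create_area(study_id, name="Area 2")["id"]  # id is "area 2"
    preparer.create_link(study_id, area1_id=area1_id, area2_id=area2_id)
    preparer.create_thermal(study_id, area1_id, name="Cluster 1", group="Nuclear")
    preparer.create_binding_constraint(
        study_id, name="binding_constraint_1", enabled=True, timeStep="hourly", operator="less", terms=[]
    )
    assert len(preparer.get_binding_constraints(study_id)) == 1
    return study_id
```

Each helper asserts on, or raises for, a non-2xx response, so the test bodies no longer need a status-code assertion after every call.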
@@ -550,39 +521,17 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study # ============================= # STUDY PREPARATION # ============================= - res = client.post("/v1/studies", params={"name": "foo"}) - assert res.status_code == 201, res.json() - study_id = res.json() + preparer = PreparerProxy(client, user_access_token) + study_id = preparer.create_study("foo", version=870) if study_type == "variant": - # Create Variant - res = client.post(f"/v1/studies/{study_id}/variants", params={"name": "Variant 1"}) - assert res.status_code in {200, 201} - study_id = res.json() + study_id = preparer.create_variant(study_id, name="Variant 1") - # Create Areas - res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 1", "type": "AREA"}) - assert res.status_code == 200, res.json() - area1_id = res.json()["id"] - assert area1_id == "area 1" - - res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 2", "type": "AREA"}) - assert res.status_code == 200, res.json() - area2_id = res.json()["id"] - assert area2_id == "area 2" - - # Create a link between the two areas - res = client.post(f"/v1/studies/{study_id}/links", json={"area1": area1_id, "area2": area2_id}) - assert res.status_code == 200, res.json() - - # Create a cluster in area1 - res = client.post( - f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", - json={"name": "Cluster 1", "group": "Nuclear"}, - ) - assert res.status_code == 200, res.json() - cluster_id = res.json()["id"] - assert cluster_id == "Cluster 1" + # Create Areas, link and cluster + area1_id = preparer.create_area(study_id, name="Area 1")["id"] + area2_id = preparer.create_area(study_id, name="Area 2")["id"] + link_id = preparer.create_link(study_id, area1_id=area1_id, area2_id=area2_id)["id"] + cluster_id = preparer.create_thermal(study_id, area1_id, name="Cluster 1", group="Nuclear")["id"] # ============================= # CREATION @@ -591,27 +540,34 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study # Creation of a bc without group bc_id_wo_group = "binding_constraint_1" args = {"enabled": True, "timeStep": "hourly", "operator": "less", "terms": [], "comments": "New API"} - res = client.post(f"/v1/studies/{study_id}/bindingconstraints", json={"name": bc_id_wo_group, **args}) - assert res.status_code in {200, 201} - assert res.json()["group"] == "default" + operator_1 = "lt" + properties = preparer.create_binding_constraint(study_id, name=bc_id_wo_group, **args) + assert properties["group"] == "default" # Creation of bc with a group bc_id_w_group = "binding_constraint_2" - res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", - json={"name": bc_id_w_group, "group": "specific_grp", **args}, - ) - assert res.status_code in {200, 201} - assert res.json()["group"] == "specific_grp" + args["operator"], operator_2 = "greater", "gt" + properties = preparer.create_binding_constraint(study_id, name=bc_id_w_group, group="specific_grp", **args) + assert properties["group"] == "specific_grp" # Creation of bc with a matrix bc_id_w_matrix = "binding_constraint_3" matrix_lt3 = np.ones((8784, 3)) + args["operator"], operator_3 = "equal", "eq" + # verify that trying to create a binding constraint with a less_term_matrix + # while using an `equal` operator will raise a 422 error res = client.post( f"/v1/studies/{study_id}/bindingconstraints", json={"name": bc_id_w_matrix, "less_term_matrix": matrix_lt3.tolist(), **args}, ) - assert res.status_code in {200, 201}, res.json() + assert
res.status_code == 422, res.json() + + # now we create the binding constraint with the correct matrix + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints", + json={"name": bc_id_w_matrix, "equal_term_matrix": matrix_lt3.tolist(), **args}, + ) + res.raise_for_status() if study_type == "variant": res = client.get(f"/v1/studies/{study_id}/commands") @@ -619,21 +575,34 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study less_term_matrix = last_cmd_args["less_term_matrix"] equal_term_matrix = last_cmd_args["equal_term_matrix"] greater_term_matrix = last_cmd_args["greater_term_matrix"] - assert greater_term_matrix == equal_term_matrix != less_term_matrix + assert greater_term_matrix == less_term_matrix != equal_term_matrix # Check that raw matrices are created - for term in ["lt", "gt", "eq"]: - path = f"input/bindingconstraints/{bc_id_w_matrix}_{term}" - res = client.get( - f"/v1/studies/{study_id}/raw", - params={"path": path, "depth": 1, "formatted": True},  # type: ignore - ) - assert res.status_code == 200, res.json() - data = res.json()["data"] - if term == "lt": - assert data == matrix_lt3.tolist() - else: - assert data == np.zeros((matrix_lt3.shape[0], 1)).tolist() + # pair each constraint with the operator it was created with: "lt", "eq" and "gt" respectively + for bc_id, operator in zip( + [bc_id_wo_group, bc_id_w_matrix, bc_id_w_group], [operator_1, operator_3, operator_2] + ): + for term in ["lt", "gt", "eq"]: + path = f"input/bindingconstraints/{bc_id}_{term}" + res = client.get( + f"/v1/studies/{study_id}/raw", + params={"path": path, "depth": 1, "formatted": True},  # type: ignore + ) + # as we save only the operator matrix, we should have a matrix only for the operator + if term != operator: + assert res.status_code == 404, res.json() + continue + assert res.status_code == 200, res.json() + data = res.json()["data"] + # only binding_constraint_3 was created with an explicit matrix + if bc_id == bc_id_w_matrix: + assert data == matrix_lt3.tolist() + else: + assert data == np.zeros((matrix_lt3.shape[0], 1)).tolist() # ============================= # CONSTRAINT TERM MANAGEMENT @@ -671,7 +640,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study description = res.json()["description"] assert exception == "DuplicateConstraintTerm" assert bc_id_w_group in description, "Error message should contain the binding constraint ID" - assert f"{area1_id}%{area2_id}" in description, "Error message should contain the duplicate term ID" + assert link_id in description, "Error message should contain the duplicate term ID" # Get binding constraints list to check added terms res = client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}") @@ -681,7 +650,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study expected = [ { "data": {"area1": area1_id, "area2": area2_id}, - "id": f"{area1_id}%{area2_id}", + "id": link_id, "offset": 2, "weight": 1.0, }, @@ -699,7 +668,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms", json=[ { - "id": f"{area1_id}%{area2_id}", + "id": link_id, "weight": 4.4, "offset": 1, }, @@ -720,7 +689,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study expected = [ { "data": {"area1": area1_id, "area2": area2_id}, - "id": f"{area1_id}%{area2_id}", + "id": link_id, "offset": 1, "weight": 4.4, }, @@ -746,13 +715,32 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study assert res.status_code == 200, res.json() assert
res.json()["group"] == grp_name - # Update matrix_term + # check that updating of a binding constraint that has an operator "equal" + # with a greater matrix will raise an error 422 + res = client.put( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", + json={"greater_term_matrix": matrix_lt3.tolist()}, + ) + assert res.status_code == 422, res.json() + assert "greater_term_matrix" in res.json()["description"] + assert "equal" in res.json()["description"] + assert res.json()["exception"] == "InvalidFieldForVersionError" + + # update the binding constraint operator first + res = client.put( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", + json={"operator": "greater"}, + ) + assert res.status_code == 200, res.json() + + # update the binding constraint matrix res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", json={"greater_term_matrix": matrix_lt3.tolist()}, ) assert res.status_code == 200, res.json() + # check that the matrix has been updated res = client.get( f"/v1/studies/{study_id}/raw", params={"path": f"input/bindingconstraints/{bc_id_w_matrix}_gt"}, @@ -784,17 +772,44 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study # Check that the matrices are daily/weekly matrices expected_matrix = np.zeros((366, 1)) - for term_alias in ["lt", "gt", "eq"]: - res = client.get( - f"/v1/studies/{study_id}/raw", - params={ - "path": f"input/bindingconstraints/{bc_id_w_matrix}_{term_alias}", - "depth": 1, - "formatted": True, - }, # type: ignore - ) + for operator in ["less", "equal", "greater", "both"]: + if operator != "both": + res = client.put( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", + json={"operator": operator, f"{operator}_term_matrix": expected_matrix.tolist()}, + ) + else: + res = client.put( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", + json={ + "operator": operator, + "greater_term_matrix": expected_matrix.tolist(), + "less_term_matrix": expected_matrix.tolist(), + }, + ) assert res.status_code == 200, res.json() - assert res.json()["data"] == expected_matrix.tolist() + for term_operator, term_alias in zip(["less", "equal", "greater"], ["lt", "eq", "gt"]): + res = client.get( + f"/v1/studies/{study_id}/raw", + params={ + "path": f"input/bindingconstraints/{bc_id_w_matrix}_{term_alias}", + "depth": 1, + "formatted": True, + }, # type: ignore + ) + # check that update is made if no conflict between the operator and the matrix term alias + if term_operator == operator or (operator == "both" and term_operator in ["less", "greater"]): + assert res.status_code == 200, res.json() + assert res.json()["data"] == expected_matrix.tolist() + else: + assert res.status_code == 404, res.json() + + # set binding constraint operator to "less" + res = client.put( + f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}", + json={"operator": "less"}, + ) + assert res.status_code == 200, res.json() # ============================= # DELETE @@ -805,28 +820,30 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study assert res.status_code == 200, res.json() # Asserts that the deletion worked - res = client.get(f"/v1/studies/{study_id}/bindingconstraints") - assert len(res.json()) == 2 + binding_constraints_list = preparer.get_binding_constraints(study_id) + assert len(binding_constraints_list) == 2 # ============================= # ERRORS # ============================= # Creation with wrong matrix according to version - res = client.post( - 
f"/v1/studies/{study_id}/bindingconstraints", - json={ - "name": "binding_constraint_700", - "enabled": True, - "timeStep": "hourly", - "operator": "less", - "terms": [], - "comments": "New API", - "values": [[]], - }, - ) - assert res.status_code == 422, res.json() - assert res.json()["description"] == "You cannot fill 'values' as it refers to the matrix before v8.7" + for operator in ["less", "equal", "greater", "both"]: + args["operator"] = operator + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints", + json={ + "name": "binding_constraint_4", + "enabled": True, + "timeStep": "hourly", + "operator": operator, + "terms": [], + "comments": "New API", + "values": [[]], + }, + ) + assert res.status_code == 422 + assert res.json()["description"] == "You cannot fill 'values' as it refers to the matrix before v8.7" # Update with old matrices res = client.put( @@ -862,14 +879,16 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study # # Creation of 1 BC # Update raw with wrong columns size -> OK but validation should fail - # - matrix_lt3 = np.ones((8784, 3)) + # update the args operator field to "greater" + args["operator"] = "greater" + + matrix_gt3 = np.ones((8784, 3)) res = client.post( f"/v1/studies/{study_id}/bindingconstraints", json={ "name": "First BC", - "less_term_matrix": matrix_lt3.tolist(), + "greater_term_matrix": matrix_gt3.tolist(), "group": "Group 1", **args, }, @@ -879,12 +898,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study generator = np.random.default_rng(11) random_matrix = pd.DataFrame(generator.integers(0, 10, size=(4, 1))) - _upload_matrix( - client, - study_id, - f"input/bindingconstraints/{first_bc_id}_gt", - random_matrix, - ) + preparer.upload_matrix(study_id, f"input/bindingconstraints/{first_bc_id}_gt", random_matrix) # Validation should fail res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate") @@ -896,7 +910,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study # So, we correct the shape of the matrix res = client.put( f"/v1/studies/{study_id}/bindingconstraints/{first_bc_id}", - json={"greater_term_matrix": matrix_lt3.tolist()}, + json={"greater_term_matrix": matrix_gt3.tolist()}, ) assert res.status_code in {200, 201}, res.json() @@ -944,6 +958,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study # third_bd group changes to group1 -> Fails validation # + args["operator"] = "less" matrix_lt4 = np.ones((8784, 4)) res = client.post( f"/v1/studies/{study_id}/bindingconstraints", @@ -972,9 +987,15 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study assert re.search(r"the most common width in the group is 3", description, flags=re.IGNORECASE) assert re.search(r"'third bc_lt' has 4 columns", description, flags=re.IGNORECASE) + # first change `second_bc` operator to greater + client.put( + f"v1/studies/{study_id}/bindingconstraints/{second_bc_id}", + json={"operator": "greater"}, + ) + # So, we correct the shape of the matrix of the Second BC res = client.put( - f"/v1/studies/{study_id}/bindingconstraints/{third_bd_id}", + f"/v1/studies/{study_id}/bindingconstraints/{second_bc_id}", json={"greater_term_matrix": matrix_lt3.tolist()}, ) assert res.status_code in {200, 201}, res.json() @@ -1002,6 +1023,12 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study # This should succeed but cause the validation endpoint to fail. 
assert res.status_code in {200, 201}, res.json() + + # reset `second_bc` operator to less + client.put( + f"/v1/studies/{study_id}/bindingconstraints/{second_bc_id}", + json={"operator": "less"}, + ) + # Collect all the binding constraints groups res = client.get(f"/v1/studies/{study_id}/constraint-groups") assert res.status_code in {200, 201}, res.json() @@ -1031,3 +1058,153 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study assert re.search(r"'Group 1':", description, flags=re.IGNORECASE) assert re.search(r"the most common width in the group is 3", description, flags=re.IGNORECASE) assert re.search(r"'third bc_lt' has 4 columns", description, flags=re.IGNORECASE) + + @pytest.mark.parametrize("study_version", [870]) + @pytest.mark.parametrize("denormalize", [True, False]) + def test_rhs_matrices( + self, client: TestClient, user_access_token: str, study_version: int, denormalize: bool + ) -> None: + """ + The goal of this test is to verify that no unnecessary RHS matrices are created + in the case of **creation** or **update** of a binding constraint. + This test only concerns studies in **version >= 8.7** for which we have a specific matrix + for each operator: "less", "equal", "greater" or "both". + + To perform this test, we will create a raw study "Base Study" with a "France" area + and a single thermal cluster "Nuclear". + We will then create a variant study "Variant Study" based on the raw study "Base Study" + to apply binding constraint creation or update commands. + + The use of a variant and commands makes it possible to check the behavior for both variant studies + and raw studies by generating the variant snapshot. + + To verify the expected behaviors, we must check the number and naming of the matrices + after generating the snapshot. + In the case of an update and depending on the values of the `operator` and `time_step` parameters, + we must also check the preservation or zeroing of the matrix values.
+ """ + client.headers = {"Authorization": f"Bearer {user_access_token}"} # type: ignore + + # ======================= + # RAW STUDY PREPARATION + # ======================= + + preparer = PreparerProxy(client, user_access_token) + study_id = preparer.create_study("Base Study", version=study_version) + area_id = preparer.create_area(study_id, name="France")["id"] + cluster_id = preparer.create_thermal(study_id, area_id, name="Nuclear", group="Nuclear")["id"] + + # ============================= + # VARIANT STUDY CREATION + # ============================= + + variant_id = preparer.create_variant(study_id, name="Variant Study") + + # ============================= + # CREATION W/O MATRICES + # ============================= + + all_time_steps = set(MATRIX_SIZES) + all_operators = set(REQUIRED_MATRICES) + + for bc_time_step in all_time_steps: + for bc_operator in all_operators: + bc_name = f"BC_{bc_time_step}_{bc_operator}" + # Creation of a binding constraint without matrices using a command + res = client.post( + f"/v1/studies/{variant_id}/commands", + json=[ + { + "action": "create_binding_constraint", + "args": { + "name": bc_name, + "type": bc_time_step, + "operator": bc_operator, + "coeffs": {f"{area_id}.{cluster_id.lower()}": [1, 2]}, + }, + } + ], + ) + assert res.status_code == 200, res.json() + + preparer.generate_snapshot(variant_id, denormalize=denormalize) + + # Check the matrices size, values and existence + for bc_time_step in all_time_steps: + for bc_operator in all_operators: + bc_name = f"BC_{bc_time_step}_{bc_operator}" + bc_id = bc_name.lower() + + required_matrices = REQUIRED_MATRICES[bc_operator] + for matrix in required_matrices: + df = preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}") + assert df.shape == (MATRIX_SIZES[bc_time_step], 1) + assert (df == 0).all().all() + + superfluous_matrices = {"lt", "gt", "eq"} - required_matrices + for matrix in superfluous_matrices: + try: + preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}") + except HTTPError as e: + assert e.response.status_code == 404 + else: + assert False, "The matrix should not exist" + + # drop all commands to avoid conflicts with the next test + preparer.drop_all_commands(variant_id) + + # ============================= + # CREATION WITH MATRICES + # ============================= + + # random matrices + matrices_by_time_steps = { + time_step: np.random.rand(size, 1).astype(np.float64) for time_step, size in MATRIX_SIZES.items() + } + + for bc_time_step in all_time_steps: + for bc_operator in all_operators: + bc_name = f"BC_{bc_time_step}_{bc_operator}" + matrix = matrices_by_time_steps[bc_time_step].tolist() + args = { + "name": bc_name, + "type": bc_time_step, + "operator": bc_operator, + "coeffs": {f"{area_id}.{cluster_id.lower()}": [1, 2]}, + } + if bc_operator == "less": + args["lessTermMatrix"] = matrix + elif bc_operator == "greater": + args["greaterTermMatrix"] = matrix + elif bc_operator == "equal": + args["equalTermMatrix"] = matrix + else: + args["lessTermMatrix"] = args["greaterTermMatrix"] = matrix + res = client.post( + f"/v1/studies/{variant_id}/commands", + json=[{"action": "create_binding_constraint", "args": args}], + ) + assert res.status_code == 200, res.json() + + preparer.generate_snapshot(variant_id, denormalize=denormalize) + + # Check the matrices size, values and existence + for bc_time_step in all_time_steps: + for bc_operator in all_operators: + bc_name = f"BC_{bc_time_step}_{bc_operator}" + bc_id = bc_name.lower() + + 
required_matrices = REQUIRED_MATRICES[bc_operator] + for matrix in required_matrices: + df = preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}") + assert df.shape == (MATRIX_SIZES[bc_time_step], 1) + assert np.allclose(df.values, matrices_by_time_steps[bc_time_step], atol=1e-6) + + superfluous_matrices = {"lt", "gt", "eq"} - required_matrices + for matrix in superfluous_matrices: + try: + preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}") + except HTTPError as e: + assert e.response.status_code == 404 + else: + assert False, "The matrix should not exist" diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index f0eb8491f1..55c1073168 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -625,8 +625,11 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: ] res = client.put(f"/v1/studies/{study_id}/layers/1?name=test2") + assert res.status_code in {200, 201}, res.json() res = client.put(f"/v1/studies/{study_id}/layers/1", json=["area 1"]) + assert res.status_code in {200, 201}, res.json() res = client.put(f"/v1/studies/{study_id}/layers/1", json=["area 2"]) + assert res.status_code in {200, 201}, res.json() res = client.get(f"/v1/studies/{study_id}/layers") assert res.json() == [ LayerInfoDTO(id="0", name="All", areas=["area 1", "area 2"]).dict(), diff --git a/tests/storage/business/test_xpansion_manager.py b/tests/storage/business/test_xpansion_manager.py index bb5651bcbd..100bddd286 100644 --- a/tests/storage/business/test_xpansion_manager.py +++ b/tests/storage/business/test_xpansion_manager.py @@ -10,6 +10,7 @@ from fastapi import UploadFile from pandas.errors import ParserError +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.study.business.xpansion_management import ( FileCurrentlyUsedInSettings, @@ -26,7 +27,6 @@ from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.files import build from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree from antarest.study.storage.rawstudy.raw_study_service import RawStudyService from antarest.study.storage.storage_service import StudyStorageService diff --git a/tests/storage/integration/data/digest_file.py b/tests/storage/integration/data/digest_file.py new file mode 100644 index 0000000000..363d2819bd --- /dev/null +++ b/tests/storage/integration/data/digest_file.py @@ -0,0 +1,2 @@ +# fmt: off +digest_file = {'columns': [str(i) for i in range(54)], 'data': [['', 'digest', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'VARIABLES', 'AREAS', 'LINKS', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '53', '4', '0', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', 
'', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', 'OV. COST', 'OP. COST', 'MRG. PRICE', 'CO2 EMIS.', 'BALANCE', 'ROW BAL.', 'PSP', 'MISC. NDG', 'LOAD', 'H. ROR', 'WIND', 'SOLAR', 'NUCLEAR', 'LIGNITE', 'COAL', 'GAS', 'OIL', 'MIX. FUEL', 'MISC. DTG', 'H. STOR', 'H. PUMP', 'H. LEV', 'H. INFL', 'H. OVFL', 'H. VAL', 'H. COST', 'UNSP. ENRG', 'SPIL. ENRG', 'LOLD', 'LOLP', 'AVL DTG', 'DTG MRG', 'MAX MRG', 'NP COST', '01_solar', '02_wind_on', '03_wind_off', '04_res', '05_nuclear', '06_coal', '07_gas', '08_non-res', '09_hydro_pump', 'NODU', '01_solar', '02_wind_on', '03_wind_off', '04_res', '05_nuclear', '06_coal', '07_gas', '08_non-res', '09_hydro_pump'], ['', '', 'Euro', 'Euro', 'Euro', 'Tons', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', 'MWh', '%', 'MWh', '%', 'Euro/MWh', 'Euro', 'MWh', 'MWh', 'Hours', '%', 'MWh', 'MWh', 'MWh', 'Euro', 'NP Cost - Euro', 'NP Cost - Euro', 'NP Cost - Euro', 'NP Cost - Euro', 'NP Cost - Euro', 'NP Cost - Euro', 'NP Cost - Euro', 'NP Cost - Euro', 'NP Cost - Euro', ' ', 'NODU', 'NODU', 'NODU', 'NODU', 'NODU', 'NODU', 'NODU', 'NODU', 'NODU'], ['', '', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'values', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'values', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP', 'EXP'], ['', 'de', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', 'N/A', '0', '0', 'N/A', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0'], ['', 'es', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', 'N/A', '0', '0', 'N/A', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0'], ['', 'fr', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', 'N/A', '0', '0', 'N/A', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0'], ['', 'it', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', 'N/A', '0', '0', 'N/A', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0', '0'], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'digest', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'VARIABLES', 'AREAS', 'LINKS', '', '', '', '', '', '', '', 
'', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '0', '0', '0', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'Links (FLOW LIN.)', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', 'From...', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '...To', 'de', 'es', 'fr', 'it', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'de', 'X', '--', '0', '--', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], 
['', 'es', '--', 'X', '0', '--', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'fr', '0', '0', 'X', '0', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'it', '--', '--', '0', 'X', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'Links (FLOW QUAD.)', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '', 'From...', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', '...To', 'de', 'es', 'fr', 'it', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'de', 'X', '--', '0', '--', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'es', '--', 'X', '0', '--', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'fr', '0', '0', 'X', '0', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', ''], ['', 'it', '--', '--', '0', 'X', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '', '']]} diff --git a/tests/storage/integration/test_STA_mini.py b/tests/storage/integration/test_STA_mini.py index c7ce7c03f1..6f0d830ccd 100644 --- a/tests/storage/integration/test_STA_mini.py +++ b/tests/storage/integration/test_STA_mini.py @@ -20,6 +20,7 @@ from tests.helpers import assert_study from tests.storage.integration.data.de_details_hourly import 
de_details_hourly from tests.storage.integration.data.de_fr_values_hourly import de_fr_values_hourly +from tests.storage.integration.data.digest_file import digest_file from tests.storage.integration.data.set_id_annual import set_id_annual from tests.storage.integration.data.set_values_monthly import set_values_monthly @@ -377,7 +378,12 @@ def test_sta_mini_input(storage_service, url: str, expected_output: dict): ), ( "/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/economy/mc-all/grid/areas", - b"id\tname\nde\tDE\nes\tES\nfr\tFR\nit\tIT\n", + {"columns": ["id", "name"], "data": [["de", "DE"], ["es", "ES"], ["fr", "FR"], ["it", "IT"]]}, + ), + ("/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/economy/mc-all/grid/digest", digest_file), + ( + "/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/economy/mc-all/grid/links", + {"columns": ["upstream", "downstream"], "data": [["de", "fr"], ["es", "fr"], ["fr", "it"]]}, ), ( "/v1/studies/STA-mini/raw?path=output/20201014-1422eco-hello/economy/mc-all/links/de/fr", diff --git a/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py index 6b7bcbaa01..b6ac49fce1 100644 --- a/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py +++ b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py @@ -5,11 +5,11 @@ import pytest +from antarest.core.exceptions import ChildNotFoundError from antarest.matrixstore.service import ISimpleMatrixService from antarest.matrixstore.uri_resolver_service import UriResolverService from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix diff --git a/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py b/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py index c93f999627..e6eb256c51 100644 --- a/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py +++ b/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py @@ -4,8 +4,8 @@ import pandas as pd import pytest +from antarest.core.exceptions import ChildNotFoundError, MustNotModifyOutputException from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.head_writer import AreaHeadWriter from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import OutputSeriesMatrix @@ -84,33 +84,13 @@ def test_load__file_not_found(self, my_study_config: FileStudyTreeConfig) -> Non assert "not found" in err_msg.lower() def test_save(self, my_study_config: FileStudyTreeConfig) -> None: - serializer = Mock() - serializer.build_date.return_value = pd.DataFrame( - { - 0: ["DE", "", "", "", ""], - 1: ["hourly", "", "index", 1, 2], - 2: ["", "", "day", "1", "1"], - 3: ["", "", "month", "JAN", "JAN"], - 4: ["", "", "hourly", "00:00", "01:00"], - } - ) - node = OutputSeriesMatrix( context=Mock(), config=my_study_config, freq=MatrixFrequency.DAILY, - 
date_serializer=serializer, + date_serializer=Mock(), head_writer=AreaHeadWriter(area="de", data_type="va", freq="hourly"), ) - matrix = pd.DataFrame( - data={ - ("01_solar", "MWh", "EXP"): [27000, 48000], - ("02_wind_on", "MWh", "EXP"): [600, 34400], - }, - index=["01/01", "01/02"], - ) - - node.dump(matrix.to_dict(orient="split")) # type: ignore - actual = my_study_config.path.read_text() - assert actual == MATRIX_DAILY_DATA + with pytest.raises(MustNotModifyOutputException, match="Should not modify output file"): + node.dump(data={}) diff --git a/tests/storage/repository/filesystem/test_folder_node.py b/tests/storage/repository/filesystem/test_folder_node.py index 7927927d7e..ae017d7007 100644 --- a/tests/storage/repository/filesystem/test_folder_node.py +++ b/tests/storage/repository/filesystem/test_folder_node.py @@ -6,9 +6,9 @@ import pytest +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import StudyFactory -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.raw_file_node import RawFileNode diff --git a/tests/storage/repository/filesystem/test_lazy_node.py b/tests/storage/repository/filesystem/test_lazy_node.py index f899d32fa3..a2c72415f5 100644 --- a/tests/storage/repository/filesystem/test_lazy_node.py +++ b/tests/storage/repository/filesystem/test_lazy_node.py @@ -2,6 +2,8 @@ from typing import List, Optional from unittest.mock import Mock +import pytest + from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode @@ -138,3 +140,113 @@ def test_save_txt(tmp_path: Path): assert file.read_text() == content assert not link.exists() resolver.resolve.assert_called_once_with(content) + + +@pytest.mark.parametrize("target_is_link", [True, False]) +def test_rename_file(tmp_path: Path, target_is_link: bool): + file = tmp_path / "my-study/lazy.txt" + file.parent.mkdir() + + link = file.parent / f"{file.name}.link" + link.write_text("Link: Mock File Content") + + resolver = Mock() + resolver.resolve.return_value = None + + resolver2 = Mock() + resolver2.resolve.return_value = None + + config = FileStudyTreeConfig(study_path=file, path=file, version=-1, study_id="") + context = ContextServer(matrix=Mock(), resolver=resolver) + node = MockLazyNode(context=context, config=config) + + renaming_file = file.parent / "lazy_rename.txt" + renaming_link = file.parent / f"{renaming_file.name}.link" + config2 = FileStudyTreeConfig(study_path=renaming_file, path=renaming_file, version=-1, study_id="") + context2 = ContextServer(matrix=Mock(), resolver=resolver2) + target = MockLazyNode(context=context2, config=config2) + + if target_is_link: + assert not renaming_link.exists() + assert link.exists() + assert not file.exists() + assert not renaming_file.exists() + + node.rename_file(target) + + assert not link.exists() + assert renaming_link.exists() + assert not file.exists() + assert not renaming_file.exists() + assert renaming_link.read_text() == "Link: Mock File Content" + + else: + content = 
"No Link: Mock File Content" + node.save(content) + assert file.read_text() == content + assert not link.exists() + assert not renaming_file.exists() + resolver.resolve.assert_called_once_with(content) + + node.rename_file(target) + + assert not link.exists() + assert not file.exists() + assert renaming_file.exists() + assert not renaming_link.exists() + assert renaming_file.read_text() == "No Link: Mock File Content" + + +@pytest.mark.parametrize("target_is_link", [True, False]) +def test_copy_file(tmp_path: Path, target_is_link: bool): + file = tmp_path / "my-study/lazy.txt" + file.parent.mkdir() + + link = file.parent / f"{file.name}.link" + link.write_text("Link: Mock File Content") + + resolver = Mock() + resolver.resolve.return_value = None + + resolver2 = Mock() + resolver2.resolve.return_value = None + + config = FileStudyTreeConfig(study_path=file, path=file, version=-1, study_id="") + context = ContextServer(matrix=Mock(), resolver=resolver) + node = MockLazyNode(context=context, config=config) + + copied_file = file.parent / "lazy_copy.txt" + copied_link = file.parent / f"{copied_file.name}.link" + config2 = FileStudyTreeConfig(study_path=copied_file, path=copied_file, version=-1, study_id="") + context2 = ContextServer(matrix=Mock(), resolver=resolver2) + target = MockLazyNode(context=context2, config=config2) + + if target_is_link: + assert not copied_link.exists() + assert link.exists() + assert not file.exists() + assert not copied_file.exists() + + node.copy_file(target) + + assert link.exists() + assert copied_link.exists() + assert not file.exists() + assert not copied_file.exists() + assert copied_link.read_text() == "Link: Mock File Content" + + else: + content = "No Link: Mock File Content" + node.save(content) + assert file.read_text() == content + assert not link.exists() + assert not copied_file.exists() + resolver.resolve.assert_called_once_with(content) + + node.copy_file(target) + + assert not link.exists() + assert file.exists() + assert copied_file.exists() + assert not copied_link.exists() + assert copied_file.read_text() == "No Link: Mock File Content" diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index dd5e8a917e..f2dc3ccaf5 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -26,7 +26,10 @@ from antarest.study.storage.variantstudy.model.command.remove_area import RemoveArea from antarest.study.storage.variantstudy.model.command.remove_binding_constraint import RemoveBindingConstraint from antarest.study.storage.variantstudy.model.command.remove_link import RemoveLink -from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint +from antarest.study.storage.variantstudy.model.command.update_binding_constraint import ( + UpdateBindingConstraint, + _update_matrices_names, +) from antarest.study.storage.variantstudy.model.command.update_scenario_builder import UpdateScenarioBuilder from antarest.study.storage.variantstudy.model.command_context import CommandContext @@ -72,10 +75,7 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm matrix_links = [ # fmt: off "bd 1_lt.txt.link", - "bd 1_eq.txt.link", - "bd 1_gt.txt.link", "bd 2_lt.txt.link", - "bd 2_eq.txt.link", "bd 2_gt.txt.link", # fmt: on ] @@ -514,3 +514,94 @@ def test_create_diff(command_context: CommandContext): 
@@ -514,3 +514,94 @@ def test_create_diff(command_context: CommandContext): base = RemoveBindingConstraint(id="foo", command_context=command_context) other_match = RemoveBindingConstraint(id="foo", command_context=command_context) assert base.create_diff(other_match) == [] + + +@pytest.mark.parametrize( + "existing_operator, new_operator", + [ + (BindingConstraintOperator.LESS, BindingConstraintOperator.LESS), + (BindingConstraintOperator.LESS, BindingConstraintOperator.GREATER), + (BindingConstraintOperator.LESS, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.LESS, BindingConstraintOperator.EQUAL), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.LESS), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.GREATER), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.EQUAL), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.LESS), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.GREATER), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.EQUAL), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.LESS), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.GREATER), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.EQUAL), + ], +) +@pytest.mark.parametrize("empty_study", ["empty_study_870.zip"], indirect=True) +def test__update_matrices_names( + empty_study: FileStudy, + command_context: CommandContext, + existing_operator: BindingConstraintOperator, + new_operator: BindingConstraintOperator, +): + study_path = empty_study.config.study_path + + all_file_templates = {"{bc_id}_eq.txt.link", "{bc_id}_gt.txt.link", "{bc_id}_lt.txt.link"} + + operator_matrix_file_map = { + BindingConstraintOperator.EQUAL: ["{bc_id}_eq.txt.link"], + BindingConstraintOperator.GREATER: ["{bc_id}_gt.txt.link"], + BindingConstraintOperator.LESS: ["{bc_id}_lt.txt.link"], + BindingConstraintOperator.BOTH: ["{bc_id}_lt.txt.link", "{bc_id}_gt.txt.link"], + } + + area1 = "area1" + area2 = "area2" + cluster = "cluster" + CreateArea(area_name=area1, command_context=command_context).apply(empty_study) + CreateArea(area_name=area2, command_context=command_context).apply(empty_study) + CreateLink(area1=area1, area2=area2, command_context=command_context).apply(empty_study) + CreateCluster(area_id=area1, cluster_name=cluster, parameters={}, command_context=command_context).apply( + empty_study + ) + + # create a binding constraint + _ = CreateBindingConstraint( + name="BD_RENAME_MATRICES", + time_step=BindingConstraintFrequency.HOURLY, + operator=existing_operator, + coeffs={"area1%area2": [800, 30]}, + command_context=command_context, + ).apply(empty_study) + + # check that the matrices are created + file_templates = set(operator_matrix_file_map[existing_operator]) + superfluous_templates = all_file_templates - file_templates + existing_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in file_templates] + superfluous_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in superfluous_templates] + for matrix_link in existing_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert link_path.exists(), f"Missing matrix link: {matrix_link!r}" + for matrix_link in superfluous_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert not link_path.exists(), f"Superfluous matrix link: {matrix_link!r}" + + # update matrices names + _update_matrices_names( + file_study=empty_study, + binding_constraint_id="bd_rename_matrices", + existing_operator=existing_operator, + new_operator=new_operator, + ) + + # check that the matrices are renamed + file_templates = set(operator_matrix_file_map[new_operator]) + superfluous_templates = all_file_templates - file_templates + new_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in file_templates] + superfluous_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in superfluous_templates] + for matrix_link in new_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert link_path.exists(), f"Missing matrix link: {matrix_link!r}" + for matrix_link in superfluous_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert not link_path.exists(), f"Superfluous matrix link: {matrix_link!r}"
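The test above drives `_update_matrices_names` through all 16 operator transitions. The idea is that changing a constraint's operator renames its term matrices instead of regenerating them, so existing data survives the switch. A deliberately simplified on-disk sketch under the same lt/gt/eq naming scheme (the real helper goes through the study tree and command context and handles matrix content more carefully than this):

```python
from pathlib import Path

# Suffixes each operator requires, as in the test's operator_matrix_file_map.
SUFFIXES = {"equal": {"eq"}, "greater": {"gt"}, "less": {"lt"}, "both": {"lt", "gt"}}


def rename_constraint_matrices(bc_dir: Path, bc_id: str, old: str, new: str) -> None:
    """Make the matrix files on disk match the new operator, reusing content."""
    to_create = SUFFIXES[new] - SUFFIXES[old]
    to_remove = SUFFIXES[old] - SUFFIXES[new]
    # Seed newly required files from a matrix the constraint already owns.
    seed = bc_dir / f"{bc_id}_{sorted(SUFFIXES[old])[0]}.txt.link"
    data = seed.read_bytes()
    for suffix in to_create:
        (bc_dir / f"{bc_id}_{suffix}.txt.link").write_bytes(data)
    for suffix in to_remove:
        (bc_dir / f"{bc_id}_{suffix}.txt.link").unlink()
```

With this model, the assertions above reduce to checking that exactly the suffix set of the new operator survives the rename.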
matrix link: {matrix_link!r}" + + # update matrices names + _update_matrices_names( + file_study=empty_study, + binding_constraint_id="bd_rename_matrices", + existing_operator=existing_operator, + new_operator=new_operator, + ) + + # check that the matrices are renamed + file_templates = set(operator_matrix_file_map[new_operator]) + superfluous_templates = all_file_templates - file_templates + new_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in file_templates] + superfluous_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in superfluous_templates] + for matrix_link in new_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert link_path.exists(), f"Missing matrix link: {matrix_link!r}" + for matrix_link in superfluous_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert not link_path.exists(), f"Superfluous matrix link: {matrix_link!r}" diff --git a/tests/variantstudy/model/command/test_remove_area.py b/tests/variantstudy/model/command/test_remove_area.py index af9a0b26f7..8849bffbd3 100644 --- a/tests/variantstudy/model/command/test_remove_area.py +++ b/tests/variantstudy/model/command/test_remove_area.py @@ -24,9 +24,7 @@ class TestRemoveArea: - @pytest.mark.parametrize("empty_study", ["empty_study_810.zip", "empty_study_840.zip"], indirect=True) - def test_apply(self, empty_study: FileStudy, command_context: CommandContext): - # noinspection SpellCheckingInspection + def _set_up(self, empty_study: FileStudy, command_context: CommandContext): empty_study.tree.save( { "input": { @@ -56,6 +54,19 @@ def test_apply(self, empty_study: FileStudy, command_context: CommandContext): create_area_command: ICommand = CreateArea(area_name=area_name, command_context=command_context) output = create_area_command.apply(study_data=empty_study) assert output.status, output.message + return empty_study, area_id + + @pytest.mark.parametrize("empty_study", ["empty_study_810.zip"], indirect=True) + def test_remove_with_aggregated(self, empty_study: FileStudy, command_context: CommandContext): + (empty_study, area_id) = self._set_up(empty_study, command_context) + remove_area_command = RemoveArea(id=area_id, command_context=command_context) + output = remove_area_command.apply(study_data=empty_study) + assert output.status, output.message + + @pytest.mark.parametrize("empty_study", ["empty_study_810.zip", "empty_study_840.zip"], indirect=True) + def test_apply(self, empty_study: FileStudy, command_context: CommandContext): + # noinspection SpellCheckingInspection + (empty_study, area_id) = self._set_up(empty_study, command_context) create_district_command = CreateDistrict( name="foo", diff --git a/tests/variantstudy/model/command/test_update_config.py b/tests/variantstudy/model/command/test_update_config.py index 99c71bd6d7..999adb6c70 100644 --- a/tests/variantstudy/model/command/test_update_config.py +++ b/tests/variantstudy/model/command/test_update_config.py @@ -3,10 +3,10 @@ import pytest +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter from 
diff --git a/webapp/package-lock.json b/webapp/package-lock.json index 15225c0021..24af412910 100644 --- a/webapp/package-lock.json +++ b/webapp/package-lock.json @@ -1,12 +1,12 @@ { "name": "antares-web", - "version": "2.17.3", + "version": "2.17.4", "lockfileVersion": 3, "requires": true, "packages": { "": { "name": "antares-web", - "version": "2.17.3", + "version": "2.17.4", "dependencies": { "@emotion/react": "11.11.1", "@emotion/styled": "11.11.0", diff --git a/webapp/package.json b/webapp/package.json index 98bfa9a549..96afbec9c6 100644 --- a/webapp/package.json +++ b/webapp/package.json @@ -1,6 +1,6 @@ { "name": "antares-web", - "version": "2.17.3", + "version": "2.17.4", "private": true, "type": "module", "scripts": { diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index e1e1d053d3..0c7a27dc7e 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -83,6 +83,7 @@ "global.error.failedtoretrievejobs": "Failed to retrieve job information", "global.error.failedtoretrievelogs": "Failed to retrieve job logs", "global.error.failedtoretrievedownloads": "Failed to retrieve downloads list", + "global.error.fileNotFound": "File not found", "global.error.create": "Creation failed", "global.error.delete": "Deletion failed", "global.area.add": "Add an area", @@ -112,7 +113,7 @@ "button.save": "Save", "button.explore": "Explore", "button.newCommand": "New command", - "common.nocontent": "No content", + "common.noContent": "No content", "common.underConstruction": "Page under construction", "common.dialog.import.dropzoneText": "Drop a file here, or click to select a file", "common.dialog.import.importSuccess": "File imported successfully", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index f14b30405a..ed667e5be3 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -83,6 +83,7 @@ "global.error.failedtoretrievejobs": "Échec de la récupération des tâches", "global.error.failedtoretrievelogs": "Échec de la récupération des logs", "global.error.failedtoretrievedownloads": "Échec de la récupération des exports", + "global.error.fileNotFound": "Fichier introuvable", "global.error.create": "La création a échoué", "global.error.delete": "La suppression a échoué", "global.area.add": "Ajouter une zone", @@ -112,7 +113,7 @@ "button.save": "Sauvegarder", "button.explore": "Explorer", "button.newCommand": "Nouvelle commande", - "common.nocontent": "Pas de contenu", + "common.noContent": "Pas de contenu", "common.underConstruction": "Page en construction", "common.dialog.import.dropzoneText": "Glisser un fichier ici, ou cliquer pour en sélectionner un", "common.dialog.import.importSuccess": "Fichier importé avec succès", diff --git a/webapp/src/components/App/Singlestudy/Commands/Edition/index.tsx b/webapp/src/components/App/Singlestudy/Commands/Edition/index.tsx index 790772e9fa..15934c4ad2 100644 --- a/webapp/src/components/App/Singlestudy/Commands/Edition/index.tsx +++ b/webapp/src/components/App/Singlestudy/Commands/Edition/index.tsx @@ -51,7 +51,7 @@ import { } from "../../../../../services/webSockets"; import ConfirmationDialog from "../../../../common/dialogs/ConfirmationDialog"; import CheckBoxFE from "../../../../common/fieldEditors/CheckBoxFE"; -import SimpleContent from 
"../../../../common/page/SimpleContent"; +import EmptyView from "../../../../common/page/SimpleContent"; const logError = debug("antares:variantedition:error"); @@ -506,7 +506,7 @@ function EditionView(props: Props) { loaded && ( - + ) diff --git a/webapp/src/components/App/Singlestudy/HomeView/InformationView/LauncherHistory/JobStepper.tsx b/webapp/src/components/App/Singlestudy/HomeView/InformationView/LauncherHistory/JobStepper.tsx index 9adaffe1a9..85f2edbf64 100644 --- a/webapp/src/components/App/Singlestudy/HomeView/InformationView/LauncherHistory/JobStepper.tsx +++ b/webapp/src/components/App/Singlestudy/HomeView/InformationView/LauncherHistory/JobStepper.tsx @@ -4,6 +4,7 @@ import StepLabel from "@mui/material/StepLabel"; import FiberManualRecordIcon from "@mui/icons-material/FiberManualRecord"; import BlockIcon from "@mui/icons-material/Block"; import ContentCopyIcon from "@mui/icons-material/ContentCopy"; +import EqualizerIcon from "@mui/icons-material/Equalizer"; import { StepIconProps, Tooltip, Typography } from "@mui/material"; import moment from "moment"; import { useState } from "react"; @@ -29,6 +30,8 @@ import { } from "./style"; import ConfirmationDialog from "../../../../../common/dialogs/ConfirmationDialog"; import LinearProgressWithLabel from "../../../../../common/LinearProgressWithLabel"; +import DigestDialog from "../../../../../common/dialogs/DigestDialog"; +import type { EmptyObject } from "../../../../../../utils/tsUtils"; export const ColorStatus = { running: "warning.main", @@ -37,6 +40,15 @@ export const ColorStatus = { failed: "error.main", }; +const iconStyle = { + m: 0.5, + height: 22, + cursor: "pointer", + "&:hover": { + color: "action.hover", + }, +}; + function QontoStepIcon(props: { className: string | undefined; status: JobStatus; @@ -55,6 +67,13 @@ function QontoStepIcon(props: { ); } +type DialogState = + | { + type: "killJob" | "digest"; + job: LaunchJob; + } + | EmptyObject; + interface Props { jobs: LaunchJob[]; jobsProgress: LaunchJobsProgress; @@ -65,29 +84,32 @@ export default function VerticalLinearStepper(props: Props) { const [t] = useTranslation(); const { enqueueSnackbar } = useSnackbar(); const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); - const [openConfirmationDialog, setOpenConfirmationDialog] = - useState(false); - const [jobIdKill, setJobIdKill] = useState(); + const [dialogState, setDialogState] = useState({}); - const openConfirmModal = (jobId: string) => { - setOpenConfirmationDialog(true); - setJobIdKill(jobId); - }; + //////////////////////////////////////////////////////////////// + // Utils + //////////////////////////////////////////////////////////////// - const killTask = (jobId: string) => { - (async () => { - try { - await killStudy(jobId); - } catch (e) { - enqueueErrorSnackbar(t("study.failtokilltask"), e as AxiosError); - } - setOpenConfirmationDialog(false); - })(); + const closeDialog = () => setDialogState({}); + + //////////////////////////////////////////////////////////////// + // Actions + //////////////////////////////////////////////////////////////// + + const killTask = async (jobId: LaunchJob["id"]) => { + closeDialog(); + + try { + await killStudy(jobId); + } catch (e) { + enqueueErrorSnackbar(t("study.failtokilltask"), e as AxiosError); + } }; - const copyId = (jobId: string): void => { + const copyId = (jobId: LaunchJob["id"]) => { try { navigator.clipboard.writeText(jobId); + enqueueSnackbar(t("study.success.jobIdCopy"), { variant: "success", }); @@ -96,6 +118,10 @@ export default function 
VerticalLinearStepper(props: Props) { } }; + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + return ( <ContentCopyIcon onClick={() => copyId(job.id)} - sx={{ - m: 0.5, - height: "22px", - cursor: "pointer", - "&:hover": { - color: "action.hover", - }, - }} + sx={iconStyle} /> + {job.status === "success" && ( + <Tooltip title="Digest"> + <EqualizerIcon + onClick={() => setDialogState({ type: "digest", job })} + sx={iconStyle} + /> + </Tooltip> + )} {job.status === "running" && ( <BlockIcon - onClick={() => openConfirmModal(job.id)} + onClick={() => + setDialogState({ type: "killJob", job }) + } sx={{ - m: 0.5, - height: "22px", - cursor: "pointer", + ...iconStyle, color: "error.light", "&:hover": { color: "error.dark" }, }} /> @@ -178,16 +205,24 @@ ))} - {openConfirmationDialog && ( + {dialogState.type === "killJob" && ( <ConfirmationDialog - onCancel={() => setOpenConfirmationDialog(false)} - onConfirm={() => killTask(jobIdKill as string)} - alert="warning" open + alert="warning" + onConfirm={() => killTask(dialogState.job.id)} + onCancel={closeDialog} > {t("study.question.killJob")} </ConfirmationDialog> )} + {dialogState.type === "digest" && ( + <DigestDialog + open + studyId={dialogState.job.studyId} + outputId={dialogState.job.outputId} + onOk={closeDialog} + /> + )} ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/Table.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/Table.tsx index 6ad04bdcfd..4abd82eb73 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/Table.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/General/dialogs/ScenarioBuilderDialog/Table.tsx @@ -7,7 +7,7 @@ import { updateScenarioBuilderConfig, } from "./utils"; import { SubmitHandlerPlus } from "../../../../../../../common/Form/types"; -import SimpleContent from "../../../../../../../common/page/SimpleContent"; +import EmptyView from "../../../../../../../common/page/SimpleContent"; import useEnqueueErrorSnackbar from "../../../../../../../../hooks/useEnqueueErrorSnackbar"; import { toError } from "../../../../../../../../utils/fnUtils"; import { useOutletContext } from "react-router"; @@ -53,7 +53,7 @@ function Table({ config, type, areaId }: Props) { //////////////////////////////////////////////////////////////// if (Object.keys(config).length === 0) { - return ; + return ; } return ( diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/index.tsx index 8868838934..be71efeb87 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Areas/index.tsx @@ -1,6 +1,6 @@ import { useOutletContext } from "react-router"; import { StudyMetadata } from "../../../../../../common/types"; -import SimpleContent from "../../../../../common/page/SimpleContent"; +import EmptyView from "../../../../../common/page/SimpleContent"; import AreaPropsView from "./AreaPropsView"; import AreasTab from "./AreasTab"; import useStudySynthesis from "../../../../../../redux/hooks/useStudySynthesis"; @@ -54,7 +54,7 @@ function Areas() { renewablesClustering={renewablesClustering !== "aggregated"} /> ) : ( - + ) } /> diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintTerm/OptionsList.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintTerm/OptionsList.tsx index 1747cf09c3..3927abadbd 
100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintTerm/OptionsList.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/BindingConstView/ConstraintTerm/OptionsList.tsx @@ -3,6 +3,7 @@ import { useTranslation } from "react-i18next"; import { AllClustersAndLinks } from "../../../../../../../../common/types"; import SelectSingle from "../../../../../../../common/SelectSingle"; import { ConstraintTerm, generateTermId, isTermExist } from "../utils"; +import { Box } from "@mui/material"; interface Option { id: string; @@ -108,7 +109,7 @@ export default function OptionsList({ //////////////////////////////////////////////////////////////// return ( - <> + handleAreaChange(value as string)} sx={{ - maxWidth: 200, - mr: 1, + minWidth: 300, }} /> - + ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx index 259a0351d9..f4348d786c 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/BindingConstraints/index.tsx @@ -2,7 +2,7 @@ import { Box } from "@mui/material"; import { useOutletContext } from "react-router"; import { StudyMetadata } from "../../../../../../common/types"; import SimpleLoader from "../../../../../common/loaders/SimpleLoader"; -import SimpleContent from "../../../../../common/page/SimpleContent"; +import EmptyView from "../../../../../common/page/SimpleContent"; import BindingConstPropsView from "./BindingConstPropsView"; import { getBindingConst, @@ -75,12 +75,12 @@ function BindingConstraints() { {data.length > 0 && currentConstraintId ? ( ) : ( - + )} )} - ifRejected={(error) => } + ifRejected={(error) => } /> ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Links/index.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Links/index.tsx index e86d4b5a16..fdc2a487c9 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Links/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Links/index.tsx @@ -1,6 +1,6 @@ import { useOutletContext } from "react-router"; import { StudyMetadata } from "../../../../../../common/types"; -import SimpleContent from "../../../../../common/page/SimpleContent"; +import EmptyView from "../../../../../common/page/SimpleContent"; import LinkPropsView from "./LinkPropsView"; import useStudySynthesis from "../../../../../../redux/hooks/useStudySynthesis"; import { getCurrentLink } from "../../../../../../redux/selectors"; @@ -44,7 +44,7 @@ function Links() { currentLink ? 
( ) : ( - + ) } /> diff --git a/webapp/src/components/App/Singlestudy/explore/Modelization/Map/CreateAreaDialog.tsx b/webapp/src/components/App/Singlestudy/explore/Modelization/Map/CreateAreaDialog.tsx index 8218217056..0233fd313c 100644 --- a/webapp/src/components/App/Singlestudy/explore/Modelization/Map/CreateAreaDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Modelization/Map/CreateAreaDialog.tsx @@ -6,6 +6,7 @@ import { SubmitHandlerPlus } from "../../../../../common/Form/types"; import useAppSelector from "../../../../../../redux/hooks/useAppSelector"; import { getAreas } from "../../../../../../redux/selectors"; import { validateString } from "../../../../../../utils/validationUtils"; +import Fieldset from "../../../../../common/Fieldset"; interface Props { studyId: string; @@ -41,6 +42,7 @@ function CreateAreaDialog(props: Props) { {({ control }) => ( - <StringFE - rules={{ - validate: (v) => - validateString(v, { existingValues: existingAreas }), - }} - /> + <Fieldset> + <StringFE + rules={{ + validate: (v) => + validateString(v, { existingValues: existingAreas }), + }} + /> + </Fieldset> )}
); diff --git a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx index 8f4d0ce2f6..daa10520c2 100644 --- a/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Results/ResultDetails/index.tsx @@ -1,6 +1,5 @@ import { Box, - Paper, Skeleton, ToggleButton, ToggleButtonGroup, @@ -138,11 +137,11 @@ function ResultDetails() { }, ); - const { data: synthesis } = usePromise( + const synthesisRes = usePromise( () => { if (outputId && selectedItem && isSynthesis) { const path = `output/${outputId}/economy/mc-all/grid/${selectedItem.id}`; - return getStudyData(study.id, path); + return getStudyData(study.id, path); } return Promise.resolve(null); }, @@ -266,14 +265,22 @@ function ResultDetails() { overflow: "auto", }} > - - {synthesis} - + ( + + )} + ifResolved={(matrix) => + matrix && ( + + ) + } + /> ) : ( (); const { t } = useTranslation(); const navigate = useNavigate(); const enqueueErrorSnackbar = useEnqueueErrorSnackbar(); - const [outputToDelete, setOutputToDelete] = useState(); + const [dialogState, setDialogState] = useState({}); const { data: studyJobs, isLoading: studyJobsLoading } = usePromiseWithSnackbarError(() => getStudyJobs(study.id), { @@ -150,12 +171,7 @@ function Results() { { handler().catch((e) => { enqueueErrorSnackbar( @@ -170,6 +186,12 @@ function Results() { ); }; + //////////////////////////////////////////////////////////////// + // Utils + //////////////////////////////////////////////////////////////// + + const closeDialog = () => setDialogState({}); + //////////////////////////////////////////////////////////////// // Event Handlers //////////////////////////////////////////////////////////////// @@ -180,12 +202,10 @@ function Results() { }); }; - const handleDeleteOutput = async () => { - if (outputToDelete) { - await deleteOutput(study.id, outputToDelete); - setOutputToDelete(undefined); - reloadOutputs(); - } + const handleDeleteOutput = async (outputName: string) => { + closeDialog(); + await deleteOutput(study.id, outputName); + reloadOutputs(); }; //////////////////////////////////////////////////////////////// @@ -350,12 +370,7 @@ function Results() { { if (row.job) { downloadJobOutput(row.job.id); @@ -365,6 +380,7 @@ function Results() { )} + {row.job && ( )} + {row.job?.status === "success" && ( + + { + setDialogState({ + type: "digest", + data: row.job as LaunchJob, + }); + }} + sx={iconStyle} + /> + + )} { - setOutputToDelete(row.name); + setDialogState({ + type: "confirmDelete", + data: row.name, + }); }} /> @@ -420,13 +450,23 @@ function Results() { - setOutputToDelete(undefined)} - > - {t("results.question.deleteOutput", { outputname: outputToDelete })} - + {dialogState.type === "confirmDelete" && ( + handleDeleteOutput(dialogState.data)} + onCancel={closeDialog} + > + {t("results.question.deleteOutput", { outputname: dialogState.data })} + + )} + {dialogState.type === "digest" && ( + + )} ); } diff --git a/webapp/src/components/App/Singlestudy/explore/Xpansion/Candidates/index.tsx b/webapp/src/components/App/Singlestudy/explore/Xpansion/Candidates/index.tsx index 022c40d9e8..42e7307f1d 100644 --- a/webapp/src/components/App/Singlestudy/explore/Xpansion/Candidates/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Xpansion/Candidates/index.tsx @@ -28,7 +28,7 @@ import CreateCandidateDialog from "./CreateCandidateDialog"; import CandidateForm from 
"./CandidateForm"; import usePromiseWithSnackbarError from "../../../../../../hooks/usePromiseWithSnackbarError"; import DataViewerDialog from "../../../../../common/dialogs/DataViewerDialog"; -import SimpleContent from "../../../../../common/page/SimpleContent"; +import EmptyView from "../../../../../common/page/SimpleContent"; import SplitView from "../../../../../common/SplitView"; function Candidates() { @@ -210,7 +210,7 @@ function Candidates() { }; if (isRejected) { - return ; + return ; } return ( diff --git a/webapp/src/components/common/MatrixInput/index.tsx b/webapp/src/components/common/MatrixInput/index.tsx index b1be7dcdd7..c48e67d508 100644 --- a/webapp/src/components/common/MatrixInput/index.tsx +++ b/webapp/src/components/common/MatrixInput/index.tsx @@ -15,7 +15,7 @@ import usePromiseWithSnackbarError from "../../../hooks/usePromiseWithSnackbarEr import { editMatrix, getStudyMatrixIndex } from "../../../services/api/matrix"; import { Root, Content, Header } from "./style"; import SimpleLoader from "../loaders/SimpleLoader"; -import SimpleContent from "../page/SimpleContent"; +import EmptyView from "../page/SimpleContent"; import EditableMatrix from "../EditableMatrix"; import ImportDialog from "../dialogs/ImportDialog"; import MatrixAssignDialog from "./MatrixAssignDialog"; @@ -183,7 +183,7 @@ function MatrixInput({ isPercentDisplayEnabled={enablePercentDisplay} /> ) : ( - !isLoading && + !isLoading && )} {openImportDialog && ( diff --git a/webapp/src/components/common/TableMode.tsx b/webapp/src/components/common/TableMode.tsx index e5770d79d2..7d40d5511f 100644 --- a/webapp/src/components/common/TableMode.tsx +++ b/webapp/src/components/common/TableMode.tsx @@ -14,7 +14,7 @@ import { SubmitHandlerPlus } from "./Form/types"; import TableForm from "./TableForm"; import UsePromiseCond from "./utils/UsePromiseCond"; import GridOffIcon from "@mui/icons-material/GridOff"; -import SimpleContent from "./page/SimpleContent"; +import EmptyView from "./page/SimpleContent"; export interface TableModeProps { studyId: StudyMetadata["id"]; @@ -75,7 +75,7 @@ function TableMode(props: TableModeProps) { autoSubmit={false} /> ) : ( - } title="study.results.noData" /> + ) } /> diff --git a/webapp/src/components/common/dialogs/ConfirmationDialog.tsx b/webapp/src/components/common/dialogs/ConfirmationDialog.tsx index bcdb009238..769fe36a62 100644 --- a/webapp/src/components/common/dialogs/ConfirmationDialog.tsx +++ b/webapp/src/components/common/dialogs/ConfirmationDialog.tsx @@ -30,9 +30,25 @@ function ConfirmationDialog(props: ConfirmationDialogProps) { const { t } = useTranslation(); + //////////////////////////////////////////////////////////////// + // Event Handlers + //////////////////////////////////////////////////////////////// + + const handleClose = ( + ...args: Parameters> + ) => { + onCancel(); + onClose?.(...args); + }; + + //////////////////////////////////////////////////////////////// + // JSX + //////////////////////////////////////////////////////////////// + return ( diff --git a/webapp/src/components/common/dialogs/DigestDialog.tsx b/webapp/src/components/common/dialogs/DigestDialog.tsx new file mode 100644 index 0000000000..c735986144 --- /dev/null +++ b/webapp/src/components/common/dialogs/DigestDialog.tsx @@ -0,0 +1,73 @@ +import { Skeleton } from "@mui/material"; +import OkDialog, { OkDialogProps } from "./OkDialog"; +import EditableMatrix from "../EditableMatrix"; +import UsePromiseCond from "../utils/UsePromiseCond"; +import type { LaunchJob } from 
"../../../common/types"; +import { getStudyData } from "../../../services/api/study"; +import usePromise from "../../../hooks/usePromise"; +import { useTranslation } from "react-i18next"; +import { AxiosError } from "axios"; +import EmptyView from "../page/SimpleContent"; +import SearchOffIcon from "@mui/icons-material/SearchOff"; + +// TODO: redesign DataViewerDialog to use path, then remove this component + +export interface DigestDialogProps + extends Pick { + studyId: LaunchJob["studyId"]; + outputId: LaunchJob["outputId"]; +} + +function DigestDialog({ + studyId, + outputId, + ...dialogProps +}: DigestDialogProps) { + const { t } = useTranslation(); + + const synthesisRes = usePromise( + () => + getStudyData(studyId, `output/${outputId}/economy/mc-all/grid/digest`), + { + deps: [studyId, outputId], + }, + ); + + return ( + + } + ifRejected={(error) => { + if (error instanceof AxiosError && error.response?.status === 404) { + return ( + + ); + } + return ; + }} + ifResolved={(matrix) => + matrix && ( + + ) + } + /> + + ); +} + +export default DigestDialog; diff --git a/webapp/src/components/common/page/SimpleContent.tsx b/webapp/src/components/common/page/SimpleContent.tsx index 4ddc78e65c..6be0cd51b0 100644 --- a/webapp/src/components/common/page/SimpleContent.tsx +++ b/webapp/src/components/common/page/SimpleContent.tsx @@ -1,51 +1,31 @@ -import { ReactNode } from "react"; import { useTranslation } from "react-i18next"; import LiveHelpRoundedIcon from "@mui/icons-material/LiveHelpRounded"; -import { Box, styled } from "@mui/material"; +import { Box } from "@mui/material"; +import { SvgIconComponent } from "@mui/icons-material"; -const Root = styled(Box)(({ theme }) => ({ - flex: 1, - width: "100%", - height: "100%", - display: "flex", - flexFlow: "column nowrap", - justifyContent: "center", - alignItems: "center", - overflowX: "hidden", - overflowY: "auto", - position: "relative", - "&& div": { - paddingTop: theme.spacing(1), - paddingBottom: theme.spacing(1), - }, -})); - -interface Props { +export interface EmptyViewProps { title?: string; - icon?: ReactNode; - callToAction?: ReactNode; + icon?: SvgIconComponent; } -function SimpleContent(props: Props) { - const { title = "common.nocontent", icon, callToAction } = props; - const [t] = useTranslation(); +function EmptyView(props: EmptyViewProps) { + const { title, icon: Icon = LiveHelpRoundedIcon } = props; + const { t } = useTranslation(); return ( - -
{icon}
-
{t(title)}
-
{callToAction}
-
+ + {Icon && } +
{title || t("common.noContent")}
+
); } -SimpleContent.defaultProps = { - icon: ( - - ), - callToAction:
-}; - -export default SimpleContent; +export default EmptyView; diff --git a/webapp/src/components/common/utils/UsePromiseCond.tsx b/webapp/src/components/common/utils/UsePromiseCond.tsx index c103341375..2ee30ef8da 100644 --- a/webapp/src/components/common/utils/UsePromiseCond.tsx +++ b/webapp/src/components/common/utils/UsePromiseCond.tsx @@ -1,7 +1,7 @@ import * as R from "ramda"; import { PromiseStatus, UsePromiseResponse } from "../../../hooks/usePromise"; import SimpleLoader from "../loaders/SimpleLoader"; -import SimpleContent from "../page/SimpleContent"; +import EmptyView from "../page/SimpleContent"; export type Response = Pick< UsePromiseResponse, @@ -48,7 +48,7 @@ function UsePromiseCond(props: UsePromiseCondProps) { const { response, ifPending = () => <SimpleLoader />, - ifRejected = (error) => <SimpleContent title={error?.toString()} />, + ifRejected = (error) => <EmptyView title={error?.toString()} />, ifResolved, keepLastResolvedOnReload = false, } = props; diff --git a/webapp/src/utils/tsUtils.ts b/webapp/src/utils/tsUtils.ts index 7acf6465a2..f368e9b8a4 100644 --- a/webapp/src/utils/tsUtils.ts +++ b/webapp/src/utils/tsUtils.ts @@ -6,6 +6,12 @@ import { O } from "ts-toolbelt"; // eslint-disable-next-line @typescript-eslint/no-explicit-any export type PromiseAny = Promise<any>; +/** + * Allow to define an empty object. + * Don't use `{}` as a type. `{}` actually means "any non-nullish value". + */ +export type EmptyObject = Record<string, never>; + /** * Make all properties in T optional, except for those specified by K. */
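A closing note on the digest feature threaded through the frontend changes above: DigestDialog loads `output/{outputId}/economy/mc-all/grid/digest` through getStudyData and treats a 404 as "file not found" (the new global.error.fileNotFound message) rather than as a hard failure. The equivalent call against the HTTP API, sketched with requests; the `/v1/studies/{id}/raw` route shape and the local base URL are assumptions of this sketch:

```python
import requests

BASE_URL = "http://localhost:8080"  # assumed local AntaREST deployment


def fetch_digest(study_id: str, output_id: str) -> str | None:
    """Fetch an output's digest file, or None when the output has none."""
    path = f"output/{output_id}/economy/mc-all/grid/digest"
    res = requests.get(
        f"{BASE_URL}/v1/studies/{study_id}/raw",
        params={"path": path},
    )
    if res.status_code == 404:
        # Old or failed runs may lack a digest; the UI shows an empty view.
        return None
    res.raise_for_status()
    return res.text
```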