From beaa4c1677798de2498854f5a99cfaa6081906b2 Mon Sep 17 00:00:00 2001 From: mabw-rte <41002227+mabw-rte@users.noreply.github.com> Date: Thu, 25 Jul 2024 16:40:37 +0200 Subject: [PATCH] feat(bc): avoid unnecessary creation of RHS matrices for binding constraints (#2077) We now move or copy matrices to their right place when the BC operator is changed. --- antarest/core/exceptions.py | 5 + .../business/areas/properties_management.py | 38 +- .../business/areas/st_storage_management.py | 2 +- .../business/binding_constraint_management.py | 89 ++-- .../study/business/table_mode_management.py | 2 +- .../study/business/xpansion_management.py | 4 +- antarest/study/service.py | 2 +- .../rawstudy/model/filesystem/folder_node.py | 9 +- .../rawstudy/model/filesystem/lazy_node.py | 64 ++- .../filesystem/matrix/input_series_matrix.py | 2 +- .../filesystem/matrix/output_series_matrix.py | 3 +- .../study/storage/study_download_utils.py | 3 +- .../variantstudy/business/command_reverter.py | 6 +- .../command/create_binding_constraint.py | 27 +- .../variantstudy/model/command/remove_area.py | 10 +- .../model/command/replace_matrix.py | 12 +- .../command/update_binding_constraint.py | 112 +++- tests/integration/prepare_proxy.py | 343 ++++++++++++ .../test_download_matrices.py | 9 +- .../test_binding_constraints.py | 489 ++++++++++++------ .../storage/business/test_xpansion_manager.py | 2 +- .../matrix/test_input_series_matrix.py | 2 +- .../matrix/test_output_series_matrix.py | 3 +- .../repository/filesystem/test_folder_node.py | 2 +- .../repository/filesystem/test_lazy_node.py | 112 ++++ .../test_manage_binding_constraints.py | 99 +++- .../model/command/test_update_config.py | 2 +- 27 files changed, 1166 insertions(+), 287 deletions(-) create mode 100644 tests/integration/prepare_proxy.py diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index 157ad3c97e..6fce8f0213 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -603,3 +603,8 @@ def __init__(self) -> None: HTTPStatus.BAD_REQUEST, "You cannot scan the default internal workspace", ) + + +class ChildNotFoundError(HTTPException): + def __init__(self, message: str) -> None: + super().__init__(HTTPStatus.NOT_FOUND, message) diff --git a/antarest/study/business/areas/properties_management.py b/antarest/study/business/areas/properties_management.py index 2014c554dc..0bccdad784 100644 --- a/antarest/study/business/areas/properties_management.py +++ b/antarest/study/business/areas/properties_management.py @@ -1,13 +1,13 @@ import re +import typing as t from builtins import sorted -from typing import Any, Dict, Iterable, List, Optional, Set, cast from pydantic import root_validator +from antarest.core.exceptions import ChildNotFoundError from antarest.study.business.utils import FieldInfo, FormFieldsBaseModel, execute_or_add_commands from antarest.study.model import Study from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig @@ -21,35 +21,35 @@ DEFAULT_FILTER_VALUE = FILTER_OPTIONS -def sort_filter_options(options: Iterable[str]) -> List[str]: +def sort_filter_options(options: t.Iterable[str]) -> t.List[str]: return sorted( options, key=lambda x: FILTER_OPTIONS.index(x), ) -def encode_filter(value: str) -> Set[str]: +def 
encode_filter(value: str) -> t.Set[str]: stripped = value.strip() return set(re.split(r"\s*,\s*", stripped) if stripped else []) -def decode_filter(encoded_value: Set[str], current_filter: Optional[str] = None) -> str: +def decode_filter(encoded_value: t.Set[str], current_filter: t.Optional[str] = None) -> str: return ", ".join(sort_filter_options(encoded_value)) class PropertiesFormFields(FormFieldsBaseModel): - energy_cost_unsupplied: Optional[float] - energy_cost_spilled: Optional[float] - non_dispatch_power: Optional[bool] - dispatch_hydro_power: Optional[bool] - other_dispatch_power: Optional[bool] - filter_synthesis: Optional[Set[str]] - filter_by_year: Optional[Set[str]] + energy_cost_unsupplied: t.Optional[float] + energy_cost_spilled: t.Optional[float] + non_dispatch_power: t.Optional[bool] + dispatch_hydro_power: t.Optional[bool] + other_dispatch_power: t.Optional[bool] + filter_synthesis: t.Optional[t.Set[str]] + filter_by_year: t.Optional[t.Set[str]] # version 830 - adequacy_patch_mode: Optional[AdequacyPatchMode] + adequacy_patch_mode: t.Optional[AdequacyPatchMode] @root_validator - def validation(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def validation(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: filters = { "filter_synthesis": values.get("filter_synthesis"), "filter_by_year": values.get("filter_by_year"), @@ -63,7 +63,7 @@ def validation(cls, values: Dict[str, Any]) -> Dict[str, Any]: return values -FIELDS_INFO: Dict[str, FieldInfo] = { +FIELDS_INFO: t.Dict[str, FieldInfo] = { "energy_cost_unsupplied": { "path": THERMAL_PATH.format(field="unserverdenergycost"), "default_value": 0.0, @@ -116,9 +116,9 @@ def get_field_values( file_study = self.storage_service.get_storage(study).get_raw(study) study_ver = file_study.config.version - def get_value(field_info: FieldInfo) -> Any: - start_ver = cast(int, field_info.get("start_version", 0)) - end_ver = cast(int, field_info.get("end_version", study_ver)) + def get_value(field_info: FieldInfo) -> t.Any: + start_ver = t.cast(int, field_info.get("start_version", 0)) + end_ver = t.cast(int, field_info.get("end_version", study_ver)) is_in_version = start_ver <= study_ver <= end_ver if not is_in_version: return None @@ -139,7 +139,7 @@ def set_field_values( area_id: str, field_values: PropertiesFormFields, ) -> None: - commands: List[UpdateConfig] = [] + commands: t.List[UpdateConfig] = [] file_study = self.storage_service.get_storage(study).get_raw(study) context = self.storage_service.variant_study_service.command_factory.command_context diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py index 373f8c3ea4..776f57a039 100644 --- a/antarest/study/business/areas/st_storage_management.py +++ b/antarest/study/business/areas/st_storage_management.py @@ -11,6 +11,7 @@ from antarest.core.exceptions import ( AreaNotFound, + ChildNotFoundError, DuplicateSTStorage, STStorageConfigNotFound, STStorageMatrixNotFound, @@ -29,7 +30,6 @@ create_st_storage_config, ) from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.create_st_storage import CreateSTStorage from antarest.study.storage.variantstudy.model.command.remove_st_storage import RemoveSTStorage diff --git a/antarest/study/business/binding_constraint_management.py 
b/antarest/study/business/binding_constraint_management.py index 7f42bb7f59..50220da54a 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -1,5 +1,4 @@ import collections -import itertools import json import logging import typing as t @@ -46,11 +45,11 @@ from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( DEFAULT_GROUP, EXPECTED_MATRIX_SHAPES, - TERM_MATRICES, BindingConstraintMatrices, BindingConstraintPropertiesBase, CreateBindingConstraint, OptionalProperties, + TermMatrices, ) from antarest.study.storage.variantstudy.model.command.remove_binding_constraint import RemoveBindingConstraint from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint @@ -59,6 +58,14 @@ logger = logging.getLogger(__name__) +OPERATOR_CONFLICT_MAP = { + BindingConstraintOperator.EQUAL: [TermMatrices.LESS.value, TermMatrices.GREATER.value], + BindingConstraintOperator.GREATER: [TermMatrices.LESS.value, TermMatrices.EQUAL.value], + BindingConstraintOperator.LESS: [TermMatrices.EQUAL.value, TermMatrices.GREATER.value], + BindingConstraintOperator.BOTH: [TermMatrices.EQUAL.value], +} + + class LinkTerm(BaseModel): """ DTO for a constraint term on a link between two areas. @@ -246,7 +253,7 @@ class ConstraintCreation(ConstraintInput): @root_validator(pre=True) def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: - for _key in ["time_step"] + TERM_MATRICES: + for _key in ["time_step"] + [m.value for m in TermMatrices]: _camel = to_camel_case(_key) values[_key] = values.pop(_camel, values.get(_key)) @@ -264,7 +271,7 @@ def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t. # Collect the matrix shapes matrix_shapes = {} - for _field_name in ["values"] + TERM_MATRICES: + for _field_name in ["values"] + [m.value for m in TermMatrices]: if _matrix := values.get(_field_name): _array = np.array(_matrix) # We only store the shape if the array is not empty @@ -330,32 +337,35 @@ def _get_references_by_widths( The height of the matrices may vary depending on the time step, but the width should be consistent within a group of binding constraints. """ - if int(file_study.config.version) < 870: - matrix_id_fmts = {"{bc_id}"} - else: - matrix_id_fmts = {"{bc_id}_eq", "{bc_id}_lt", "{bc_id}_gt"} + operator_matrix_file_map = { + BindingConstraintOperator.EQUAL: ["{bc_id}_eq"], + BindingConstraintOperator.GREATER: ["{bc_id}_gt"], + BindingConstraintOperator.LESS: ["{bc_id}_lt"], + BindingConstraintOperator.BOTH: ["{bc_id}_lt", "{bc_id}_gt"], + } references_by_width: t.Dict[int, t.List[t.Tuple[str, str]]] = {} - _total = len(bcs) * len(matrix_id_fmts) - for _index, (bc, fmt) in enumerate(itertools.product(bcs, matrix_id_fmts), 1): - bc_id = bc.id - matrix_id = fmt.format(bc_id=bc.id) - logger.info(f"⏲ Validating BC '{bc_id}': {matrix_id=} [{_index}/{_total}]") - obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id]) - matrix = np.array(obj["data"], dtype=float) - # We ignore empty matrices as there are default matrices for the simulator. 
- if not matrix.size: - continue - - matrix_height = matrix.shape[0] - expected_height = EXPECTED_MATRIX_SHAPES[bc.time_step][0] - if matrix_height != expected_height: - raise WrongMatrixHeightError( - f"The binding constraint '{bc.name}' should have {expected_height} rows, currently: {matrix_height}" - ) - matrix_width = matrix.shape[1] - if matrix_width > 1: - references_by_width.setdefault(matrix_width, []).append((bc_id, matrix_id)) + _total = len(bcs) + for _index, bc in enumerate(bcs): + matrices_name = operator_matrix_file_map[bc.operator] if file_study.config.version >= 870 else ["{bc_id}"] + for matrix_name in matrices_name: + matrix_id = matrix_name.format(bc_id=bc.id) + logger.info(f"⏲ Validating BC '{bc.id}': {matrix_id=} [{_index+1}/{_total}]") + obj = file_study.tree.get(url=["input", "bindingconstraints", matrix_id]) + matrix = np.array(obj["data"], dtype=float) + # We ignore empty matrices as there are default matrices for the simulator. + if not matrix.size: + continue + + matrix_height = matrix.shape[0] + expected_height = EXPECTED_MATRIX_SHAPES[bc.time_step][0] + if matrix_height != expected_height: + raise WrongMatrixHeightError( + f"The binding constraint '{bc.name}' should have {expected_height} rows, currently: {matrix_height}" + ) + matrix_width = matrix.shape[1] + if matrix_width > 1: + references_by_width.setdefault(matrix_width, []).append((bc.id, matrix_id)) return references_by_width @@ -684,7 +694,8 @@ def create_binding_constraint( if bc_id in {bc.id for bc in self.get_binding_constraints(study)}: raise DuplicateConstraintName(f"A binding constraint with the same name already exists: {bc_id}.") - check_attributes_coherence(data, version) + # TODO: the default operator should be fixed somewhere so this condition can be consistent + check_attributes_coherence(data, version, data.operator or BindingConstraintOperator.EQUAL) new_constraint = {"name": data.name, **json.loads(data.json(exclude={"terms", "name"}, exclude_none=True))} args = { @@ -718,8 +729,9 @@ def update_binding_constraint( ) -> ConstraintOutput: file_study = self.storage_service.get_storage(study).get_raw(study) existing_constraint = self.get_binding_constraint(study, binding_constraint_id) + study_version = int(study.version) - check_attributes_coherence(data, study_version) + check_attributes_coherence(data, study_version, data.operator or existing_constraint.operator) upd_constraint = { "id": binding_constraint_id, @@ -740,7 +752,7 @@ def update_binding_constraint( # Validates the matrices. 
Needed when the study is a variant because we only append the command to the list if isinstance(study, VariantStudy): - updated_matrices = [term for term in TERM_MATRICES if getattr(data, term)] + updated_matrices = [term for term in [m.value for m in TermMatrices] if getattr(data, term)] time_step = data.time_step or existing_constraint.time_step command.validates_and_fills_matrices( time_step=time_step, specific_matrices=updated_matrices, version=study_version, create=False @@ -912,13 +924,17 @@ def _replace_matrices_according_to_frequency_and_version( BindingConstraintFrequency.DAILY.value: default_bc_weekly_daily_87, BindingConstraintFrequency.WEEKLY.value: default_bc_weekly_daily_87, }[data.time_step].tolist() - for term in TERM_MATRICES: + for term in [m.value for m in TermMatrices]: if term not in args: args[term] = matrix return args -def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput], study_version: int) -> None: +def check_attributes_coherence( + data: t.Union[ConstraintCreation, ConstraintInput], + study_version: int, + operator: BindingConstraintOperator, +) -> None: if study_version < 870: if data.group: raise InvalidFieldForVersionError( @@ -928,3 +944,10 @@ def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput raise InvalidFieldForVersionError("You cannot fill a 'matrix_term' as these values refer to v8.7+ studies") elif data.values: raise InvalidFieldForVersionError("You cannot fill 'values' as it refers to the matrix before v8.7") + conflicting_matrices = [ + getattr(data, matrix) for matrix in OPERATOR_CONFLICT_MAP[operator] if getattr(data, matrix) + ] + if conflicting_matrices: + raise InvalidFieldForVersionError( + f"You cannot fill matrices '{OPERATOR_CONFLICT_MAP[operator]}' while using the operator '{operator}'" + ) diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py index 65687af9c9..bc31683139 100644 --- a/antarest/study/business/table_mode_management.py +++ b/antarest/study/business/table_mode_management.py @@ -4,6 +4,7 @@ import numpy as np import pandas as pd +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.study.business.area_management import AreaManager, AreaOutput from antarest.study.business.areas.renewable_management import RenewableClusterInput, RenewableManager @@ -13,7 +14,6 @@ from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.link_management import LinkManager, LinkOutput from antarest.study.model import RawStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError _TableIndex = str # row name _TableColumn = str # column name diff --git a/antarest/study/business/xpansion_management.py b/antarest/study/business/xpansion_management.py index 22c612af9a..66d25860dd 100644 --- a/antarest/study/business/xpansion_management.py +++ b/antarest/study/business/xpansion_management.py @@ -9,14 +9,14 @@ from fastapi import HTTPException, UploadFile from pydantic import BaseModel, Extra, Field, ValidationError, root_validator, validator -from antarest.core.exceptions import BadZipBinary +from antarest.core.exceptions import BadZipBinary, ChildNotFoundError from antarest.core.model import JSON from antarest.study.business.all_optional_meta import AllOptionalMetaclass from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.model import Study from 
antarest.study.storage.rawstudy.model.filesystem.bucket_node import BucketNode from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError, FolderNode +from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.root.user.expansion.expansion import Expansion from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.utils import fix_study_root diff --git a/antarest/study/service.py b/antarest/study/service.py index 494e7d2f2f..5a2bfcda2a 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -21,6 +21,7 @@ from antarest.core.config import Config from antarest.core.exceptions import ( BadEditInstructionException, + ChildNotFoundError, CommandApplicationError, IncorrectPathError, NotAManagedStudyException, @@ -106,7 +107,6 @@ ) from antarest.study.storage.matrix_profile import adjust_matrix_columns_index from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix diff --git a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py index 3ea51c098d..ba1d859ce3 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/folder_node.py +++ b/antarest/study/storage/rawstudy/model/filesystem/folder_node.py @@ -1,10 +1,8 @@ import shutil import typing as t from abc import ABC, abstractmethod -from http import HTTPStatus - -from fastapi import HTTPException +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON, SUB_JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer @@ -15,11 +13,6 @@ class FilterError(Exception): pass -class ChildNotFoundError(HTTPException): - def __init__(self, message: str) -> None: - super().__init__(HTTPStatus.NOT_FOUND, message) - - class FolderNode(INode[JSON, SUB_JSON, JSON], ABC): # noinspection SpellCheckingInspection """ diff --git a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py index 98f0c74a40..7e47affbc9 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py +++ b/antarest/study/storage/rawstudy/model/filesystem/lazy_node.py @@ -1,10 +1,12 @@ +import shutil +import typing as t from abc import ABC, abstractmethod from dataclasses import dataclass from datetime import datetime, timedelta from pathlib import Path -from typing import Any, Dict, Generic, List, Optional, Tuple, Union, cast from zipfile import ZipFile +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.inode import G, INode, S, V @@ -12,16 +14,16 @@ @dataclass class SimpleCache: - value: Any + value: 
t.Any
     expiration_date: datetime


-class LazyNode(INode, ABC, Generic[G, S, V]):  # type: ignore
+class LazyNode(INode, ABC, t.Generic[G, S, V]):  # type: ignore
     """
     Abstract class that provides a lazy-loading implementation for its subclasses.
     """

-    ZIP_FILELIST_CACHE: Dict[str, SimpleCache] = {}
+    ZIP_FILELIST_CACHE: t.Dict[str, SimpleCache] = {}

     def __init__(
         self,
@@ -33,7 +35,7 @@ def __init__(

     def _get_real_file_path(
         self,
-    ) -> Tuple[Path, Any]:
+    ) -> t.Tuple[Path, t.Any]:
         tmp_dir = None
         if self.config.zip_path:
             path, tmp_dir = self._extract_file_to_tmp_dir()
@@ -58,12 +60,12 @@ def file_exists(self) -> bool:

     def _get(
         self,
-        url: Optional[List[str]] = None,
+        url: t.Optional[t.List[str]] = None,
         depth: int = -1,
         expanded: bool = False,
         formatted: bool = True,
         get_node: bool = False,
-    ) -> Union[Union[str, G], INode[G, S, V]]:
+    ) -> t.Union[t.Union[str, G], INode[G, S, V]]:
         self._assert_url_end(url)

         if get_node:
@@ -74,7 +76,7 @@ def _get(
             if expanded:
                 return link
             else:
-                return cast(G, self.context.resolver.resolve(link, formatted))
+                return t.cast(G, self.context.resolver.resolve(link, formatted))

         if expanded:
             return self.get_lazy_content()
@@ -83,35 +85,51 @@ def _get(

     def get(
         self,
-        url: Optional[List[str]] = None,
+        url: t.Optional[t.List[str]] = None,
         depth: int = -1,
         expanded: bool = False,
         formatted: bool = True,
-    ) -> Union[str, G]:
+    ) -> t.Union[str, G]:
         output = self._get(url, depth, expanded, formatted, get_node=False)
         assert not isinstance(output, INode)
         return output

     def get_node(
         self,
-        url: Optional[List[str]] = None,
+        url: t.Optional[t.List[str]] = None,
     ) -> INode[G, S, V]:
         output = self._get(url, get_node=True)
         assert isinstance(output, INode)
         return output

-    def delete(self, url: Optional[List[str]] = None) -> None:
+    def delete(self, url: t.Optional[t.List[str]] = None) -> None:
         self._assert_url_end(url)
         if self.get_link_path().exists():
             self.get_link_path().unlink()
         elif self.config.path.exists():
             self.config.path.unlink()

+    def _infer_path(self) -> Path:
+        if self.get_link_path().exists():
+            return self.get_link_path()
+        elif self.config.path.exists():
+            return self.config.path
+        else:
+            raise ChildNotFoundError(
+                f"Neither link file {self.get_link_path()} nor matrix file {self.config.path} exists"
+            )
+
+    def _infer_target_path(self, is_link: bool) -> Path:
+        if is_link:
+            return self.get_link_path()
+        else:
+            return self.config.path
+
     def get_link_path(self) -> Path:
         path = self.config.path.parent / (self.config.path.name + ".link")
         return path

-    def save(self, data: Union[str, bytes, S], url: Optional[List[str]] = None) -> None:
+    def save(self, data: t.Union[str, bytes, S], url: t.Optional[t.List[str]] = None) -> None:
         self._assert_not_in_zipped_file()
         self._assert_url_end(url)

@@ -121,14 +139,24 @@ def save(self, data: Union[str, bytes, S], url: Optional[List[str]] = None) -> N
             self.config.path.unlink()
             return None

-        self.dump(cast(S, data), url)
+        self.dump(t.cast(S, data), url)

         if self.get_link_path().exists():
             self.get_link_path().unlink()
         return None

+    def rename_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None:
+        # Move this node's data file (or its ".link" file) over the target node's file.
+        target_path = target._infer_target_path(self.get_link_path().exists())
+        target_path.unlink(missing_ok=True)
+        self._infer_path().rename(target_path)
+
+    def copy_file(self, target: "LazyNode[t.Any, t.Any, t.Any]") -> None:
+        # Same as rename_file, but keeps the source file in place.
+        target_path = target._infer_target_path(self.get_link_path().exists())
+        target_path.unlink(missing_ok=True)
+        shutil.copy(self._infer_path(), target_path)
+
     def get_lazy_content(
         self,
-        url: 
Optional[List[str]] = None, + url: t.Optional[t.List[str]] = None, depth: int = -1, expanded: bool = False, ) -> str: @@ -137,7 +165,7 @@ def get_lazy_content( @abstractmethod def load( self, - url: Optional[List[str]] = None, + url: t.Optional[t.List[str]] = None, depth: int = -1, expanded: bool = False, formatted: bool = True, @@ -148,7 +176,7 @@ def load( Args: url: data path to retrieve depth: after url is reached, node expand tree until matches depth asked - expanded: context parameter to determine if current node become from a expansion + expanded: context parameter to determine if current node comes from an expansion formatted: ask for raw file transformation Returns: @@ -157,7 +185,7 @@ def load( raise NotImplementedError() @abstractmethod - def dump(self, data: S, url: Optional[List[str]] = None) -> None: + def dump(self, data: S, url: t.Optional[t.List[str]] = None) -> None: """ Store data on tree. diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py index 4cda0b4027..a68b0f521e 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/input_series_matrix.py @@ -7,11 +7,11 @@ from numpy import typing as npt from pandas.errors import EmptyDataError +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.core.utils.utils import StopWatch from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixFrequency, MatrixNode logger = logging.getLogger(__name__) diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py index ff1384391a..70317c6255 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/output_series_matrix.py @@ -5,11 +5,10 @@ import pandas as pd from pandas import DataFrame -from antarest.core.exceptions import MustNotModifyOutputException +from antarest.core.exceptions import ChildNotFoundError, MustNotModifyOutputException from antarest.core.model import JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode from antarest.study.storage.rawstudy.model.filesystem.matrix.date_serializer import ( FactoryDateSerializer, diff --git a/antarest/study/storage/study_download_utils.py b/antarest/study/storage/study_download_utils.py index 6ca846ca30..4c08b0c14b 100644 --- a/antarest/study/storage/study_download_utils.py +++ b/antarest/study/storage/study_download_utils.py @@ -13,6 +13,7 @@ from fastapi import HTTPException +from antarest.core.exceptions import ChildNotFoundError from antarest.study.model import ( ExportFormat, MatrixAggregationResult, @@ -24,7 +25,7 @@ ) from 
antarest.study.storage.rawstudy.model.filesystem.config.model import Area, EnrModelling, FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError, FilterError, FolderNode +from antarest.study.storage.rawstudy.model.filesystem.folder_node import FilterError, FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import OutputSeriesMatrix diff --git a/antarest/study/storage/variantstudy/business/command_reverter.py b/antarest/study/storage/variantstudy/business/command_reverter.py index 089589576f..c60cfad601 100644 --- a/antarest/study/storage/variantstudy/business/command_reverter.py +++ b/antarest/study/storage/variantstudy/business/command_reverter.py @@ -2,14 +2,14 @@ import typing as t from pathlib import Path +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.variantstudy.model.command.common import CommandName from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( - TERM_MATRICES, CreateBindingConstraint, + TermMatrices, ) from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster from antarest.study.storage.variantstudy.model.command.create_district import CreateDistrict @@ -115,7 +115,7 @@ def _revert_update_binding_constraint( } matrix_service = command.command_context.matrix_service - for matrix_name in ["values"] + TERM_MATRICES: + for matrix_name in ["values"] + [m.value for m in TermMatrices]: matrix = getattr(command, matrix_name) if matrix is not None: args[matrix_name] = matrix_service.get_matrix_id(matrix) diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index ee9162241d..0e34b5f867 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -1,6 +1,7 @@ import json import typing as t from abc import ABCMeta +from enum import Enum import numpy as np from pydantic import BaseModel, Extra, Field, root_validator, validator @@ -23,7 +24,6 @@ from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO -TERM_MATRICES = ["less_term_matrix", "equal_term_matrix", "greater_term_matrix"] DEFAULT_GROUP = "default" MatrixType = t.List[t.List[MatrixData]] @@ -35,6 +35,12 @@ } +class TermMatrices(Enum): + LESS = "less_term_matrix" + GREATER = "greater_term_matrix" + EQUAL = "equal_term_matrix" + + def check_matrix_values(time_step: BindingConstraintFrequency, values: MatrixType, version: int) -> None: """ Check the binding constraint's matrix values for the specified time step. 
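
For reference, the `TermMatrices` enum above supersedes the former `TERM_MATRICES` list of attribute
names; every loop that used to iterate `TERM_MATRICES` now iterates `[m.value for m in TermMatrices]`.
A minimal sketch of the pattern as it recurs throughout this patch (illustrative only, not part of the
diff; `command` stands for any binding-constraint command object carrying these attributes):

    import typing as t

    def updated_matrices(command: t.Any) -> t.List[str]:
        # Keep only the term-matrix attributes actually set on the command,
        # e.g. ["less_term_matrix"] for a constraint updated with new "less" data.
        return [m.value for m in TermMatrices if getattr(command, m.value, None)]
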
@@ -216,7 +222,7 @@ def to_dto(self) -> CommandDTO: args["group"] = self.group matrix_service = self.command_context.matrix_service - for matrix_name in TERM_MATRICES + ["values"]: + for matrix_name in [m.value for m in TermMatrices] + ["values"]: matrix_attr = getattr(self, matrix_name, None) if matrix_attr is not None: args[matrix_name] = matrix_service.get_matrix_id(matrix_attr) @@ -356,11 +362,16 @@ def apply_binding_constraint( if version < 870: study_data.tree.save(self.values, ["input", "bindingconstraints", bd_id]) - for matrix_term, matrix_name, matrix_alias in zip( - [self.less_term_matrix, self.equal_term_matrix, self.greater_term_matrix], - TERM_MATRICES, - ["lt", "eq", "gt"], - ): + operator_matrices_map = { + BindingConstraintOperator.EQUAL: [(self.equal_term_matrix, "eq")], + BindingConstraintOperator.GREATER: [(self.greater_term_matrix, "gt")], + BindingConstraintOperator.LESS: [(self.less_term_matrix, "lt")], + BindingConstraintOperator.BOTH: [(self.less_term_matrix, "lt"), (self.greater_term_matrix, "gt")], + } + + current_operator = self.operator or BindingConstraintOperator(binding_constraints[new_key]["operator"]) + + for matrix_term, matrix_alias in operator_matrices_map[current_operator]: if matrix_term: if not isinstance(matrix_term, str): # pragma: no cover raise TypeError(repr(matrix_term)) @@ -442,7 +453,7 @@ def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: args[prop] = other_command[prop] matrix_service = self.command_context.matrix_service - for matrix_name in ["values"] + TERM_MATRICES: + for matrix_name in ["values"] + [m.value for m in TermMatrices]: self_matrix = getattr(self, matrix_name) # matrix, ID or `None` other_matrix = getattr(other, matrix_name) # matrix, ID or `None` self_matrix_id = None if self_matrix is None else matrix_service.get_matrix_id(self_matrix) diff --git a/antarest/study/storage/variantstudy/model/command/remove_area.py b/antarest/study/storage/variantstudy/model/command/remove_area.py index 5a90bfa33b..f39c8aac9c 100644 --- a/antarest/study/storage/variantstudy/model/command/remove_area.py +++ b/antarest/study/storage/variantstudy/model/command/remove_area.py @@ -1,11 +1,11 @@ import contextlib import logging -from typing import Any, Dict, List, Tuple +import typing as t +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.variantstudy.business.utils_binding_constraint import ( remove_area_cluster_from_binding_constraints, ) @@ -44,7 +44,7 @@ def _remove_area_from_sets_in_config(self, study_data_config: FileStudyTreeConfi set_.areas.remove(self.id) study_data_config.sets[id_] = set_ - def _apply_config(self, study_data_config: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: + def _apply_config(self, study_data_config: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: del study_data_config.areas[self.id] self._remove_area_from_links_in_config(study_data_config) @@ -280,8 +280,8 @@ def match_signature(self) -> str: def match(self, other: ICommand, equal: bool = False) -> bool: return isinstance(other, RemoveArea) and self.id == other.id - def _create_diff(self, other: "ICommand") -> List["ICommand"]: + def _create_diff(self, other: "ICommand") -> 
t.List["ICommand"]: return [] - def get_inner_matrices(self) -> List[str]: + def get_inner_matrices(self) -> t.List[str]: return [] diff --git a/antarest/study/storage/variantstudy/model/command/replace_matrix.py b/antarest/study/storage/variantstudy/model/command/replace_matrix.py index 4b66584c39..6a51ca86b1 100644 --- a/antarest/study/storage/variantstudy/model/command/replace_matrix.py +++ b/antarest/study/storage/variantstudy/model/command/replace_matrix.py @@ -1,13 +1,13 @@ -from typing import Any, Dict, List, Tuple, Union +import typing as t from pydantic import validator +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.core.utils.utils import assert_this from antarest.matrixstore.model import MatrixData from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix import MatrixNode from antarest.study.storage.variantstudy.business.utils import AliasDecoder, strip_matrix_protocol, validate_matrix from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput @@ -30,11 +30,11 @@ class ReplaceMatrix(ICommand): # ================== target: str - matrix: Union[List[List[MatrixData]], str] + matrix: t.Union[t.List[t.List[MatrixData]], str] _validate_matrix = validator("matrix", each_item=True, always=True, allow_reuse=True)(validate_matrix) - def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: + def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: return ( CommandOutput( status=True, @@ -93,9 +93,9 @@ def match(self, other: ICommand, equal: bool = False) -> bool: return self.target == other.target and self.matrix == other.matrix return self.target == other.target - def _create_diff(self, other: "ICommand") -> List["ICommand"]: + def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: return [other] - def get_inner_matrices(self) -> List[str]: + def get_inner_matrices(self) -> t.List[str]: assert_this(isinstance(self.matrix, str)) return [strip_matrix_protocol(self.matrix)] diff --git a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py index 3f84ecd334..6c1d9bafae 100644 --- a/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/update_binding_constraint.py @@ -1,22 +1,108 @@ import json -from typing import Any, Dict, List, Mapping, Optional, Tuple +import typing as t from antarest.core.model import JSON from antarest.matrixstore.model import MatrixData -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy +from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode from antarest.study.storage.variantstudy.model.command.common import 
CommandName, CommandOutput from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( DEFAULT_GROUP, - TERM_MATRICES, AbstractBindingConstraintCommand, + TermMatrices, create_binding_constraint_config, ) from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO -MatrixType = List[List[MatrixData]] +MatrixType = t.List[t.List[MatrixData]] + +ALIAS_OPERATOR_MAP = { + BindingConstraintOperator.EQUAL: "eq", + BindingConstraintOperator.LESS: "lt", + BindingConstraintOperator.GREATER: "gt", +} + + +def _update_matrices_names( + file_study: FileStudy, + binding_constraint_id: str, + existing_operator: BindingConstraintOperator, + new_operator: BindingConstraintOperator, +) -> None: + """ + Update the matrix file name according to the new operator. + + Args: + file_study: the file study + binding_constraint_id: the binding constraint ID + existing_operator: the existing operator + new_operator: the new operator + + Raises: + NotImplementedError: if the case is not handled + """ + + parent_folder_node = file_study.tree.get_node(["input", "bindingconstraints"]) + matrix_lt = parent_folder_node.get_node([f"{binding_constraint_id}_lt"]) + assert isinstance(matrix_lt, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_lt)}" + matrix_eq = parent_folder_node.get_node([f"{binding_constraint_id}_eq"]) + assert isinstance(matrix_eq, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_eq)}" + matrix_gt = parent_folder_node.get_node([f"{binding_constraint_id}_gt"]) + assert isinstance(matrix_gt, LazyNode), f"Node type not handled yet: LazyNode expected, got {type(matrix_gt)}" + + # Due to legacy matrices generation, we need to check if the new matrix file already exists + # and if it does, we need to first remove it before renaming the existing matrix file + + handled_operators = [ + BindingConstraintOperator.EQUAL, + BindingConstraintOperator.LESS, + BindingConstraintOperator.GREATER, + BindingConstraintOperator.BOTH, + ] + + if (existing_operator not in handled_operators) or (new_operator not in handled_operators): + raise NotImplementedError( + f"Case not handled yet: existing_operator={existing_operator}, new_operator={new_operator}" + ) + elif existing_operator == new_operator: + return # nothing to do + elif existing_operator != BindingConstraintOperator.BOTH and new_operator != BindingConstraintOperator.BOTH: + matrix_node = parent_folder_node.get_node([f"{binding_constraint_id}_{ALIAS_OPERATOR_MAP[existing_operator]}"]) + assert isinstance( + matrix_node, LazyNode + ), f"Node type not handled yet: LazyNode expected, got {type(matrix_node)}" + new_matrix_node = parent_folder_node.get_node([f"{binding_constraint_id}_{ALIAS_OPERATOR_MAP[new_operator]}"]) + assert isinstance( + new_matrix_node, LazyNode + ), f"Node type not handled yet: LazyNode expected, got {type(new_matrix_node)}" + matrix_node.rename_file(new_matrix_node) + elif new_operator == BindingConstraintOperator.BOTH: + if existing_operator == BindingConstraintOperator.EQUAL: + matrix_eq.rename_file(matrix_lt) + matrix_gt.delete() + # copy the matrix lt to gt + matrix_lt.copy_file(matrix_gt) + elif existing_operator == BindingConstraintOperator.LESS: + matrix_gt.delete() + matrix_lt.copy_file(matrix_gt) + else: + matrix_lt.delete() + matrix_gt.copy_file(matrix_lt) + else: + if new_operator == BindingConstraintOperator.EQUAL: + # we may retrieve 
the mean of the two matrices, but here we just copy the lt matrix + matrix_lt.rename_file(matrix_eq) + matrix_gt.delete() + elif new_operator == BindingConstraintOperator.LESS: + matrix_gt.delete() + else: + matrix_lt.delete() class UpdateBindingConstraint(AbstractBindingConstraintCommand): @@ -36,10 +122,10 @@ class UpdateBindingConstraint(AbstractBindingConstraintCommand): # Properties of the `UPDATE_BINDING_CONSTRAINT` command: id: str - def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, Dict[str, Any]]: + def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: return CommandOutput(status=True), {} - def _find_binding_config(self, binding_constraints: Mapping[str, JSON]) -> Optional[Tuple[str, JSON]]: + def _find_binding_config(self, binding_constraints: t.Mapping[str, JSON]) -> t.Optional[t.Tuple[str, JSON]]: """ Find the binding constraint with the given ID in the list of binding constraints, and returns its index and configuration, or `None` if it does not exist. @@ -65,7 +151,15 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: index, actual_cfg = index_and_cfg - updated_matrices = [term for term in TERM_MATRICES if hasattr(self, term) and getattr(self, term)] + # rename matrices if the operator has changed for version >= 870 + if self.operator and study_data.config.version >= 870: + existing_operator = BindingConstraintOperator(actual_cfg.get("operator")) + new_operator = BindingConstraintOperator(self.operator) + _update_matrices_names(study_data, self.id, existing_operator, new_operator) + + updated_matrices = [ + term for term in [m.value for m in TermMatrices] if hasattr(self, term) and getattr(self, term) + ] study_version = study_data.config.version time_step = self.time_step or BindingConstraintFrequency(actual_cfg.get("type")) self.validates_and_fills_matrices( @@ -90,7 +184,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: return super().apply_binding_constraint(study_data, binding_constraints, index, self.id, old_groups=old_groups) def to_dto(self) -> CommandDTO: - matrices = ["values"] + TERM_MATRICES + matrices = ["values"] + [m.value for m in TermMatrices] matrix_service = self.command_context.matrix_service excluded_fields = frozenset(ICommand.__fields__) @@ -104,7 +198,7 @@ def to_dto(self) -> CommandDTO: def match_signature(self) -> str: return str(self.command_name.value + MATCH_SIGNATURE_SEPARATOR + self.id) - def _create_diff(self, other: "ICommand") -> List["ICommand"]: + def _create_diff(self, other: "ICommand") -> t.List["ICommand"]: return [other] def match(self, other: "ICommand", equal: bool = False) -> bool: diff --git a/tests/integration/prepare_proxy.py b/tests/integration/prepare_proxy.py new file mode 100644 index 0000000000..0556df8a87 --- /dev/null +++ b/tests/integration/prepare_proxy.py @@ -0,0 +1,343 @@ +import io +import typing as t + +import pandas as pd +from starlette.testclient import TestClient + +from antarest.core.tasks.model import TaskStatus +from tests.integration.utils import wait_task_completion + + +class PreparerProxy: + """ + Proxy to prepare the test environment for integration tests + + Attributes: + client: The client to be used for requests. + user_access_token: The access token of the user. + headers: The headers to be used for requests. + """ + + def __init__(self, client: TestClient, user_access_token: str): + """ + Initialize the proxy. + + Args: + client: The client to be used for requests. 
+            user_access_token: The access token of the user.
+        """
+        self.client = client
+        self.user_access_token = user_access_token
+        self.headers = {"Authorization": f"Bearer {user_access_token}"}
+
+    def create_study(self, name: str, *, version: int = 870) -> str:
+        """
+        Create a new study in the managed workspace.
+
+        Args:
+            name: The name of the study.
+            version: The version of the study. Defaults to 870.
+
+        Returns:
+            The ID of the created study.
+        """
+        res = self.client.post(
+            "/v1/studies",
+            params={"name": name, "version": version},  # type: ignore
+            headers=self.headers,
+        )
+        assert res.status_code == 201, res.json()
+        return t.cast(str, res.json())
+
+    def copy_study_and_upgrade(self, ref_study_id: str, target_version: int) -> str:
+        """
+        Copy a study in the managed workspace and upgrade it to a specific version.
+
+        Args:
+            ref_study_id: The ID of the study to copy.
+            target_version: The version to upgrade the copied study to.
+
+        Returns:
+            The ID of the copied and upgraded study.
+        """
+        # Copy the source study, then upgrade the copy to the target version
+        res = self.client.post(
+            f"/v1/studies/{ref_study_id}/copy",
+            params={"dest": f"copied-{target_version}", "use_task": False},  # type: ignore
+            headers=self.headers,
+        )
+        res.raise_for_status()
+        study_id = t.cast(str, res.json())
+
+        res = self.client.put(
+            f"/v1/studies/{study_id}/upgrade",
+            params={"target_version": target_version},
+            headers=self.headers,
+        )
+        res.raise_for_status()
+        task_id = res.json()
+        assert task_id
+
+        task = wait_task_completion(self.client, self.user_access_token, task_id, timeout=20)
+        assert task.status == TaskStatus.COMPLETED
+        return study_id
+
+    def upload_matrix(self, study_id: str, matrix_path: str, df: pd.DataFrame) -> None:
+        """
+        Upload a matrix to the study.
+
+        Args:
+            study_id: The ID of the study to upload the matrix to.
+            matrix_path: The path to the matrix in the study.
+            df: The data to upload.
+        """
+        tsv = io.BytesIO()
+        df.to_csv(tsv, sep="\t", index=False, header=False)
+        tsv.seek(0)
+        # noinspection SpellCheckingInspection
+        res = self.client.put(
+            f"/v1/studies/{study_id}/raw",
+            params={"path": matrix_path, "create_missing": True},  # type: ignore
+            headers=self.headers,
+            files={"file": tsv},
+        )
+        res.raise_for_status()
+
+    def download_matrix(self, study_id: str, matrix_path: str) -> pd.DataFrame:
+        """
+        Download a matrix from the study.
+
+        Args:
+            study_id: The ID of the study to download the matrix from.
+            matrix_path: The path to the matrix in the study.
+
+        Returns:
+            pd.DataFrame: The downloaded data.
+        """
+        res = self.client.get(
+            f"/v1/studies/{study_id}/raw",
+            params={"depth": 1, "formatted": True, "path": matrix_path},  # type: ignore
+            headers=self.headers,
+        )
+        res.raise_for_status()
+        return pd.DataFrame(**res.json())
+
+    def create_variant(self, parent_id: str, *, name: str) -> str:
+        """
+        Create a variant of a study.
+
+        Args:
+            parent_id: The ID of the parent study.
+            name: The name of the variant.
+
+        Returns:
+            str: The ID of the created variant.
+        """
+        res = self.client.post(
+            f"/v1/studies/{parent_id}/variants",
+            headers=self.headers,
+            params={"name": name},
+        )
+        res.raise_for_status()
+        variant_id = t.cast(str, res.json())
+        return variant_id
+
+    def generate_snapshot(self, variant_id: str, denormalize: bool = False, from_scratch: bool = True) -> None:
+        """
+        Generate a snapshot for a variant.
+
+        Args:
+            variant_id: The ID of the variant study. 
+ denormalize: Whether to denormalize the snapshot (replace the matrix links by the actual data). + from_scratch: Whether to generate the snapshot from scratch (recompute the data). + """ + # Generate a snapshot for the variant + res = self.client.put( + f"/v1/studies/{variant_id}/generate", + headers=self.headers, + params={"denormalize": denormalize, "from_scratch": from_scratch}, + ) + res.raise_for_status() + task_id = res.json() + assert task_id + + task = wait_task_completion(self.client, self.user_access_token, task_id, timeout=20) + assert task.status == TaskStatus.COMPLETED + + def create_area(self, study_id: str, *, name: str, country: str = "FR") -> t.Dict[str, t.Any]: + """ + Create an area in a study. + + Args: + study_id: The ID of the parent study. + name: Name of the area. + country: Country of the area. + + Returns: + The area properties. + """ + res = self.client.post( + f"/v1/studies/{study_id}/areas", + headers=self.headers, + json={"name": name, "type": "AREA", "metadata": {"country": country}}, + ) + res.raise_for_status() + properties = t.cast(t.Dict[str, t.Any], res.json()) + return properties + + def update_general_data(self, study_id: str, **data: t.Any) -> None: + """ + Update the general data of a study. + + Args: + study_id: The ID of the study. + **data: The data to update. + """ + res = self.client.put( + f"/v1/studies/{study_id}/config/general/form", + json=data, + headers=self.headers, + ) + res.raise_for_status() + + def create_link(self, study_id: str, area1_id: str, area2_id: str) -> t.Dict[str, t.Any]: + """ + Create a link between two areas in a study. + + Args: + study_id: The ID of the study. + area1_id: The ID of the first area. + area2_id: The ID of the second area. + + Returns: + The link properties. + """ + # Create a link between the two areas + res = self.client.post( + f"/v1/studies/{study_id}/links", + headers=self.headers, + json={"area1": area1_id, "area2": area2_id}, + ) + assert res.status_code == 200, res.json() + properties = t.cast(t.Dict[str, t.Any], res.json()) + properties["id"] = f"{area1_id}%{area2_id}" + return properties + + def create_thermal(self, study_id: str, area1_id: str, *, name: str, **kwargs: t.Any) -> t.Dict[str, t.Any]: + """ + Create a thermal cluster in an area. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + name: The name of the cluster. + **kwargs: Additional cluster data. + + Returns: + The cluster properties. + """ + res = self.client.post( + f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", + headers=self.headers, + json={"name": name, **kwargs}, + ) + res.raise_for_status() + properties = t.cast(t.Dict[str, t.Any], res.json()) + return properties + + def get_thermals(self, study_id: str, area1_id: str) -> t.List[t.Dict[str, t.Any]]: + """ + Get the thermal clusters of an area in a study. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + + Returns: + The list of cluster properties. + """ + res = self.client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", headers=self.headers) + res.raise_for_status() + clusters_list = t.cast(t.List[t.Dict[str, t.Any]], res.json()) + return clusters_list + + def create_renewable(self, study_id: str, area1_id: str, *, name: str, **kwargs: t.Any) -> str: + """ + Create a renewable cluster in an area. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + name: The name of the cluster. + **kwargs: Additional cluster data. 
+ """ + res = self.client.post( + f"/v1/studies/{study_id}/areas/{area1_id}/clusters/renewable", + headers=self.headers, + json={"name": name, **kwargs}, + ) + res.raise_for_status() + cluster_id = t.cast(str, res.json()["id"]) + return cluster_id + + def get_renewables(self, study_id: str, area1_id: str) -> t.List[t.Dict[str, t.Any]]: + """ + Get the renewable clusters of an area in a study. + + Args: + study_id: The ID of the study. + area1_id: The ID of the area. + + Returns: + The list of cluster properties. + """ + res = self.client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/renewable", headers=self.headers) + res.raise_for_status() + clusters_list = t.cast(t.List[t.Dict[str, t.Any]], res.json()) + return clusters_list + + def create_binding_constraint(self, study_id: str, *, name: str, **kwargs: t.Any) -> t.Dict[str, t.Any]: + """ + Create a binding constraint in a study. + + Args: + study_id: The ID of the study. + name: The name of the constraint. + **kwargs: Additional constraint data. + + Returns: + The binding constraint properties. + """ + res = self.client.post( + f"/v1/studies/{study_id}/bindingconstraints", + headers=self.headers, + json={"name": name, **kwargs}, + ) + res.raise_for_status() + properties = t.cast(t.Dict[str, t.Any], res.json()) + return properties + + def get_binding_constraints(self, study_id: str) -> t.List[t.Dict[str, t.Any]]: + """ + Get the binding constraints of a study. + + Args: + study_id: The ID of the study. + + Returns: + The list of constraint properties. + """ + res = self.client.get(f"/v1/studies/{study_id}/bindingconstraints", headers=self.headers) + res.raise_for_status() + binding_constraints_list = t.cast(t.List[t.Dict[str, t.Any]], res.json()) + return binding_constraints_list + + def drop_all_commands(self, variant_id: str) -> None: + """ + Drop all commands of a variant. + + Args: + variant_id: The ID of the variant. 
+ """ + res = self.client.delete(f"/v1/studies/{variant_id}/commands", headers=self.headers) + res.raise_for_status() diff --git a/tests/integration/raw_studies_blueprint/test_download_matrices.py b/tests/integration/raw_studies_blueprint/test_download_matrices.py index 0f4e764089..1eaab62cd6 100644 --- a/tests/integration/raw_studies_blueprint/test_download_matrices.py +++ b/tests/integration/raw_studies_blueprint/test_download_matrices.py @@ -101,6 +101,9 @@ def update_general_data(self, internal_study_id: str, **data: t.Any): res.raise_for_status() +from tests.integration.prepare_proxy import PreparerProxy + + @pytest.mark.integration_test class TestDownloadMatrices: """ @@ -116,13 +119,13 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, int preparer = PreparerProxy(client, user_access_token) - study_820_id = preparer.copy_upgrade_study(internal_study_id, target_version=820) + study_820_id = preparer.copy_study_and_upgrade(internal_study_id, target_version=820) # Create Variant variant_id = preparer.create_variant(study_820_id, name="New Variant") # Create a new area to implicitly create normalized matrices - area_id = preparer.create_area(variant_id, name="Mayenne", country="France") + area_id = preparer.create_area(variant_id, name="Mayenne", country="France")["id"] # Change study start_date preparer.update_general_data(variant_id, firstMonth="July") @@ -131,7 +134,7 @@ def test_download_matrices(self, client: TestClient, user_access_token: str, int preparer.generate_snapshot(variant_id) # Prepare a managed study to test specific matrices for version 8.6 - study_860_id = preparer.copy_upgrade_study(internal_study_id, target_version=860) + study_860_id = preparer.copy_study_and_upgrade(internal_study_id, target_version=860) # Import a Min Gen. 
matrix: shape=(8760, 3), with random integers between 0 and 1000 generator = np.random.default_rng(11) diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 7e3c613e16..aba3d397ac 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -1,12 +1,23 @@ -import io import re import numpy as np import pandas as pd import pytest +from requests.exceptions import HTTPError from starlette.testclient import TestClient from antarest.study.business.binding_constraint_management import ClusterTerm, ConstraintTerm, LinkTerm +from tests.integration.prepare_proxy import PreparerProxy + +MATRIX_SIZES = {"hourly": 8784, "daily": 366, "weekly": 366} + + +REQUIRED_MATRICES = { + "less": {"lt"}, + "equal": {"eq"}, + "greater": {"gt"}, + "both": {"lt", "gt"}, +} class TestLinkTerm: @@ -68,14 +79,6 @@ def test_constraint_id__other(self) -> None: assert term.generate_id() == "foo" -def _upload_matrix(client: TestClient, study_id: str, matrix_path: str, df: pd.DataFrame) -> None: - tsv = io.BytesIO() - df.to_csv(tsv, sep="\t", index=False, header=False) - tsv.seek(0) - res = client.put(f"/v1/studies/{study_id}/raw", params={"path": matrix_path}, files={"file": tsv}) - res.raise_for_status() - - @pytest.mark.unit_test class TestBindingConstraints: """ @@ -90,49 +93,22 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # STUDY PREPARATION # ============================= - # Create a Study - res = client.post("/v1/studies", params={"name": "foo", "version": "860"}) - assert res.status_code == 201, res.json() - study_id = res.json() - - # Create Areas - res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 1", "type": "AREA"}) - assert res.status_code == 200, res.json() - area1_id = res.json()["id"] - assert area1_id == "area 1" - - res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 2", "type": "AREA"}) - assert res.status_code == 200, res.json() - area2_id = res.json()["id"] - assert area2_id == "area 2" - - # Create a link between the two areas - res = client.post(f"/v1/studies/{study_id}/links", json={"area1": area1_id, "area2": area2_id}) - assert res.status_code == 200, res.json() + preparer = PreparerProxy(client, user_access_token) + study_id = preparer.create_study("foo", version=860) + area1_id = preparer.create_area(study_id, name="Area 1")["id"] + area2_id = preparer.create_area(study_id, name="Area 2")["id"] + link_id = preparer.create_link(study_id, area1_id=area1_id, area2_id=area2_id)["id"] # Create a cluster in area1 - res = client.post( - f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal", - json={"name": "Cluster 1", "group": "Nuclear"}, - ) - assert res.status_code == 200, res.json() - cluster_id = res.json()["id"] - assert cluster_id == "Cluster 1" - - # Get clusters list to check created cluster in area1 - res = client.get(f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal") - clusters_list = res.json() - assert res.status_code == 200, res.json() + cluster_id = preparer.create_thermal(study_id, area1_id, name="Cluster 1", group="Nuclear")["id"] + clusters_list = preparer.get_thermals(study_id, area1_id) assert len(clusters_list) == 1 assert clusters_list[0]["id"] == cluster_id assert clusters_list[0]["name"] == "Cluster 1" assert clusters_list[0]["group"] == "Nuclear" if study_type == "variant": - # Create Variant 
- res = client.post(f"/v1/studies/{study_id}/variants", params={"name": "Variant 1"}) - assert res.status_code in {200, 201}, res.json() - study_id = res.json() + study_id = preparer.create_variant(study_id, name="Variant 1") # ============================= # CREATION @@ -176,23 +152,18 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st assert res.status_code in {200, 201}, res.json() # Creates a binding constraint with the new API - res = client.post( - f"/v1/studies/{study_id}/bindingconstraints", - json={ - "name": "binding_constraint_3", - "enabled": True, - "timeStep": "hourly", - "operator": "less", - "terms": [], - "comments": "New API", - }, + preparer.create_binding_constraint( + study_id, + name="binding_constraint_3", + enabled=True, + timeStep="hourly", + operator="less", + terms=[], + comments="New API", ) - assert res.status_code in {200, 201}, res.json() # Get Binding Constraint list - res = client.get(f"/v1/studies/{study_id}/bindingconstraints") - binding_constraints_list = res.json() - assert res.status_code == 200, res.json() + binding_constraints_list = preparer.get_binding_constraints(study_id) assert len(binding_constraints_list) == 3 # Group section should not exist as the study version is prior to 8.7 assert "group" not in binding_constraints_list[0] @@ -275,7 +246,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st expected = [ { "data": {"area1": area1_id, "area2": area2_id}, - "id": f"{area1_id}%{area2_id}", + "id": link_id, "offset": 2, "weight": 1.0, }, @@ -303,7 +274,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st expected = [ { "data": {"area1": area1_id, "area2": area2_id}, - "id": f"{area1_id}%{area2_id}", + "id": link_id, "offset": 2, "weight": 1.0, }, @@ -341,7 +312,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st } # Remove Constraint term - res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term/{area1_id}%{area2_id}") + res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/{bc_id}/term/{link_id}") assert res.status_code == 200, res.json() # Check updated terms, the deleted term should no longer exist. 
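The MATRIX_SIZES and REQUIRED_MATRICES tables at the top of this test module drive most of the new assertions: for a given operator, only the mapped RHS suffixes may exist on disk, and any other suffix must yield a 404. A minimal, self-contained sketch of that rule (illustrative only, not part of the patch):

    REQUIRED_MATRICES = {"less": {"lt"}, "equal": {"eq"}, "greater": {"gt"}, "both": {"lt", "gt"}}

    def expected_suffixes(operator: str):
        # return the (required, superfluous) RHS matrix suffixes for an operator
        required = REQUIRED_MATRICES[operator]
        return required, {"lt", "gt", "eq"} - required

    # a "both" constraint needs the lt and gt series; an eq series would be superfluous
    assert expected_suffixes("both") == ({"lt", "gt"}, {"eq"})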
@@ -550,39 +521,17 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         # STUDY PREPARATION
         # =============================
 
-        res = client.post("/v1/studies", params={"name": "foo"})
-        assert res.status_code == 201, res.json()
-        study_id = res.json()
+        preparer = PreparerProxy(client, user_access_token)
+        study_id = preparer.create_study("foo", version=870)
 
         if study_type == "variant":
-            # Create Variant
-            res = client.post(f"/v1/studies/{study_id}/variants", params={"name": "Variant 1"})
-            assert res.status_code in {200, 201}
-            study_id = res.json()
+            study_id = preparer.create_variant(study_id, name="Variant 1")
 
-        # Create Areas
-        res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 1", "type": "AREA"})
-        assert res.status_code == 200, res.json()
-        area1_id = res.json()["id"]
-        assert area1_id == "area 1"
-
-        res = client.post(f"/v1/studies/{study_id}/areas", json={"name": "Area 2", "type": "AREA"})
-        assert res.status_code == 200, res.json()
-        area2_id = res.json()["id"]
-        assert area2_id == "area 2"
-
-        # Create a link between the two areas
-        res = client.post(f"/v1/studies/{study_id}/links", json={"area1": area1_id, "area2": area2_id})
-        assert res.status_code == 200, res.json()
-
-        # Create a cluster in area1
-        res = client.post(
-            f"/v1/studies/{study_id}/areas/{area1_id}/clusters/thermal",
-            json={"name": "Cluster 1", "group": "Nuclear"},
-        )
-        assert res.status_code == 200, res.json()
-        cluster_id = res.json()["id"]
-        assert cluster_id == "Cluster 1"
+        # Create Areas, link and cluster
+        area1_id = preparer.create_area(study_id, name="Area 1")["id"]
+        area2_id = preparer.create_area(study_id, name="Area 2")["id"]
+        link_id = preparer.create_link(study_id, area1_id=area1_id, area2_id=area2_id)["id"]
+        cluster_id = preparer.create_thermal(study_id, area1_id, name="Cluster 1", group="Nuclear")["id"]
 
         # =============================
         # CREATION
@@ -591,27 +540,34 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         # Creation of a bc without group
         bc_id_wo_group = "binding_constraint_1"
         args = {"enabled": True, "timeStep": "hourly", "operator": "less", "terms": [], "comments": "New API"}
-        res = client.post(f"/v1/studies/{study_id}/bindingconstraints", json={"name": bc_id_wo_group, **args})
-        assert res.status_code in {200, 201}
-        assert res.json()["group"] == "default"
+        operator_1 = "lt"
+        properties = preparer.create_binding_constraint(study_id, name=bc_id_wo_group, **args)
+        assert properties["group"] == "default"
 
         # Creation of bc with a group
         bc_id_w_group = "binding_constraint_2"
-        res = client.post(
-            f"/v1/studies/{study_id}/bindingconstraints",
-            json={"name": bc_id_w_group, "group": "specific_grp", **args},
-        )
-        assert res.status_code in {200, 201}
-        assert res.json()["group"] == "specific_grp"
+        args["operator"], operator_2 = "greater", "gt"
+        properties = preparer.create_binding_constraint(study_id, name=bc_id_w_group, group="specific_grp", **args)
+        assert properties["group"] == "specific_grp"
 
         # Creation of bc with a matrix
        bc_id_w_matrix = "binding_constraint_3"
         matrix_lt3 = np.ones((8784, 3))
+        args["operator"], operator_3 = "equal", "eq"
+        # verify that creating a binding constraint with a `less_term_matrix`
+        # while using an `equal` operator raises a 422 error
         res = client.post(
             f"/v1/studies/{study_id}/bindingconstraints",
             json={"name": bc_id_w_matrix, "less_term_matrix": matrix_lt3.tolist(), **args},
        )
-        assert res.status_code in {200, 201}, res.json()
+        assert res.status_code == 422, res.json()
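The 422 above is the server refusing an RHS matrix field that does not match the constraint's operator. The antarest implementation is not shown in this hunk; the standalone sketch below (hypothetical names, same rule as the one these tests assert) is only meant to make the mapping explicit:

    # mapping assumed from the behaviour asserted in these tests
    OPERATOR_MATRIX_FIELDS = {
        "less": {"less_term_matrix"},
        "greater": {"greater_term_matrix"},
        "equal": {"equal_term_matrix"},
        "both": {"less_term_matrix", "greater_term_matrix"},
    }

    def check_matrix_fields(operator: str, payload: dict) -> None:
        # reject any RHS matrix field that does not belong to the operator
        sent = {key for key in payload if key.endswith("_term_matrix")}
        invalid = sent - OPERATOR_MATRIX_FIELDS[operator]
        if invalid:
            # surfaced by the API as an HTTP 422 error
            raise ValueError(f"{sorted(invalid)} cannot be filled when operator is {operator!r}")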
+
+        # now we create the binding constraint with the correct matrix
+        res = client.post(
+            f"/v1/studies/{study_id}/bindingconstraints",
+            json={"name": bc_id_w_matrix, "equal_term_matrix": matrix_lt3.tolist(), **args},
+        )
+        res.raise_for_status()
 
         if study_type == "variant":
             res = client.get(f"/v1/studies/{study_id}/commands")
@@ -619,21 +575,34 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
             less_term_matrix = last_cmd_args["less_term_matrix"]
             equal_term_matrix = last_cmd_args["equal_term_matrix"]
             greater_term_matrix = last_cmd_args["greater_term_matrix"]
-            assert greater_term_matrix == equal_term_matrix != less_term_matrix
+            assert greater_term_matrix == less_term_matrix != equal_term_matrix
 
         # Check that raw matrices are created
-        for term in ["lt", "gt", "eq"]:
-            path = f"input/bindingconstraints/{bc_id_w_matrix}_{term}"
-            res = client.get(
-                f"/v1/studies/{study_id}/raw",
-                params={"path": path, "depth": 1, "formatted": True},  # type: ignore
-            )
-            assert res.status_code == 200, res.json()
-            data = res.json()["data"]
-            if term == "lt":
-                assert data == matrix_lt3.tolist()
-            else:
-                assert data == np.zeros((matrix_lt3.shape[0], 1)).tolist()
+        for bc_id, operator in zip(
+            [bc_id_wo_group, bc_id_w_group, bc_id_w_matrix], [operator_1, operator_2, operator_3]
+        ):
+            for term in ["lt", "gt", "eq"]:
+                path = f"input/bindingconstraints/{bc_id}_{term}"
+                res = client.get(
+                    f"/v1/studies/{study_id}/raw",
+                    params={"path": path, "depth": 1, "formatted": True},  # type: ignore
+                )
+                # only the matrix matching the operator is saved, so any other term matrix must be missing
+                if term != operator:
+                    assert res.status_code == 404, res.json()
+                    continue
+                assert res.status_code == 200, res.json()
+                data = res.json()["data"]
+                if bc_id == bc_id_w_matrix:
+                    assert data == matrix_lt3.tolist()
+                else:
+                    assert data == np.zeros((matrix_lt3.shape[0], 1)).tolist()
 
         # =============================
         # CONSTRAINT TERM MANAGEMENT
@@ -671,7 +640,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
             description = res.json()["description"]
             assert exception == "DuplicateConstraintTerm"
             assert bc_id_w_group in description, "Error message should contain the binding constraint ID"
-            assert f"{area1_id}%{area2_id}" in description, "Error message should contain the duplicate term ID"
+            assert link_id in description, "Error message should contain the duplicate term ID"
 
         # Get binding constraints list to check added terms
         res = client.get(f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}")
@@ -681,7 +650,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         expected = [
             {
                 "data": {"area1": area1_id, "area2": area2_id},
-                "id": f"{area1_id}%{area2_id}",
+                "id": link_id,
                 "offset": 2,
                 "weight": 1.0,
             },
@@ -699,7 +668,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
             f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_group}/terms",
             json=[
                 {
-                    "id": f"{area1_id}%{area2_id}",
+                    "id": link_id,
                     "weight": 4.4,
                     "offset": 1,
                 },
@@ -720,7 +689,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         expected = [
             {
                 "data": {"area1": area1_id, "area2": area2_id},
-                "id": f"{area1_id}%{area2_id}",
+                "id": link_id,
                 "offset": 1,
                 "weight": 4.4,
             },
@@ -746,13 +715,32 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         assert res.status_code == 200, res.json()
         assert res.json()["group"] == grp_name
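The zero-filled fallback checked above is the default RHS series for v8.7 constraints: a single column of zeros whose row count depends on the constraint's time step. A self-contained sketch of that default (illustrative only, reusing the sizes from the top of the module):

    import numpy as np

    MATRIX_SIZES = {"hourly": 8784, "daily": 366, "weekly": 366}

    def default_rhs(time_step: str):
        # one zero-filled column; 8784 rows for hourly, 366 for daily/weekly
        return np.zeros((MATRIX_SIZES[time_step], 1))

    assert default_rhs("hourly").shape == (8784, 1)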
 
-        # Update matrix_term
+        # check that updating a binding constraint whose operator is "equal"
+        # with a greater matrix raises a 422 error
+        res = client.put(
+            f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}",
+            json={"greater_term_matrix": matrix_lt3.tolist()},
+        )
+        assert res.status_code == 422, res.json()
+        assert "greater_term_matrix" in res.json()["description"]
+        assert "equal" in res.json()["description"]
+        assert res.json()["exception"] == "InvalidFieldForVersionError"
+
+        # update the binding constraint operator first
+        res = client.put(
+            f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}",
+            json={"operator": "greater"},
+        )
+        assert res.status_code == 200, res.json()
+
+        # then update the binding constraint matrix
         res = client.put(
             f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}",
             json={"greater_term_matrix": matrix_lt3.tolist()},
         )
         assert res.status_code == 200, res.json()
 
+        # check that the matrix has been updated
         res = client.get(
             f"/v1/studies/{study_id}/raw",
             params={"path": f"input/bindingconstraints/{bc_id_w_matrix}_gt"},
@@ -784,17 +772,44 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
 
         # Check that the matrices are daily/weekly matrices
         expected_matrix = np.zeros((366, 1))
-        for term_alias in ["lt", "gt", "eq"]:
-            res = client.get(
-                f"/v1/studies/{study_id}/raw",
-                params={
-                    "path": f"input/bindingconstraints/{bc_id_w_matrix}_{term_alias}",
-                    "depth": 1,
-                    "formatted": True,
-                },  # type: ignore
-            )
+        for operator in ["less", "equal", "greater", "both"]:
+            if operator != "both":
+                res = client.put(
+                    f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}",
+                    json={"operator": operator, f"{operator}_term_matrix": expected_matrix.tolist()},
+                )
+            else:
+                res = client.put(
+                    f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}",
+                    json={
+                        "operator": operator,
+                        "greater_term_matrix": expected_matrix.tolist(),
+                        "less_term_matrix": expected_matrix.tolist(),
+                    },
+                )
             assert res.status_code == 200, res.json()
-            assert res.json()["data"] == expected_matrix.tolist()
+            for term_operator, term_alias in zip(["less", "equal", "greater"], ["lt", "eq", "gt"]):
+                res = client.get(
+                    f"/v1/studies/{study_id}/raw",
+                    params={
+                        "path": f"input/bindingconstraints/{bc_id_w_matrix}_{term_alias}",
+                        "depth": 1,
+                        "formatted": True,
+                    },  # type: ignore
+                )
+                # the matrix is updated only when its term alias matches the current operator
+                if term_operator == operator or (operator == "both" and term_operator in ["less", "greater"]):
+                    assert res.status_code == 200, res.json()
+                    assert res.json()["data"] == expected_matrix.tolist()
+                else:
+                    assert res.status_code == 404, res.json()
+
+        # set binding constraint operator to "less"
+        res = client.put(
+            f"/v1/studies/{study_id}/bindingconstraints/{bc_id_w_matrix}",
+            json={"operator": "less"},
+        )
+        assert res.status_code == 200, res.json()
 
         # =============================
         # DELETE
@@ -805,28 +820,30 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         assert res.status_code == 200, res.json()
 
         # Asserts that the deletion worked
-        res = client.get(f"/v1/studies/{study_id}/bindingconstraints")
-        assert len(res.json()) == 2
+        binding_constraints_list = preparer.get_binding_constraints(study_id)
+        assert len(binding_constraints_list) == 2
 
         # =============================
         # ERRORS
         # =============================
 
         # Creation with wrong matrix according to version
-        res = client.post(
-            f"/v1/studies/{study_id}/bindingconstraints",
-            json={
-                "name": "binding_constraint_700",
-                "enabled": True,
-                "timeStep": "hourly",
-                "operator": "less",
-                "terms": [],
-                "comments": "New API",
-                "values": [[]],
-            },
-        )
-        assert res.status_code == 422, res.json()
-        assert res.json()["description"] == "You cannot fill 'values' as it refers to the matrix before v8.7"
+        for operator in ["less", "equal", "greater", "both"]:
+            args["operator"] = operator
+            res = client.post(
+                f"/v1/studies/{study_id}/bindingconstraints",
+                json={
+                    "name": "binding_constraint_4",
+                    "enabled": True,
+                    "timeStep": "hourly",
+                    "operator": operator,
+                    "terms": [],
+                    "comments": "New API",
+                    "values": [[]],
+                },
+            )
+            assert res.status_code == 422, res.json()
+            assert res.json()["description"] == "You cannot fill 'values' as it refers to the matrix before v8.7"
 
         # Update with old matrices
         res = client.put(
@@ -862,14 +879,16 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         #
         # Creation of 1 BC
         # Update raw with wrong columns size -> OK but validation should fail
-        #
-        matrix_lt3 = np.ones((8784, 3))
+        # update the args "operator" field to "greater"
+        args["operator"] = "greater"
+
+        matrix_gt3 = np.ones((8784, 3))
         res = client.post(
             f"/v1/studies/{study_id}/bindingconstraints",
             json={
                 "name": "First BC",
-                "less_term_matrix": matrix_lt3.tolist(),
+                "greater_term_matrix": matrix_gt3.tolist(),
                 "group": "Group 1",
                 **args,
             },
         )
@@ -879,12 +898,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
 
         generator = np.random.default_rng(11)
         random_matrix = pd.DataFrame(generator.integers(0, 10, size=(4, 1)))
-        _upload_matrix(
-            client,
-            study_id,
-            f"input/bindingconstraints/{first_bc_id}_gt",
-            random_matrix,
-        )
+        preparer.upload_matrix(study_id, f"input/bindingconstraints/{first_bc_id}_gt", random_matrix)
 
         # Validation should fail
         res = client.get(f"/v1/studies/{study_id}/constraint-groups/Group 1/validate")
@@ -896,7 +910,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         # So, we correct the shape of the matrix
         res = client.put(
             f"/v1/studies/{study_id}/bindingconstraints/{first_bc_id}",
-            json={"greater_term_matrix": matrix_lt3.tolist()},
+            json={"greater_term_matrix": matrix_gt3.tolist()},
        )
         assert res.status_code in {200, 201}, res.json()
@@ -944,6 +958,7 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         # third_bd group changes to group1 -> Fails validation
         #
+        args["operator"] = "less"
         matrix_lt4 = np.ones((8784, 4))
         res = client.post(
             f"/v1/studies/{study_id}/bindingconstraints",
@@ -972,9 +987,15 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         assert re.search(r"the most common width in the group is 3", description, flags=re.IGNORECASE)
         assert re.search(r"'third bc_lt' has 4 columns", description, flags=re.IGNORECASE)
 
+        # first change the `second_bc` operator to "greater"
+        res = client.put(
+            f"/v1/studies/{study_id}/bindingconstraints/{second_bc_id}",
+            json={"operator": "greater"},
+        )
+        assert res.status_code == 200, res.json()
+
         # So, we correct the shape of the matrix of the Second BC
         res = client.put(
-            f"/v1/studies/{study_id}/bindingconstraints/{third_bd_id}",
+            f"/v1/studies/{study_id}/bindingconstraints/{second_bc_id}",
             json={"greater_term_matrix": matrix_lt3.tolist()},
         )
         assert res.status_code in {200, 201}, res.json()
@@ -1002,6 +1023,12 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
 
         # This should succeed but cause the validation endpoint to fail.
         assert res.status_code in {200, 201}, res.json()
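The validation failures above all come from one rule: within a binding-constraint group, every RHS matrix must have the same number of columns, and outliers are reported against the most common width. The authoritative check lives in the antarest validation code, not in this patch; the sketch below only illustrates the rule with a hypothetical helper:

    from collections import Counter

    def group_width_errors(widths: dict) -> list:
        # widths: matrix name -> column count, for all matrices of one group
        counted = Counter(widths.values())
        if len(counted) <= 1:
            return []
        most_common = counted.most_common(1)[0][0]
        return [
            f"{name!r} has {width} columns, the most common width in the group is {most_common}"
            for name, width in widths.items()
            if width != most_common
        ]

    assert group_width_errors({"first bc_gt": 3, "second bc_gt": 3, "third bc_lt": 4}) == [
        "'third bc_lt' has 4 columns, the most common width in the group is 3"
    ]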
 
+        # reset the `second_bc` operator to "less"
+        res = client.put(
+            f"/v1/studies/{study_id}/bindingconstraints/{second_bc_id}",
+            json={"operator": "less"},
+        )
+        assert res.status_code == 200, res.json()
+
         # Collect all the binding constraints groups
         res = client.get(f"/v1/studies/{study_id}/constraint-groups")
         assert res.status_code in {200, 201}, res.json()
@@ -1031,3 +1058,153 @@ def test_for_version_870(self, client: TestClient, user_access_token: str, study
         assert re.search(r"'Group 1':", description, flags=re.IGNORECASE)
         assert re.search(r"the most common width in the group is 3", description, flags=re.IGNORECASE)
         assert re.search(r"'third bc_lt' has 4 columns", description, flags=re.IGNORECASE)
+
+    @pytest.mark.parametrize("study_version", [870])
+    @pytest.mark.parametrize("denormalize", [True, False])
+    def test_rhs_matrices(
+        self, client: TestClient, user_access_token: str, study_version: int, denormalize: bool
+    ) -> None:
+        """
+        The goal of this test is to verify that no unnecessary RHS matrices are created
+        when a binding constraint is **created** or **updated**.
+        This test only concerns studies in **version >= 8.7**, for which there is a specific matrix
+        for each operator: "less", "equal", "greater" or "both".
+
+        To perform this test, we create a raw study "Base Study" with a "France" area
+        and a single thermal cluster "Nuclear".
+        We then create a variant study "Variant Study" based on the raw study "Base Study"
+        to apply binding constraint creation or update commands.
+
+        Using a variant and commands makes it possible to check the behavior for both variant
+        studies and raw studies by generating the variant snapshot.
+
+        To verify the expected behaviors, we check the number and the names of the matrices
+        after generating the snapshot.
+        In the case of an update, and depending on the values of the `operator` and `time_step`
+        parameters, we also check whether the matrix values are preserved or zeroed.
+        """
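As the docstring notes, changing the operator must move or copy the existing RHS series into the file names required by the new operator and remove the rest; the authoritative logic is _update_matrices_names in update_binding_constraint.py, exercised further down in this patch. A minimal sketch of which files are expected before and after a change, assuming only the suffix mapping used throughout these tests:

    OPERATOR_FILES = {"less": {"lt"}, "greater": {"gt"}, "equal": {"eq"}, "both": {"lt", "gt"}}

    def file_plan(existing: str, new: str) -> dict:
        before, after = OPERATOR_FILES[existing], OPERATOR_FILES[new]
        return {"keep": before & after, "create": after - before, "drop": before - after}

    # switching from "less" to "both" keeps the lt series, adds a gt series, drops nothing
    assert file_plan("less", "both") == {"keep": {"lt"}, "create": {"gt"}, "drop": set()}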
+        client.headers = {"Authorization": f"Bearer {user_access_token}"}  # type: ignore
+
+        # =============================
+        # RAW STUDY PREPARATION
+        # =============================
+
+        preparer = PreparerProxy(client, user_access_token)
+        study_id = preparer.create_study("Base Study", version=study_version)
+        area_id = preparer.create_area(study_id, name="France")["id"]
+        cluster_id = preparer.create_thermal(study_id, area_id, name="Nuclear", group="Nuclear")["id"]
+
+        # =============================
+        # VARIANT STUDY CREATION
+        # =============================
+
+        variant_id = preparer.create_variant(study_id, name="Variant Study")
+
+        # =============================
+        # CREATION W/O MATRICES
+        # =============================
+
+        all_time_steps = set(MATRIX_SIZES)
+        all_operators = set(REQUIRED_MATRICES)
+
+        for bc_time_step in all_time_steps:
+            for bc_operator in all_operators:
+                bc_name = f"BC_{bc_time_step}_{bc_operator}"
+                # Creation of a binding constraint without matrices using a command
+                res = client.post(
+                    f"/v1/studies/{variant_id}/commands",
+                    json=[
+                        {
+                            "action": "create_binding_constraint",
+                            "args": {
+                                "name": bc_name,
+                                "type": bc_time_step,
+                                "operator": bc_operator,
+                                "coeffs": {f"{area_id}.{cluster_id.lower()}": [1, 2]},
+                            },
+                        }
+                    ],
+                )
+                assert res.status_code == 200, res.json()
+
+        preparer.generate_snapshot(variant_id, denormalize=denormalize)
+
+        # Check the size, values and existence of the matrices
+        for bc_time_step in all_time_steps:
+            for bc_operator in all_operators:
+                bc_name = f"BC_{bc_time_step}_{bc_operator}"
+                bc_id = bc_name.lower()
+
+                required_matrices = REQUIRED_MATRICES[bc_operator]
+                for matrix in required_matrices:
+                    df = preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}")
+                    assert df.shape == (MATRIX_SIZES[bc_time_step], 1)
+                    assert (df == 0).all().all()
+
+                superfluous_matrices = {"lt", "gt", "eq"} - required_matrices
+                for matrix in superfluous_matrices:
+                    try:
+                        preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}")
+                    except HTTPError as e:
+                        assert e.response.status_code == 404
+                    else:
+                        assert False, "The matrix should not exist"
+
+        # drop all commands to avoid conflicts with the next part of the test
+        preparer.drop_all_commands(variant_id)
+
+        # =============================
+        # CREATION WITH MATRICES
+        # =============================
+
+        # random matrices
+        matrices_by_time_steps = {
+            time_step: np.random.rand(size, 1).astype(np.float64) for time_step, size in MATRIX_SIZES.items()
+        }
+
+        for bc_time_step in all_time_steps:
+            for bc_operator in all_operators:
+                bc_name = f"BC_{bc_time_step}_{bc_operator}"
+                matrix = matrices_by_time_steps[bc_time_step].tolist()
+                args = {
+                    "name": bc_name,
+                    "type": bc_time_step,
+                    "operator": bc_operator,
+                    "coeffs": {f"{area_id}.{cluster_id.lower()}": [1, 2]},
+                }
+                if bc_operator == "less":
+                    args["lessTermMatrix"] = matrix
+                elif bc_operator == "greater":
+                    args["greaterTermMatrix"] = matrix
+                elif bc_operator == "equal":
+                    args["equalTermMatrix"] = matrix
+                else:
+                    args["lessTermMatrix"] = args["greaterTermMatrix"] = matrix
+                res = client.post(
+                    f"/v1/studies/{variant_id}/commands",
+                    json=[{"action": "create_binding_constraint", "args": args}],
+                )
+                assert res.status_code == 200, res.json()
+
+        preparer.generate_snapshot(variant_id, denormalize=denormalize)
+
+        # Check the size, values and existence of the matrices
+        for bc_time_step in all_time_steps:
+            for bc_operator in all_operators:
+                bc_name = f"BC_{bc_time_step}_{bc_operator}"
+                bc_id = bc_name.lower()
+
+                
required_matrices = REQUIRED_MATRICES[bc_operator] + for matrix in required_matrices: + df = preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}") + assert df.shape == (MATRIX_SIZES[bc_time_step], 1) + assert np.allclose(df.values, matrices_by_time_steps[bc_time_step], atol=1e-6) + + superfluous_matrices = {"lt", "gt", "eq"} - required_matrices + for matrix in superfluous_matrices: + try: + preparer.download_matrix(variant_id, f"input/bindingconstraints/{bc_id}_{matrix}") + except HTTPError as e: + assert e.response.status_code == 404 + else: + assert False, "The matrix should not exist" diff --git a/tests/storage/business/test_xpansion_manager.py b/tests/storage/business/test_xpansion_manager.py index bb5651bcbd..100bddd286 100644 --- a/tests/storage/business/test_xpansion_manager.py +++ b/tests/storage/business/test_xpansion_manager.py @@ -10,6 +10,7 @@ from fastapi import UploadFile from pandas.errors import ParserError +from antarest.core.exceptions import ChildNotFoundError from antarest.core.model import JSON from antarest.study.business.xpansion_management import ( FileCurrentlyUsedInSettings, @@ -26,7 +27,6 @@ from antarest.study.model import RawStudy from antarest.study.storage.rawstudy.model.filesystem.config.files import build from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.root.filestudytree import FileStudyTree from antarest.study.storage.rawstudy.raw_study_service import RawStudyService from antarest.study.storage.storage_service import StudyStorageService diff --git a/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py index 6b7bcbaa01..b6ac49fce1 100644 --- a/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py +++ b/tests/storage/repository/filesystem/matrix/test_input_series_matrix.py @@ -5,11 +5,11 @@ import pytest +from antarest.core.exceptions import ChildNotFoundError from antarest.matrixstore.service import ISimpleMatrixService from antarest.matrixstore.uri_resolver_service import UriResolverService from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.input_series_matrix import InputSeriesMatrix diff --git a/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py b/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py index d77bd47ee2..e6eb256c51 100644 --- a/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py +++ b/tests/storage/repository/filesystem/matrix/test_output_series_matrix.py @@ -4,9 +4,8 @@ import pandas as pd import pytest -from antarest.core.exceptions import MustNotModifyOutputException +from antarest.core.exceptions import ChildNotFoundError, MustNotModifyOutputException from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.matrix.head_writer import AreaHeadWriter from antarest.study.storage.rawstudy.model.filesystem.matrix.matrix 
import MatrixFrequency from antarest.study.storage.rawstudy.model.filesystem.matrix.output_series_matrix import OutputSeriesMatrix diff --git a/tests/storage/repository/filesystem/test_folder_node.py b/tests/storage/repository/filesystem/test_folder_node.py index 7927927d7e..ae017d7007 100644 --- a/tests/storage/repository/filesystem/test_folder_node.py +++ b/tests/storage/repository/filesystem/test_folder_node.py @@ -6,9 +6,9 @@ import pytest +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import StudyFactory -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode from antarest.study.storage.rawstudy.model.filesystem.raw_file_node import RawFileNode diff --git a/tests/storage/repository/filesystem/test_lazy_node.py b/tests/storage/repository/filesystem/test_lazy_node.py index f899d32fa3..a2c72415f5 100644 --- a/tests/storage/repository/filesystem/test_lazy_node.py +++ b/tests/storage/repository/filesystem/test_lazy_node.py @@ -2,6 +2,8 @@ from typing import List, Optional from unittest.mock import Mock +import pytest + from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.context import ContextServer from antarest.study.storage.rawstudy.model.filesystem.lazy_node import LazyNode @@ -138,3 +140,113 @@ def test_save_txt(tmp_path: Path): assert file.read_text() == content assert not link.exists() resolver.resolve.assert_called_once_with(content) + + +@pytest.mark.parametrize("target_is_link", [True, False]) +def test_rename_file(tmp_path: Path, target_is_link: bool): + file = tmp_path / "my-study/lazy.txt" + file.parent.mkdir() + + link = file.parent / f"{file.name}.link" + link.write_text("Link: Mock File Content") + + resolver = Mock() + resolver.resolve.return_value = None + + resolver2 = Mock() + resolver2.resolve.return_value = None + + config = FileStudyTreeConfig(study_path=file, path=file, version=-1, study_id="") + context = ContextServer(matrix=Mock(), resolver=resolver) + node = MockLazyNode(context=context, config=config) + + renaming_file = file.parent / "lazy_rename.txt" + renaming_link = file.parent / f"{renaming_file.name}.link" + config2 = FileStudyTreeConfig(study_path=renaming_file, path=renaming_file, version=-1, study_id="") + context2 = ContextServer(matrix=Mock(), resolver=resolver2) + target = MockLazyNode(context=context2, config=config2) + + if target_is_link: + assert not renaming_link.exists() + assert link.exists() + assert not file.exists() + assert not renaming_file.exists() + + node.rename_file(target) + + assert not link.exists() + assert renaming_link.exists() + assert not file.exists() + assert not renaming_file.exists() + assert renaming_link.read_text() == "Link: Mock File Content" + + else: + content = "No Link: Mock File Content" + node.save(content) + assert file.read_text() == content + assert not link.exists() + assert not renaming_file.exists() + resolver.resolve.assert_called_once_with(content) + + node.rename_file(target) + + assert not link.exists() + assert not file.exists() + assert renaming_file.exists() + assert not renaming_link.exists() + assert renaming_file.read_text() == "No Link: Mock File 
Content" + + +@pytest.mark.parametrize("target_is_link", [True, False]) +def test_copy_file(tmp_path: Path, target_is_link: bool): + file = tmp_path / "my-study/lazy.txt" + file.parent.mkdir() + + link = file.parent / f"{file.name}.link" + link.write_text("Link: Mock File Content") + + resolver = Mock() + resolver.resolve.return_value = None + + resolver2 = Mock() + resolver2.resolve.return_value = None + + config = FileStudyTreeConfig(study_path=file, path=file, version=-1, study_id="") + context = ContextServer(matrix=Mock(), resolver=resolver) + node = MockLazyNode(context=context, config=config) + + copied_file = file.parent / "lazy_copy.txt" + copied_link = file.parent / f"{copied_file.name}.link" + config2 = FileStudyTreeConfig(study_path=copied_file, path=copied_file, version=-1, study_id="") + context2 = ContextServer(matrix=Mock(), resolver=resolver2) + target = MockLazyNode(context=context2, config=config2) + + if target_is_link: + assert not copied_link.exists() + assert link.exists() + assert not file.exists() + assert not copied_file.exists() + + node.copy_file(target) + + assert link.exists() + assert copied_link.exists() + assert not file.exists() + assert not copied_file.exists() + assert copied_link.read_text() == "Link: Mock File Content" + + else: + content = "No Link: Mock File Content" + node.save(content) + assert file.read_text() == content + assert not link.exists() + assert not copied_file.exists() + resolver.resolve.assert_called_once_with(content) + + node.copy_file(target) + + assert not link.exists() + assert file.exists() + assert copied_file.exists() + assert not copied_link.exists() + assert copied_file.read_text() == "No Link: Mock File Content" diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index dd5e8a917e..f2dc3ccaf5 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -26,7 +26,10 @@ from antarest.study.storage.variantstudy.model.command.remove_area import RemoveArea from antarest.study.storage.variantstudy.model.command.remove_binding_constraint import RemoveBindingConstraint from antarest.study.storage.variantstudy.model.command.remove_link import RemoveLink -from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint +from antarest.study.storage.variantstudy.model.command.update_binding_constraint import ( + UpdateBindingConstraint, + _update_matrices_names, +) from antarest.study.storage.variantstudy.model.command.update_scenario_builder import UpdateScenarioBuilder from antarest.study.storage.variantstudy.model.command_context import CommandContext @@ -72,10 +75,7 @@ def test_manage_binding_constraint(empty_study: FileStudy, command_context: Comm matrix_links = [ # fmt: off "bd 1_lt.txt.link", - "bd 1_eq.txt.link", - "bd 1_gt.txt.link", "bd 2_lt.txt.link", - "bd 2_eq.txt.link", "bd 2_gt.txt.link", # fmt: on ] @@ -514,3 +514,94 @@ def test_create_diff(command_context: CommandContext): base = RemoveBindingConstraint(id="foo", command_context=command_context) other_match = RemoveBindingConstraint(id="foo", command_context=command_context) assert base.create_diff(other_match) == [] + + +@pytest.mark.parametrize( + "existing_operator, new_operator", + [ + (BindingConstraintOperator.LESS, BindingConstraintOperator.LESS), + (BindingConstraintOperator.LESS, BindingConstraintOperator.GREATER), + 
(BindingConstraintOperator.LESS, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.LESS, BindingConstraintOperator.EQUAL), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.LESS), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.GREATER), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.GREATER, BindingConstraintOperator.EQUAL), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.LESS), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.GREATER), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.BOTH, BindingConstraintOperator.EQUAL), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.LESS), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.GREATER), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.BOTH), + (BindingConstraintOperator.EQUAL, BindingConstraintOperator.EQUAL), + ], +) +@pytest.mark.parametrize("empty_study", ["empty_study_870.zip"], indirect=True) +def test__update_matrices_names( + empty_study: FileStudy, + command_context: CommandContext, + existing_operator: BindingConstraintOperator, + new_operator: BindingConstraintOperator, +): + study_path = empty_study.config.study_path + + all_file_templates = {"{bc_id}_eq.txt.link", "{bc_id}_gt.txt.link", "{bc_id}_lt.txt.link"} + + operator_matrix_file_map = { + BindingConstraintOperator.EQUAL: ["{bc_id}_eq.txt.link"], + BindingConstraintOperator.GREATER: ["{bc_id}_gt.txt.link"], + BindingConstraintOperator.LESS: ["{bc_id}_lt.txt.link"], + BindingConstraintOperator.BOTH: ["{bc_id}_lt.txt.link", "{bc_id}_gt.txt.link"], + } + + area1 = "area1" + area2 = "area2" + cluster = "cluster" + CreateArea(area_name=area1, command_context=command_context).apply(empty_study) + CreateArea(area_name=area2, command_context=command_context).apply(empty_study) + CreateLink(area1=area1, area2=area2, command_context=command_context).apply(empty_study) + CreateCluster(area_id=area1, cluster_name=cluster, parameters={}, command_context=command_context).apply( + empty_study + ) + + # create a binding constraint + _ = CreateBindingConstraint( + name="BD_RENAME_MATRICES", + time_step=BindingConstraintFrequency.HOURLY, + operator=existing_operator, + coeffs={"area1%area2": [800, 30]}, + command_context=command_context, + ).apply(empty_study) + + # check that the matrices are created + file_templates = set(operator_matrix_file_map[existing_operator]) + superfluous_templates = all_file_templates - file_templates + existing_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in file_templates] + superfluous_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in superfluous_templates] + for matrix_link in existing_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert link_path.exists(), f"Missing matrix link: {matrix_link!r}" + for matrix_link in superfluous_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert not link_path.exists(), f"Superfluous matrix link: {matrix_link!r}" + + # update matrices names + _update_matrices_names( + file_study=empty_study, + binding_constraint_id="bd_rename_matrices", + existing_operator=existing_operator, + new_operator=new_operator, + ) + + # check that the matrices are renamed + file_templates = set(operator_matrix_file_map[new_operator]) + superfluous_templates = all_file_templates - file_templates + new_matrices = 
[file_template.format(bc_id="bd_rename_matrices") for file_template in file_templates] + superfluous_matrices = [file_template.format(bc_id="bd_rename_matrices") for file_template in superfluous_templates] + for matrix_link in new_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert link_path.exists(), f"Missing matrix link: {matrix_link!r}" + for matrix_link in superfluous_matrices: + link_path = study_path / f"input/bindingconstraints/{matrix_link}" + assert not link_path.exists(), f"Superfluous matrix link: {matrix_link!r}" diff --git a/tests/variantstudy/model/command/test_update_config.py b/tests/variantstudy/model/command/test_update_config.py index 99c71bd6d7..999adb6c70 100644 --- a/tests/variantstudy/model/command/test_update_config.py +++ b/tests/variantstudy/model/command/test_update_config.py @@ -3,10 +3,10 @@ import pytest +from antarest.core.exceptions import ChildNotFoundError from antarest.study.storage.rawstudy.ini_reader import IniReader from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.remove_area import RemoveArea
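For readers tracing the lazy-node changes: the rename/copy tests above pin down the on-disk behaviour the binding-constraint commands rely on. While a node is still a `.link` placeholder, its link file is the thing moved or copied; once the content has been materialized, the concrete file is. A simplified sketch of just that file-level rule (the real LazyNode.rename_file / copy_file in lazy_node.py also handle matrix URI resolution):

    import shutil
    from pathlib import Path

    def rename_lazy(src: Path, dst: Path) -> None:
        src_link, dst_link = src.with_name(src.name + ".link"), dst.with_name(dst.name + ".link")
        if src_link.exists():
            src_link.rename(dst_link)  # still lazy: move the link only
        else:
            src.rename(dst)  # materialized: move the concrete file

    def copy_lazy(src: Path, dst: Path) -> None:
        src_link, dst_link = src.with_name(src.name + ".link"), dst.with_name(dst.name + ".link")
        if src_link.exists():
            shutil.copyfile(src_link, dst_link)  # still lazy: duplicate the link, keep the source
        else:
            shutil.copyfile(src, dst)  # materialized: duplicate the file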