diff --git a/antarest/core/exceptions.py b/antarest/core/exceptions.py index ac6a980ba1..87804de393 100644 --- a/antarest/core/exceptions.py +++ b/antarest/core/exceptions.py @@ -366,7 +366,7 @@ def __init__(self, message: str) -> None: super().__init__(HTTPStatus.BAD_REQUEST, message) -class BindingConstraintNotFoundError(HTTPException): +class BindingConstraintNotFound(HTTPException): def __init__(self, message: str) -> None: super().__init__(HTTPStatus.NOT_FOUND, message) diff --git a/antarest/study/business/area_management.py b/antarest/study/business/area_management.py index 544f18d8cf..db04120884 100644 --- a/antarest/study/business/area_management.py +++ b/antarest/study/business/area_management.py @@ -1,15 +1,24 @@ +import enum import logging import re -from enum import Enum -from typing import Any, Dict, List, Optional, Sequence, Tuple +import typing as t -from pydantic import BaseModel +from pydantic import BaseModel, Extra, Field -from antarest.core.exceptions import DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound +from antarest.core.exceptions import ConfigFileNotFound, DuplicateAreaName, LayerNotAllowedToBeDeleted, LayerNotFound +from antarest.core.model import JSON +from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Patch, PatchArea, PatchCluster, RawStudy, Study from antarest.study.repository import StudyMetadataRepository from antarest.study.storage.patch_service import PatchService +from antarest.study.storage.rawstudy.model.filesystem.config.area import ( + AdequacyPathProperties, + AreaFolder, + OptimizationProperties, + ThermalAreasProperties, + UIProperties, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, DistrictSet, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.storage_service import StudyStorageService @@ -21,7 +30,7 @@ logger = logging.getLogger(__name__) -class AreaType(Enum): +class AreaType(enum.Enum): AREA = "AREA" DISTRICT = "DISTRICT" @@ -29,44 +38,86 @@ class AreaType(Enum): class AreaCreationDTO(BaseModel): name: str type: AreaType - metadata: Optional[PatchArea] - set: Optional[List[str]] + metadata: t.Optional[PatchArea] + set: t.Optional[t.List[str]] +# review: is this class necessary? 
class ClusterInfoDTO(PatchCluster): id: str name: str enabled: bool = True unitcount: int = 0 nominalcapacity: int = 0 - group: Optional[str] = None - min_stable_power: Optional[int] = None - min_up_time: Optional[int] = None - min_down_time: Optional[int] = None - spinning: Optional[float] = None - marginal_cost: Optional[float] = None - spread_cost: Optional[float] = None - market_bid_cost: Optional[float] = None + group: t.Optional[str] = None + min_stable_power: t.Optional[int] = None + min_up_time: t.Optional[int] = None + min_down_time: t.Optional[int] = None + spinning: t.Optional[float] = None + marginal_cost: t.Optional[float] = None + spread_cost: t.Optional[float] = None + market_bid_cost: t.Optional[float] = None class AreaInfoDTO(AreaCreationDTO): id: str - thermals: Optional[List[ClusterInfoDTO]] = None - - -class AreaUI(BaseModel): - x: int - y: int - color_rgb: Tuple[int, int, int] + thermals: t.Optional[t.List[ClusterInfoDTO]] = None class LayerInfoDTO(BaseModel): id: str name: str - areas: List[str] + areas: t.List[str] + +class UpdateAreaUi(BaseModel, extra="forbid", allow_population_by_field_name=True): + """ + DTO for updating area UI + + Usage: + + >>> from antarest.study.business.area_management import UpdateAreaUi + >>> from pprint import pprint + + >>> obj = { + ... "x": -673.75, + ... "y": 301.5, + ... "color_rgb": [230, 108, 44], + ... "layerX": {"0": -230, "4": -230, "6": -95, "7": -230, "8": -230}, + ... "layerY": {"0": 136, "4": 136, "6": 39, "7": 136, "8": 136}, + ... "layerColor": { + ... "0": "230, 108, 44", + ... "4": "230, 108, 44", + ... "6": "230, 108, 44", + ... "7": "230, 108, 44", + ... "8": "230, 108, 44", + ... }, + ... } + + >>> model = UpdateAreaUi(**obj) + >>> pprint(model.dict(by_alias=True), width=80) + {'colorRgb': [230, 108, 44], + 'layerColor': {0: '230, 108, 44', + 4: '230, 108, 44', + 6: '230, 108, 44', + 7: '230, 108, 44', + 8: '230, 108, 44'}, + 'layerX': {0: -230, 4: -230, 6: -95, 7: -230, 8: -230}, + 'layerY': {0: 136, 4: 136, 6: 39, 7: 136, 8: 136}, + 'x': -673, + 'y': 301} -def _get_ui_info_map(file_study: FileStudy, area_ids: Sequence[str]) -> Dict[str, Any]: + """ + + x: int = Field(title="X position") + y: int = Field(title="Y position") + color_rgb: t.Sequence[int] = Field(title="RGB color", alias="colorRgb") + layer_x: t.Mapping[int, int] = Field(default_factory=dict, title="X position of each layer", alias="layerX") + layer_y: t.Mapping[int, int] = Field(default_factory=dict, title="Y position of each layer", alias="layerY") + layer_color: t.Mapping[int, str] = Field(default_factory=dict, title="Color of each layer", alias="layerColor") + + +def _get_ui_info_map(file_study: FileStudy, area_ids: t.Sequence[str]) -> t.Dict[str, t.Any]: """ Get the UI information (a JSON object) for each selected Area. @@ -84,7 +135,9 @@ def _get_ui_info_map(file_study: FileStudy, area_ids: Sequence[str]) -> Dict[str # instead of raising an obscure exception. if not area_ids: return {} + ui_info_map = file_study.tree.get(["input", "areas", ",".join(area_ids), "ui"]) + # If there is only one ID in the `area_ids`, the result returned from # the `file_study.tree.get` call will be a single UI object. # On the other hand, if there are multiple values in `area_ids`, @@ -92,25 +145,255 @@ def _get_ui_info_map(file_study: FileStudy, area_ids: Sequence[str]) -> Dict[str # and the values are the corresponding UI objects. if len(area_ids) == 1: ui_info_map = {area_ids[0]: ui_info_map} + + # Convert to UIProperties to ensure that the UI object is valid. 
+    ui_info_map = {area_id: UIProperties(**ui_info).to_config() for area_id, ui_info in ui_info_map.items()}
+
     return ui_info_map


-def _get_area_layers(area_uis: Dict[str, Any], area: str) -> List[str]:
+def _get_area_layers(area_uis: t.Dict[str, t.Any], area: str) -> t.List[str]:
     if area in area_uis and "ui" in area_uis[area] and "layers" in area_uis[area]["ui"]:
         return re.split(r"\s+", (str(area_uis[area]["ui"]["layers"]) or ""))
     return []


+_ALL_AREAS_PATH = "input/areas"
+_THERMAL_AREAS_PATH = "input/thermal/areas"
+
+
+# noinspection SpellCheckingInspection
+class _BaseAreaDTO(
+    OptimizationProperties.FilteringSection,
+    OptimizationProperties.ModalOptimizationSection,
+    AdequacyPathProperties.AdequacyPathSection,
+    extra=Extra.forbid,
+    validate_assignment=True,
+    allow_population_by_field_name=True,
+):
+    """
+    Represents an area output.
+
+    Aggregates the fields of the `OptimizationProperties` and `AdequacyPathProperties` classes,
+    but without the `UIProperties` fields.
+
+    Adds the fields extracted from the `/input/thermal/areas.ini` information:
+
+    - `average_unsupplied_energy_cost` is extracted from `unserverd_energy_cost`,
+    - `average_spilled_energy_cost` is extracted from `spilled_energy_cost`.
+    """
+
+    average_unsupplied_energy_cost: float = Field(0.0, description="average unserved energy cost (€/MWh)")
+    average_spilled_energy_cost: float = Field(0.0, description="average spilled energy cost (€/MWh)")
+
+
+# noinspection SpellCheckingInspection
+@camel_case_model
+class AreaOutput(_BaseAreaDTO, metaclass=AllOptionalMetaclass, use_none=True):
+    """
+    DTO object used to get the area information using a flat structure.
+    """
+
+    @classmethod
+    def from_model(
+        cls,
+        area_folder: AreaFolder,
+        *,
+        average_unsupplied_energy_cost: float,
+        average_spilled_energy_cost: float,
+    ) -> "AreaOutput":
+        """
+        Creates an `AreaOutput` object from configuration data.
+
+        Args:
+            area_folder: Configuration data read from the `/input/areas/` information.
+            average_unsupplied_energy_cost: Unserved energy cost (€/MWh).
+            average_spilled_energy_cost: Spilled energy cost (€/MWh).
+        Returns:
+            The `AreaOutput` object.
+        """
+        obj = {
+            "average_unsupplied_energy_cost": average_unsupplied_energy_cost,
+            "average_spilled_energy_cost": average_spilled_energy_cost,
+            **area_folder.optimization.filtering.dict(by_alias=False),
+            **area_folder.optimization.nodal_optimization.dict(by_alias=False),
+            # adequacy_patch is only available if study version >= 830.
+            **(area_folder.adequacy_patch.adequacy_patch.dict(by_alias=False) if area_folder.adequacy_patch else {}),
+        }
+        return cls(**obj)
+
+    def _to_optimization(self) -> OptimizationProperties:
+        obj = {name: getattr(self, name) for name in OptimizationProperties.FilteringSection.__fields__}
+        filtering_section = OptimizationProperties.FilteringSection(**obj)
+        obj = {name: getattr(self, name) for name in OptimizationProperties.ModalOptimizationSection.__fields__}
+        nodal_optimization_section = OptimizationProperties.ModalOptimizationSection(**obj)
+        return OptimizationProperties(
+            filtering=filtering_section,
+            nodal_optimization=nodal_optimization_section,
+        )
+
+    def _to_adequacy_patch(self) -> AdequacyPathProperties:
+        obj = {name: getattr(self, name) for name in AdequacyPathProperties.AdequacyPathSection.__fields__}
+        adequacy_path_section = AdequacyPathProperties.AdequacyPathSection(**obj)
+        return AdequacyPathProperties(adequacy_patch=adequacy_path_section)
+
+    @property
+    def area_folder(self) -> AreaFolder:
+        area_folder = AreaFolder(
+            optimization=self._to_optimization(),
+            adequacy_patch=self._to_adequacy_patch(),
+            # UI properties are not configurable in Table Mode
+        )
+        return area_folder
+
+
 class AreaManager:
+    """
+    Manages operations related to areas in a study, including retrieval, creation, and updates.
+
+    Attributes:
+        storage_service: The service responsible for study storage operations.
+        patch_service: The service responsible for study patch operations.
+            This service is used to store additional data for each area, in particular the country
+            of origin (`country`) and a list of tags for searching (`tags`).
+    """
+
     def __init__(
         self,
         storage_service: StudyStorageService,
         repository: StudyMetadataRepository,
     ) -> None:
+        """
+        Initializes the AreaManager.
+
+        Args:
+            storage_service: The service responsible for study storage operations.
+            repository: The repository for study metadata operations.
+        """
         self.storage_service = storage_service
         self.patch_service = PatchService(repository=repository)

-    def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) -> List[AreaInfoDTO]:
+    # noinspection SpellCheckingInspection
+    def get_all_area_props(self, study: RawStudy) -> t.Mapping[str, AreaOutput]:
+        """
+        Retrieves the properties of all areas of a study.
+
+        Args:
+            study: The raw study object.
+        Returns:
+            A mapping of area IDs to area properties.
+        Raises:
+            ConfigFileNotFound: if a configuration file is not found.
+        """
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+
+        # Get the area information from the `/input/areas/` file.
+        path = _ALL_AREAS_PATH
+        try:
+            areas_cfg = file_study.tree.get(path.split("/"), depth=5)
+        except KeyError:
+            raise ConfigFileNotFound(path) from None
+        else:
+            # "list" and "sets" must be removed: we only need areas.
+            areas_cfg.pop("list", None)
+            areas_cfg.pop("sets", None)
+
+        # Get the unserved and spilled energy costs from the `/input/thermal/areas.ini` file.
+        path = _THERMAL_AREAS_PATH
+        try:
+            thermal_cfg = file_study.tree.get(path.split("/"), depth=3)
+        except KeyError:
+            raise ConfigFileNotFound(path) from None
+        else:
+            thermal_areas = ThermalAreasProperties(**thermal_cfg)
+
+        # areas_cfg contains a dictionary where the keys are the area IDs,
+        # and the values are objects that can be converted to `AreaFolder`.
+        area_map = {}
+        for area_id, area_cfg in areas_cfg.items():
+            area_folder = AreaFolder(**area_cfg)
+            area_map[area_id] = AreaOutput.from_model(
+                area_folder,
+                average_unsupplied_energy_cost=thermal_areas.unserverd_energy_cost.get(area_id, 0.0),
+                average_spilled_energy_cost=thermal_areas.spilled_energy_cost.get(area_id, 0.0),
+            )
+
+        return area_map
+
+    # noinspection SpellCheckingInspection
+    def update_areas_props(
+        self, study: RawStudy, update_areas_by_ids: t.Mapping[str, AreaOutput]
+    ) -> t.Mapping[str, AreaOutput]:
+        """
+        Update the properties of areas.
+
+        Args:
+            study: The raw study object.
+            update_areas_by_ids: A mapping of area IDs to area properties.
+
+        Returns:
+            A mapping of ALL area IDs to area properties.
+        """
+        old_areas_by_ids = self.get_all_area_props(study)
+        new_areas_by_ids = {k: v for k, v in old_areas_by_ids.items()}
+
+        # Prepare the commands to update the areas.
+        commands = []
+        command_context = self.storage_service.variant_study_service.command_factory.command_context
+
+        for area_id, update_area in update_areas_by_ids.items():
+            # Update the area properties.
+            old_area = old_areas_by_ids[area_id]
+            new_area = old_area.copy(update=update_area.dict(by_alias=False, exclude_none=True))
+            new_areas_by_ids[area_id] = new_area
+
+            # Convert the DTO to a configuration object and update the configuration file.
+            old_area_folder = old_area.area_folder
+            new_area_folder = new_area.area_folder
+
+            if old_area_folder.optimization != new_area_folder.optimization:
+                commands.append(
+                    UpdateConfig(
+                        target=f"input/areas/{area_id}/optimization",
+                        data=new_area_folder.optimization.to_config(),
+                        command_context=command_context,
+                    )
+                )
+            if old_area_folder.adequacy_patch != new_area_folder.adequacy_patch and new_area_folder.adequacy_patch:
+                commands.append(
+                    UpdateConfig(
+                        target=f"input/areas/{area_id}/adequacy_patch",
+                        data=new_area_folder.adequacy_patch.to_config(),
+                        command_context=command_context,
+                    )
+                )
+            if old_area.average_unsupplied_energy_cost != new_area.average_unsupplied_energy_cost:
+                commands.append(
+                    UpdateConfig(
+                        target=f"input/thermal/areas/unserverdenergycost/{area_id}",
+                        data=new_area.average_unsupplied_energy_cost,
+                        command_context=command_context,
+                    )
+                )
+            if old_area.average_spilled_energy_cost != new_area.average_spilled_energy_cost:
+                commands.append(
+                    UpdateConfig(
+                        target=f"input/thermal/areas/spilledenergycost/{area_id}",
+                        data=new_area.average_spilled_energy_cost,
+                        command_context=command_context,
+                    )
+                )
+
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+        execute_or_add_commands(study, file_study, commands, self.storage_service)
+
+        return new_areas_by_ids
+
+    @staticmethod
+    def get_table_schema() -> JSON:
+        return AreaOutput.schema()
+
+    def get_all_areas(self, study: RawStudy, area_type: t.Optional[AreaType] = None) -> t.List[AreaInfoDTO]:
         """
         Retrieves all areas and districts of a raw study based on the area type.
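All the `update_*_props` methods introduced by this diff (areas, links, thermal/renewable/storage clusters) share one merge pattern: read the current DTOs, overlay only the fields the caller actually sent (`exclude_none=True`), then compare old and new objects to decide which `UpdateConfig` commands to emit. A minimal standalone sketch of the overlay step, with a toy model standing in for `AreaOutput` (pydantic v1 API, as used throughout this codebase; the field names are illustrative):

```python
import typing as t

from pydantic import BaseModel


class Props(BaseModel):
    # Toy stand-in for AreaOutput: every field optional, as AllOptionalMetaclass produces.
    average_unsupplied_energy_cost: t.Optional[float] = None
    average_spilled_energy_cost: t.Optional[float] = None


old = Props(average_unsupplied_energy_cost=1000.0, average_spilled_energy_cost=10.0)
update = Props(average_spilled_energy_cost=20.0)  # the caller only sets one field

# `exclude_none=True` drops the untouched fields, so the copy keeps the old values.
new = old.copy(update=update.dict(exclude_none=True))

assert new.average_unsupplied_energy_cost == 1000.0  # preserved
assert new.average_spilled_energy_cost == 20.0  # overwritten
assert old != new  # this inequality is what triggers an UpdateConfig command
```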
@@ -124,9 +407,9 @@ def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) - storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) metadata = self.patch_service.get(study) - areas_metadata: Dict[str, PatchArea] = metadata.areas or {} - cfg_areas: Dict[str, Area] = file_study.config.areas - result: List[AreaInfoDTO] = [] + areas_metadata: t.Dict[str, PatchArea] = metadata.areas or {} + cfg_areas: t.Dict[str, Area] = file_study.config.areas + result: t.List[AreaInfoDTO] = [] if area_type is None or area_type == AreaType.AREA: result.extend( @@ -141,7 +424,7 @@ def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) - ) if area_type is None or area_type == AreaType.DISTRICT: - cfg_sets: Dict[str, DistrictSet] = file_study.config.sets + cfg_sets: t.Dict[str, DistrictSet] = file_study.config.sets result.extend( AreaInfoDTO( id=set_id, @@ -155,7 +438,7 @@ def get_all_areas(self, study: RawStudy, area_type: Optional[AreaType] = None) - return result - def get_all_areas_ui_info(self, study: RawStudy) -> Dict[str, Any]: + def get_all_areas_ui_info(self, study: RawStudy) -> t.Dict[str, t.Any]: """ Retrieve information about all areas' user interface (UI) from the study. @@ -173,7 +456,7 @@ def get_all_areas_ui_info(self, study: RawStudy) -> Dict[str, Any]: area_ids = list(file_study.config.areas) return _get_ui_info_map(file_study, area_ids) - def get_layers(self, study: RawStudy) -> List[LayerInfoDTO]: + def get_layers(self, study: RawStudy) -> t.List[LayerInfoDTO]: storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) area_ids = list(file_study.config.areas) @@ -196,7 +479,7 @@ def get_layers(self, study: RawStudy) -> List[LayerInfoDTO]: for layer in layers ] - def update_layer_areas(self, study: RawStudy, layer_id: str, areas: List[str]) -> None: + def update_layer_areas(self, study: RawStudy, layer_id: str, areas: t.List[str]) -> None: logger.info(f"Updating layer {layer_id} with areas {areas}") file_study = self.storage_service.get_storage(study).get_raw(study) layers = file_study.tree.get(["layers", "layers", "layers"]) @@ -213,9 +496,9 @@ def update_layer_areas(self, study: RawStudy, layer_id: str, areas: List[str]) - ] to_remove_areas = [area for area in existing_areas if area not in areas] to_add_areas = [area for area in areas if area not in existing_areas] - commands: List[ICommand] = [] + commands: t.List[ICommand] = [] - def create_update_commands(area_id: str) -> List[ICommand]: + def create_update_commands(area_id: str) -> t.List[ICommand]: return [ UpdateConfig( target=f"input/areas/{area_id}/ui/layerX", @@ -235,7 +518,7 @@ def create_update_commands(area_id: str) -> List[ICommand]: ] for area in to_remove_areas: - area_to_remove_layers: List[str] = _get_area_layers(areas_ui, area) + area_to_remove_layers: t.List[str] = _get_area_layers(areas_ui, area) if layer_id in areas_ui[area]["layerX"]: del areas_ui[area]["layerX"][layer_id] if layer_id in areas_ui[area]["layerY"]: @@ -246,7 +529,7 @@ def create_update_commands(area_id: str) -> List[ICommand]: ) commands.extend(create_update_commands(area)) for area in to_add_areas: - area_to_add_layers: List[str] = _get_area_layers(areas_ui, area) + area_to_add_layers: t.List[str] = _get_area_layers(areas_ui, area) if layer_id not in areas_ui[area]["layerX"]: areas_ui[area]["layerX"][layer_id] = areas_ui[area]["ui"]["x"] if layer_id not in areas_ui[area]["layerY"]: @@ -365,33 +648,40 @@ def 
update_area_metadata( set=area_or_set.get_areas(list(file_study.config.areas)) if isinstance(area_or_set, DistrictSet) else [], ) - def update_area_ui(self, study: Study, area_id: str, area_ui: AreaUI, layer: str = "0") -> None: + def update_area_ui(self, study: Study, area_id: str, area_ui: UpdateAreaUi, layer: str = "0") -> None: + obj = { + "x": area_ui.x, + "y": area_ui.y, + "color_r": area_ui.color_rgb[0], + "color_g": area_ui.color_rgb[1], + "color_b": area_ui.color_rgb[2], + } file_study = self.storage_service.get_storage(study).get_raw(study) commands = ( [ UpdateConfig( target=f"input/areas/{area_id}/ui/ui/x", - data=area_ui.x, + data=obj["x"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/y", - data=area_ui.y, + data=obj["y"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/color_r", - data=area_ui.color_rgb[0], + data=obj["color_r"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/color_g", - data=area_ui.color_rgb[1], + data=obj["color_g"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/ui/color_b", - data=area_ui.color_rgb[2], + data=obj["color_b"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), ] @@ -402,17 +692,17 @@ def update_area_ui(self, study: Study, area_id: str, area_ui: AreaUI, layer: str [ UpdateConfig( target=f"input/areas/{area_id}/ui/layerX/{layer}", - data=area_ui.x, + data=obj["x"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/layerY/{layer}", - data=area_ui.y, + data=obj["y"], command_context=self.storage_service.variant_study_service.command_factory.command_context, ), UpdateConfig( target=f"input/areas/{area_id}/ui/layerColor/{layer}", - data=f"{str(area_ui.color_rgb[0])} , {str(area_ui.color_rgb[1])} , {str(area_ui.color_rgb[2])}", + data=f"{obj['color_r']},{obj['color_g']},{obj['color_b']}", command_context=self.storage_service.variant_study_service.command_factory.command_context, ), ] @@ -423,7 +713,7 @@ def update_thermal_cluster_metadata( self, study: Study, area_id: str, - clusters_metadata: Dict[str, PatchCluster], + clusters_metadata: t.Dict[str, PatchCluster], ) -> AreaInfoDTO: file_study = self.storage_service.get_storage(study).get_raw(study) patch = self.patch_service.get(study) @@ -452,7 +742,7 @@ def delete_area(self, study: Study, area_id: str) -> None: def _update_with_cluster_metadata( area: str, info: ClusterInfoDTO, - cluster_patch: Dict[str, PatchCluster], + cluster_patch: t.Dict[str, PatchCluster], ) -> ClusterInfoDTO: patch = cluster_patch.get(f"{area}.{info.id}", PatchCluster()) info.code_oi = patch.code_oi @@ -460,14 +750,11 @@ def _update_with_cluster_metadata( return info @staticmethod - def _get_clusters(file_study: FileStudy, area: str, metadata_patch: Patch) -> List[ClusterInfoDTO]: + def _get_clusters(file_study: FileStudy, area: str, metadata_patch: Patch) -> t.List[ClusterInfoDTO]: thermal_clusters_data = file_study.tree.get(["input", "thermal", "clusters", area, "list"]) cluster_patch = metadata_patch.thermal_clusters or {} - return [ - AreaManager._update_with_cluster_metadata( - area, - 
ClusterInfoDTO.parse_obj({**thermal_clusters_data[tid], "id": tid}), - cluster_patch, - ) - for tid in thermal_clusters_data + result = [ + AreaManager._update_with_cluster_metadata(area, ClusterInfoDTO(id=tid, **obj), cluster_patch) + for tid, obj in thermal_clusters_data.items() ] + return result diff --git a/antarest/study/business/areas/properties_management.py b/antarest/study/business/areas/properties_management.py index 96850d6d7b..2014c554dc 100644 --- a/antarest/study/business/areas/properties_management.py +++ b/antarest/study/business/areas/properties_management.py @@ -4,9 +4,9 @@ from pydantic import root_validator -from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import FieldInfo, FormFieldsBaseModel, execute_or_add_commands from antarest.study.model import Study +from antarest.study.storage.rawstudy.model.filesystem.config.area import AdequacyPatchMode from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig @@ -37,12 +37,6 @@ def decode_filter(encoded_value: Set[str], current_filter: Optional[str] = None) return ", ".join(sort_filter_options(encoded_value)) -class AdequacyPatchMode(EnumIgnoreCase): - OUTSIDE = "outside" - INSIDE = "inside" - VIRTUAL = "virtual" - - class PropertiesFormFields(FormFieldsBaseModel): energy_cost_unsupplied: Optional[float] energy_cost_spilled: Optional[float] diff --git a/antarest/study/business/areas/renewable_management.py b/antarest/study/business/areas/renewable_management.py index 84f6e56672..1009c9d22c 100644 --- a/antarest/study/business/areas/renewable_management.py +++ b/antarest/study/business/areas/renewable_management.py @@ -1,9 +1,11 @@ +import collections import json import typing as t from pydantic import validator from antarest.core.exceptions import DuplicateRenewableCluster, RenewableClusterConfigNotFound, RenewableClusterNotFound +from antarest.core.model import JSON from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import execute_or_add_commands @@ -22,16 +24,9 @@ from antarest.study.storage.variantstudy.model.command.replace_matrix import ReplaceMatrix from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig -__all__ = ( - "RenewableClusterInput", - "RenewableClusterCreation", - "RenewableClusterOutput", - "RenewableManager", - "TimeSeriesInterpretation", -) - _CLUSTER_PATH = "input/renewables/clusters/{area_id}/list/{cluster_id}" _CLUSTERS_PATH = "input/renewables/clusters/{area_id}/list" +_ALL_CLUSTERS_PATH = "input/renewables/clusters" class TimeSeriesInterpretation(EnumIgnoreCase): @@ -145,6 +140,42 @@ def get_clusters(self, study: Study, area_id: str) -> t.Sequence[RenewableCluste return [create_renewable_output(study.version, cluster_id, cluster) for cluster_id, cluster in clusters.items()] + def get_all_renewables_props( + self, + study: Study, + ) -> t.Mapping[str, t.Mapping[str, RenewableClusterOutput]]: + """ + Retrieve all renewable clusters from all areas within a study. + + Args: + study: Study from which to retrieve the clusters. + + Returns: + A mapping of area IDs to a mapping of cluster IDs to cluster output. 
+
+        Raises:
+            RenewableClusterConfigNotFound: If no clusters are found in the study.
+        """
+
+        file_study = self._get_file_study(study)
+        path = _ALL_CLUSTERS_PATH
+        try:
+            # may raise KeyError if the path is missing
+            clusters = file_study.tree.get(path.split("/"), depth=5)
+            # may raise KeyError if "list" is missing
+            clusters = {area_id: cluster_list["list"] for area_id, cluster_list in clusters.items()}
+        except KeyError:
+            raise RenewableClusterConfigNotFound(path) from None
+
+        study_version = study.version
+        renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterOutput]]
+        renewables_by_areas = collections.defaultdict(dict)
+        for area_id, cluster_obj in clusters.items():
+            for cluster_id, cluster in cluster_obj.items():
+                renewables_by_areas[area_id][cluster_id] = create_renewable_output(study_version, cluster_id, cluster)
+
+        return renewables_by_areas
+
     def create_cluster(
         self, study: Study, area_id: str, cluster_data: RenewableClusterCreation
     ) -> RenewableClusterOutput:
@@ -328,3 +359,42 @@ def duplicate_cluster(
         execute_or_add_commands(study, self._get_file_study(study), commands, self.storage_service)

         return RenewableClusterOutput(**new_config.dict(by_alias=False))
+
+    def update_renewables_props(
+        self,
+        study: Study,
+        update_renewables_by_areas: t.Mapping[str, t.Mapping[str, RenewableClusterInput]],
+    ) -> t.Mapping[str, t.Mapping[str, RenewableClusterOutput]]:
+        old_renewables_by_areas = self.get_all_renewables_props(study)
+        new_renewables_by_areas = {area_id: dict(clusters) for area_id, clusters in old_renewables_by_areas.items()}
+
+        # Prepare the commands to update the renewable clusters.
+        commands = []
+        for area_id, update_renewables_by_ids in update_renewables_by_areas.items():
+            old_renewables_by_ids = old_renewables_by_areas[area_id]
+            for renewable_id, update_cluster in update_renewables_by_ids.items():
+                # Update the renewable cluster properties.
+                old_cluster = old_renewables_by_ids[renewable_id]
+                new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True))
+                new_renewables_by_areas[area_id][renewable_id] = new_cluster
+
+                # Convert the DTO to a configuration object and update the configuration file.
+                properties = create_renewable_config(
+                    study.version, **new_cluster.dict(by_alias=False, exclude_none=True)
+                )
+                path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=renewable_id)
+                cmd = UpdateConfig(
+                    target=path,
+                    data=json.loads(properties.json(by_alias=True, exclude={"id"})),
+                    command_context=self.storage_service.variant_study_service.command_factory.command_context,
+                )
+                commands.append(cmd)
+
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+        execute_or_add_commands(study, file_study, commands, self.storage_service)
+
+        return new_renewables_by_areas
+
+    @staticmethod
+    def get_table_schema() -> JSON:
+        return RenewableClusterOutput.schema()
diff --git a/antarest/study/business/areas/st_storage_management.py b/antarest/study/business/areas/st_storage_management.py
index 73f03b8ec3..373f8c3ea4 100644
--- a/antarest/study/business/areas/st_storage_management.py
+++ b/antarest/study/business/areas/st_storage_management.py
@@ -1,3 +1,4 @@
+import collections
 import functools
 import json
 import operator
@@ -15,6 +16,7 @@
     STStorageMatrixNotFound,
     STStorageNotFound,
 )
+from antarest.core.model import JSON
 from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model
 from antarest.study.business.utils import execute_or_add_commands
 from antarest.study.model import Study
@@ -78,7 +80,7 @@ def to_config(self, study_version: t.Union[str, int]) -> STStorageConfigType:


 @camel_case_model
-class STStorageOutput(STStorage880Config):
+class STStorageOutput(STStorage880Config, metaclass=AllOptionalMetaclass, use_none=True):
     """
     Model representing the form used to display the details of a short-term storage entry.
     """
@@ -216,6 +218,7 @@ def validate_rule_curve(

 _STORAGE_LIST_PATH = "input/st-storage/clusters/{area_id}/list/{storage_id}"
 _STORAGE_SERIES_PATH = "input/st-storage/series/{area_id}/{storage_id}/{ts_name}"
+_ALL_STORAGE_PATH = "input/st-storage/clusters"


 def _get_values_by_ids(file_study: FileStudy, area_id: str) -> t.Mapping[str, t.Mapping[str, t.Any]]:
@@ -327,6 +330,77 @@ def get_storages(
         storages = [create_storage_output(study_version, storage_id, options) for storage_id, options in config.items()]
         return sorted(storages, key=order_by)

+    def get_all_storages_props(
+        self,
+        study: Study,
+    ) -> t.Mapping[str, t.Mapping[str, STStorageOutput]]:
+        """
+        Retrieve all short-term storages from all areas within a study.
+
+        Args:
+            study: Study from which to retrieve the storages.
+
+        Returns:
+            A mapping of area IDs to a mapping of storage IDs to storage configurations.
+
+        Raises:
+            STStorageConfigNotFound: If no storages are found in the study.
+ """ + + file_study = self._get_file_study(study) + path = _ALL_STORAGE_PATH + try: + # may raise KeyError if the path is missing + storages = file_study.tree.get(path.split("/"), depth=5) + # may raise KeyError if "list" is missing + storages = {area_id: cluster_list["list"] for area_id, cluster_list in storages.items()} + except KeyError: + raise STStorageConfigNotFound(path) from None + + study_version = study.version + storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageOutput]] + storages_by_areas = collections.defaultdict(dict) + for area_id, cluster_obj in storages.items(): + for cluster_id, cluster in cluster_obj.items(): + storages_by_areas[area_id][cluster_id] = create_storage_output(study_version, cluster_id, cluster) + + return storages_by_areas + + def update_storages_props( + self, + study: Study, + update_storages_by_areas: t.Mapping[str, t.Mapping[str, STStorageInput]], + ) -> t.Mapping[str, t.Mapping[str, STStorageOutput]]: + old_storages_by_areas = self.get_all_storages_props(study) + new_storages_by_areas = {area_id: dict(clusters) for area_id, clusters in old_storages_by_areas.items()} + + # Prepare the commands to update the storage clusters. + commands = [] + for area_id, update_storages_by_ids in update_storages_by_areas.items(): + old_storages_by_ids = old_storages_by_areas[area_id] + for storage_id, update_cluster in update_storages_by_ids.items(): + # Update the storage cluster properties. + old_cluster = old_storages_by_ids[storage_id] + new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True)) + new_storages_by_areas[area_id][storage_id] = new_cluster + + # Convert the DTO to a configuration object and update the configuration file. + properties = create_st_storage_config( + study.version, **new_cluster.dict(by_alias=False, exclude_none=True) + ) + path = _STORAGE_LIST_PATH.format(area_id=area_id, storage_id=storage_id) + cmd = UpdateConfig( + target=path, + data=json.loads(properties.json(by_alias=True, exclude={"id"})), + command_context=self.storage_service.variant_study_service.command_factory.command_context, + ) + commands.append(cmd) + + file_study = self.storage_service.get_storage(study).get_raw(study) + execute_or_add_commands(study, file_study, commands, self.storage_service) + + return new_storages_by_areas + def get_storage( self, study: Study, @@ -613,3 +687,7 @@ def validate_matrices( # Validation successful return True + + @staticmethod + def get_table_schema() -> JSON: + return STStorageOutput.schema() diff --git a/antarest/study/business/areas/thermal_management.py b/antarest/study/business/areas/thermal_management.py index d5520c0d43..205965eb54 100644 --- a/antarest/study/business/areas/thermal_management.py +++ b/antarest/study/business/areas/thermal_management.py @@ -1,3 +1,4 @@ +import collections import json import typing as t from pathlib import Path @@ -11,6 +12,7 @@ ThermalClusterNotFound, WrongMatrixHeightError, ) +from antarest.core.model import JSON from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study @@ -37,6 +39,7 @@ _CLUSTER_PATH = "input/thermal/clusters/{area_id}/list/{cluster_id}" _CLUSTERS_PATH = "input/thermal/clusters/{area_id}/list" +_ALL_CLUSTERS_PATH = "input/thermal/clusters" @camel_case_model @@ -186,6 +189,79 @@ def get_clusters( study_version = study.version return [create_thermal_output(study_version, cluster_id, 
cluster) for cluster_id, cluster in clusters.items()]

+    def get_all_thermals_props(
+        self,
+        study: Study,
+    ) -> t.Mapping[str, t.Mapping[str, ThermalClusterOutput]]:
+        """
+        Retrieve all thermal clusters from all areas within a study.
+
+        Args:
+            study: Study from which to retrieve the clusters.
+
+        Returns:
+            A mapping of area IDs to a mapping of cluster IDs to thermal cluster configurations.
+
+        Raises:
+            ThermalClusterConfigNotFound: If no clusters are found in the study.
+        """
+
+        file_study = self._get_file_study(study)
+        path = _ALL_CLUSTERS_PATH
+        try:
+            # may raise KeyError if the path is missing
+            clusters = file_study.tree.get(path.split("/"), depth=5)
+            # may raise KeyError if "list" is missing
+            clusters = {area_id: cluster_list["list"] for area_id, cluster_list in clusters.items()}
+        except KeyError:
+            raise ThermalClusterConfigNotFound(path) from None
+
+        study_version = study.version
+        thermals_by_areas: t.MutableMapping[str, t.MutableMapping[str, ThermalClusterOutput]]
+        thermals_by_areas = collections.defaultdict(dict)
+        for area_id, cluster_obj in clusters.items():
+            for cluster_id, cluster in cluster_obj.items():
+                thermals_by_areas[area_id][cluster_id] = create_thermal_output(study_version, cluster_id, cluster)
+
+        return thermals_by_areas
+
+    def update_thermals_props(
+        self,
+        study: Study,
+        update_thermals_by_areas: t.Mapping[str, t.Mapping[str, ThermalClusterInput]],
+    ) -> t.Mapping[str, t.Mapping[str, ThermalClusterOutput]]:
+        old_thermals_by_areas = self.get_all_thermals_props(study)
+        new_thermals_by_areas = {area_id: dict(clusters) for area_id, clusters in old_thermals_by_areas.items()}
+
+        # Prepare the commands to update the thermal clusters.
+        commands = []
+        for area_id, update_thermals_by_ids in update_thermals_by_areas.items():
+            old_thermals_by_ids = old_thermals_by_areas[area_id]
+            for thermal_id, update_cluster in update_thermals_by_ids.items():
+                # Update the thermal cluster properties.
+                old_cluster = old_thermals_by_ids[thermal_id]
+                new_cluster = old_cluster.copy(update=update_cluster.dict(by_alias=False, exclude_none=True))
+                new_thermals_by_areas[area_id][thermal_id] = new_cluster
+
+                # Convert the DTO to a configuration object and update the configuration file.
+                properties = create_thermal_config(study.version, **new_cluster.dict(by_alias=False, exclude_none=True))
+                path = _CLUSTER_PATH.format(area_id=area_id, cluster_id=thermal_id)
+                cmd = UpdateConfig(
+                    target=path,
+                    data=json.loads(properties.json(by_alias=True, exclude={"id"})),
+                    command_context=self.storage_service.variant_study_service.command_factory.command_context,
+                )
+                commands.append(cmd)
+
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+        execute_or_add_commands(study, file_study, commands, self.storage_service)
+
+        return new_thermals_by_areas
+
+    @staticmethod
+    def get_table_schema() -> JSON:
+        return ThermalClusterOutput.schema()
+
     def create_cluster(self, study: Study, area_id: str, cluster_data: ThermalClusterCreation) -> ThermalClusterOutput:
         """
         Create a new cluster.
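The `get_all_*_props` readers for thermal, renewable, and short-term storage clusters all follow the same traversal: one deep `tree.get` on the parent directory, unwrapping of each area's `"list"` section, then regrouping into a two-level mapping. A self-contained sketch of that traversal with a toy in-memory tree (the dictionary literal stands in for `file_study.tree.get(...)` output and is illustrative, not real study data):

```python
import collections
import typing as t

# Toy stand-in for the nested mapping returned by
# `file_study.tree.get("input/thermal/clusters".split("/"), depth=5)`:
# {area_id: {"list": {cluster_id: cluster_cfg}}}.
tree: t.Dict[str, t.Dict[str, t.Any]] = {
    "east": {"list": {"gas_1": {"enabled": True}, "coal_1": {"enabled": False}}},
    "west": {"list": {"oil_1": {"enabled": True}}},
}

# Unwrap the "list" sections; a missing section raises KeyError, as in the diff.
clusters = {area_id: section["list"] for area_id, section in tree.items()}

# Regroup cluster configurations per area; the real code wraps each `cfg`
# in a `*ClusterOutput` DTO via `create_thermal_output(...)` and friends.
by_areas: t.DefaultDict[str, t.Dict[str, t.Any]] = collections.defaultdict(dict)
for area_id, cluster_obj in clusters.items():
    for cluster_id, cfg in cluster_obj.items():
        by_areas[area_id][cluster_id] = cfg

assert by_areas["east"]["gas_1"] == {"enabled": True}
assert set(by_areas) == {"east", "west"}
```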
diff --git a/antarest/study/business/binding_constraint_management.py b/antarest/study/business/binding_constraint_management.py index 010e1424d0..9c29c5925d 100644 --- a/antarest/study/business/binding_constraint_management.py +++ b/antarest/study/business/binding_constraint_management.py @@ -2,14 +2,14 @@ import itertools import json import logging -from typing import Any, Dict, List, Mapping, MutableSequence, Optional, Sequence, Tuple, Union +import typing as t import numpy as np from pydantic import BaseModel, Field, root_validator, validator from requests.utils import CaseInsensitiveDict from antarest.core.exceptions import ( - BindingConstraintNotFoundError, + BindingConstraintNotFound, ConstraintAlreadyExistError, ConstraintIdNotFoundError, DuplicateConstraintName, @@ -20,11 +20,15 @@ NoConstraintError, WrongMatrixHeightError, ) +from antarest.core.model import JSON from antarest.core.utils.string import to_camel_case from antarest.study.business.all_optional_meta import camel_case_model from antarest.study.business.utils import execute_or_add_commands from antarest.study.model import Study -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.storage_service import StudyStorageService @@ -40,7 +44,6 @@ from antarest.study.storage.variantstudy.business.matrix_constants.binding_constraint.series_before_v87 import ( default_bc_weekly_daily as default_bc_weekly_daily_86, ) -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_binding_constraint import ( DEFAULT_GROUP, EXPECTED_MATRIX_SHAPES, @@ -106,13 +109,13 @@ class ConstraintTerm(BaseModel): data: the constraint term data (link or cluster), if any. 
""" - id: Optional[str] - weight: Optional[float] - offset: Optional[int] - data: Optional[Union[LinkTerm, ClusterTerm]] + id: t.Optional[str] + weight: t.Optional[float] + offset: t.Optional[int] + data: t.Optional[t.Union[LinkTerm, ClusterTerm]] @validator("id") - def id_to_lower(cls, v: Optional[str]) -> Optional[str]: + def id_to_lower(cls, v: t.Optional[str]) -> t.Optional[str]: """Ensure the ID is lower case.""" if v is None: return None @@ -143,11 +146,11 @@ class ConstraintFilters(BaseModel, frozen=True, extra="forbid"): """ bc_id: str = "" - enabled: Optional[bool] = None - operator: Optional[BindingConstraintOperator] = None + enabled: t.Optional[bool] = None + operator: t.Optional[BindingConstraintOperator] = None comments: str = "" group: str = "" - time_step: Optional[BindingConstraintFrequency] = None + time_step: t.Optional[BindingConstraintFrequency] = None area_name: str = "" cluster_name: str = "" link_id: str = "" @@ -233,7 +236,7 @@ class ConstraintInput870(OptionalProperties): @camel_case_model class ConstraintInput(BindingConstraintMatrices, ConstraintInput870): - terms: MutableSequence[ConstraintTerm] = Field( + terms: t.MutableSequence[ConstraintTerm] = Field( default_factory=lambda: [], ) @@ -243,7 +246,7 @@ class ConstraintCreation(ConstraintInput): name: str @root_validator(pre=True) - def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: + def check_matrices_dimensions(cls, values: t.Dict[str, t.Any]) -> t.Dict[str, t.Any]: for _key in ["time_step"] + TERM_MATRICES: _camel = to_camel_case(_key) values[_key] = values.pop(_camel, values.get(_key)) @@ -297,7 +300,7 @@ def check_matrices_dimensions(cls, values: Dict[str, Any]) -> Dict[str, Any]: class ConstraintOutputBase(BindingConstraintPropertiesBase): id: str name: str - terms: MutableSequence[ConstraintTerm] = Field(default_factory=lambda: []) + terms: t.MutableSequence[ConstraintTerm] = Field(default_factory=lambda: []) @camel_case_model @@ -313,12 +316,12 @@ class ConstraintOutput870(ConstraintOutput830): # WARNING: Do not change the order of the following line, it is used to determine # the type of the output constraint in the FastAPI endpoint. -ConstraintOutput = Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870] +ConstraintOutput = t.Union[ConstraintOutputBase, ConstraintOutput830, ConstraintOutput870] def _get_references_by_widths( - file_study: FileStudy, bcs: Sequence[ConstraintOutput] -) -> Mapping[int, Sequence[Tuple[str, str]]]: + file_study: FileStudy, bcs: t.Sequence[ConstraintOutput] +) -> t.Mapping[int, t.Sequence[t.Tuple[str, str]]]: """ Iterates over each BC and its associated matrices. For each matrix, it checks its width according to the expected matrix shapes. @@ -333,7 +336,7 @@ def _get_references_by_widths( else: matrix_id_fmts = {"{bc_id}_eq", "{bc_id}_lt", "{bc_id}_gt"} - references_by_width: Dict[int, List[Tuple[str, str]]] = {} + references_by_width: t.Dict[int, t.List[t.Tuple[str, str]]] = {} _total = len(bcs) * len(matrix_id_fmts) for _index, (bc, fmt) in enumerate(itertools.product(bcs, matrix_id_fmts), 1): bc_id = bc.id @@ -358,7 +361,7 @@ def _get_references_by_widths( return references_by_width -def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[ConstraintOutput]) -> bool: +def _validate_binding_constraints(file_study: FileStudy, bcs: t.Sequence[ConstraintOutput]) -> bool: """ Validates the binding constraints within a group. 
""" @@ -366,7 +369,7 @@ def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[Constrain if len(references_by_widths) > 1: most_common = collections.Counter(references_by_widths.keys()).most_common() - invalid_constraints: Dict[str, str] = {} + invalid_constraints: t.Dict[str, str] = {} for width, _ in most_common[1:]: references = references_by_widths[width] @@ -386,6 +389,10 @@ def _validate_binding_constraints(file_study: FileStudy, bcs: Sequence[Constrain return True +# noinspection SpellCheckingInspection +_ALL_BINDING_CONSTRAINTS_PATH = "input/bindingconstraints/bindingconstraints" + + class BindingConstraintManager: def __init__( self, @@ -394,7 +401,7 @@ def __init__( self.storage_service = storage_service @staticmethod - def parse_and_add_terms(key: str, value: Any, adapted_constraint: ConstraintOutput) -> None: + def parse_and_add_terms(key: str, value: t.Any, adapted_constraint: ConstraintOutput) -> None: """Parse a single term from the constraint dictionary and add it to the adapted_constraint model.""" if "%" in key or "." in key: separator = "%" if "%" in key else "." @@ -428,7 +435,7 @@ def parse_and_add_terms(key: str, value: Any, adapted_constraint: ConstraintOutp ) @staticmethod - def constraint_model_adapter(constraint: Mapping[str, Any], version: int) -> ConstraintOutput: + def constraint_model_adapter(constraint: t.Mapping[str, t.Any], version: int) -> ConstraintOutput: """ Adapts a binding constraint configuration to the appropriate model version. @@ -486,7 +493,7 @@ def constraint_model_adapter(constraint: Mapping[str, Any], version: int) -> Con return adapted_constraint @staticmethod - def terms_to_coeffs(terms: Sequence[ConstraintTerm]) -> Dict[str, List[float]]: + def terms_to_coeffs(terms: t.Sequence[ConstraintTerm]) -> t.Dict[str, t.List[float]]: """ Converts a sequence of terms into a dictionary mapping each term's ID to its coefficients, including the weight and, optionally, the offset. @@ -515,26 +522,26 @@ def get_binding_constraint(self, study: Study, bc_id: str) -> ConstraintOutput: A ConstraintOutput object representing the binding constraint with the specified ID. Raises: - BindingConstraintNotFoundError: If no binding constraint with the specified ID is found. + BindingConstraintNotFound: If no binding constraint with the specified ID is found. """ storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) config = file_study.tree.get(["input", "bindingconstraints", "bindingconstraints"]) - constraints_by_id: Dict[str, ConstraintOutput] = CaseInsensitiveDict() # type: ignore + constraints_by_id: t.Dict[str, ConstraintOutput] = CaseInsensitiveDict() # type: ignore for constraint in config.values(): constraint_config = self.constraint_model_adapter(constraint, int(study.version)) constraints_by_id[constraint_config.id] = constraint_config if bc_id not in constraints_by_id: - raise BindingConstraintNotFoundError(f"Binding constraint '{bc_id}' not found") + raise BindingConstraintNotFound(f"Binding constraint '{bc_id}' not found") return constraints_by_id[bc_id] def get_binding_constraints( self, study: Study, filters: ConstraintFilters = ConstraintFilters() - ) -> Sequence[ConstraintOutput]: + ) -> t.Sequence[ConstraintOutput]: """ Retrieves all binding constraints within a given study, optionally filtered by specific criteria. 
@@ -552,7 +559,7 @@ def get_binding_constraints( filtered_constraints = list(filter(lambda c: filters.match_filters(c), outputs)) return filtered_constraints - def get_grouped_constraints(self, study: Study) -> Mapping[str, Sequence[ConstraintOutput]]: + def get_grouped_constraints(self, study: Study) -> t.Mapping[str, t.Sequence[ConstraintOutput]]: """ Retrieves and groups all binding constraints by their group names within a given study. @@ -581,7 +588,7 @@ def get_grouped_constraints(self, study: Study) -> Mapping[str, Sequence[Constra return grouped_constraints - def get_constraints_by_group(self, study: Study, group_name: str) -> Sequence[ConstraintOutput]: + def get_constraints_by_group(self, study: Study, group_name: str) -> t.Sequence[ConstraintOutput]: """ Retrieve all binding constraints belonging to a specified group within a study. @@ -593,12 +600,12 @@ def get_constraints_by_group(self, study: Study, group_name: str) -> Sequence[Co A list of ConstraintOutput objects that belong to the specified group. Raises: - BindingConstraintNotFoundError: If the specified group name is not found among the constraint groups. + BindingConstraintNotFound: If the specified group name is not found among the constraint groups. """ grouped_constraints = self.get_grouped_constraints(study) if group_name not in grouped_constraints: - raise BindingConstraintNotFoundError(f"Group '{group_name}' not found") + raise BindingConstraintNotFound(f"Group '{group_name}' not found") return grouped_constraints[group_name] @@ -619,14 +626,14 @@ def validate_constraint_group(self, study: Study, group_name: str) -> bool: True if the group exists and the constraints within the group are valid; False otherwise. Raises: - BindingConstraintNotFoundError: If no matching group name is found in a case-insensitive manner. + BindingConstraintNotFound: If no matching group name is found in a case-insensitive manner. """ storage_service = self.storage_service.get_storage(study) file_study = storage_service.get_raw(study) grouped_constraints = self.get_grouped_constraints(study) if group_name not in grouped_constraints: - raise BindingConstraintNotFoundError(f"Group '{group_name}' not found") + raise BindingConstraintNotFound(f"Group '{group_name}' not found") constraints = grouped_constraints[group_name] return _validate_binding_constraints(file_study, constraints) @@ -757,6 +764,29 @@ def update_binding_constraint( upd_constraint[field] = getattr(data, field) or getattr(existing_constraint, field) return self.constraint_model_adapter(upd_constraint, study_version) + def update_binding_constraints( + self, + study: Study, + bcs_by_ids: t.Mapping[str, ConstraintInput], + ) -> t.Mapping[str, ConstraintOutput]: + """ + Updates multiple binding constraints within a study. + + Args: + study: The study from which to update the constraints. + bcs_by_ids: A mapping of binding constraint IDs to their updated configurations. + + Returns: + A dictionary of the updated binding constraints, indexed by their IDs. + + Raises: + BindingConstraintNotFound: If any of the specified binding constraint IDs are not found. + """ + updated_constraints = {} + for bc_id, data in bcs_by_ids.items(): + updated_constraints[bc_id] = self.update_binding_constraint(study, bc_id, data) + return updated_constraints + def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> None: """ Removes a binding constraint from a study. 
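`update_binding_constraints` delegates to `update_binding_constraint`, which resolves IDs through `get_binding_constraint`; that method indexes constraints in a `CaseInsensitiveDict` (see above), so user-supplied IDs match regardless of case. A small illustration of that behavior (the payload dict is made up, not a real `ConstraintOutput`):

```python
from requests.utils import CaseInsensitiveDict

constraints_by_id = CaseInsensitiveDict()
constraints_by_id["BC_1"] = {"name": "BC 1"}

# Both membership tests and item access ignore the key's case,
# so a user-supplied ID like "bc_1" still finds "BC_1".
assert "bc_1" in constraints_by_id
assert constraints_by_id["Bc_1"] == {"name": "BC 1"}
```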
@@ -766,7 +796,7 @@ def remove_binding_constraint(self, study: Study, binding_constraint_id: str) -> None:
             binding_constraint_id: The ID of the binding constraint to remove.

         Raises:
-            BindingConstraintNotFoundError: If no binding constraint with the specified ID is found.
+            BindingConstraintNotFound: If no binding constraint with the specified ID is found.
         """
         # Check the existence of the binding constraint before removing it
         bc = self.get_binding_constraint(study, binding_constraint_id)
@@ -860,10 +890,14 @@ def remove_constraint_term(
     ) -> None:
         return self.update_constraint_term(study, binding_constraint_id, term_id)  # type: ignore

+    @staticmethod
+    def get_table_schema() -> JSON:
+        return ConstraintOutput870.schema()
+

 def _replace_matrices_according_to_frequency_and_version(
-    data: ConstraintInput, version: int, args: Dict[str, Any]
-) -> Dict[str, Any]:
+    data: ConstraintInput, version: int, args: t.Dict[str, t.Any]
+) -> t.Dict[str, t.Any]:
     if version < 870:
         if "values" not in args:
             matrix = {
@@ -884,7 +918,7 @@ def _replace_matrices_according_to_frequency_and_version(
     return args


-def find_constraint_term_id(constraints_term: Sequence[ConstraintTerm], constraint_term_id: str) -> int:
+def find_constraint_term_id(constraints_term: t.Sequence[ConstraintTerm], constraint_term_id: str) -> int:
     try:
         index = [elm.id for elm in constraints_term].index(constraint_term_id)
         return index
@@ -892,7 +926,7 @@ def find_constraint_term_id(constraints_term: t.Sequence[ConstraintTerm], constraint_term_id: str) -> int:
         return -1


-def check_attributes_coherence(data: Union[ConstraintCreation, ConstraintInput], study_version: int) -> None:
+def check_attributes_coherence(data: t.Union[ConstraintCreation, ConstraintInput], study_version: int) -> None:
     if study_version < 870:
         if data.group:
             raise InvalidFieldForVersionError(
diff --git a/antarest/study/business/link_management.py b/antarest/study/business/link_management.py
index 971b0ca376..375a539fd8 100644
--- a/antarest/study/business/link_management.py
+++ b/antarest/study/business/link_management.py
@@ -1,12 +1,19 @@
-from typing import Any, Dict, List, Optional
+import typing as t

 from pydantic import BaseModel

+from antarest.core.exceptions import ConfigFileNotFound
+from antarest.core.model import JSON
+from antarest.study.business.all_optional_meta import AllOptionalMetaclass, camel_case_model
 from antarest.study.business.utils import execute_or_add_commands
-from antarest.study.model import Study
+from antarest.study.model import RawStudy
+from antarest.study.storage.rawstudy.model.filesystem.config.links import LinkProperties
 from antarest.study.storage.storage_service import StudyStorageService
 from antarest.study.storage.variantstudy.model.command.create_link import CreateLink
 from antarest.study.storage.variantstudy.model.command.remove_link import RemoveLink
+from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig
+
+_ALL_LINKS_PATH = "input/links"


 class LinkUIDTO(BaseModel):
@@ -18,37 +25,29 @@ class LinkUIDTO(BaseModel):
 class LinkInfoDTO(BaseModel):
     area1: str
     area2: str
-    ui: Optional[LinkUIDTO] = None
-
-
-class GenericElement(BaseModel):
-    id: str
-    name: str
-
-
-class GenericItem(BaseModel):
-    element: GenericElement
-    item_list: List[GenericElement]
+    ui: t.Optional[LinkUIDTO] = None


-class AllCLustersAndLinks(BaseModel):
-    links: List[GenericItem]
-    clusters: List[GenericItem]
+@camel_case_model
+class LinkOutput(LinkProperties, metaclass=AllOptionalMetaclass, use_none=True):
+    """
+    DTO object used to get the link information.
+    """


 class LinkManager:
     def __init__(self, storage_service: StudyStorageService) -> None:
         self.storage_service = storage_service

-    def get_all_links(self, study: Study, with_ui: bool = False) -> List[LinkInfoDTO]:
+    def get_all_links(self, study: RawStudy, with_ui: bool = False) -> t.List[LinkInfoDTO]:
         file_study = self.storage_service.get_storage(study).get_raw(study)
         result = []

         for area_id, area in file_study.config.areas.items():
-            links_config: Optional[Dict[str, Any]] = None
+            links_config: t.Optional[t.Dict[str, t.Any]] = None
             if with_ui:
                 links_config = file_study.tree.get(["input", "links", area_id, "properties"])
             for link in area.links:
-                ui_info: Optional[LinkUIDTO] = None
+                ui_info: t.Optional[LinkUIDTO] = None
                 if with_ui and links_config and link in links_config:
                     ui_info = LinkUIDTO(
                         color=f"{links_config[link].get('colorr', '163')},{links_config[link].get('colorg', '163')},{links_config[link].get('colorb', '163')}",
@@ -59,7 +58,7 @@ def get_all_links(self, study: RawStudy, with_ui: bool = False) -> t.List[LinkInfoDTO]:

         return result

-    def create_link(self, study: Study, link_creation_info: LinkInfoDTO) -> LinkInfoDTO:
+    def create_link(self, study: RawStudy, link_creation_info: LinkInfoDTO) -> LinkInfoDTO:
         storage_service = self.storage_service.get_storage(study)
         file_study = storage_service.get_raw(study)
         command = CreateLink(
@@ -73,7 +72,7 @@ def create_link(self, study: RawStudy, link_creation_info: LinkInfoDTO) -> LinkInfoDTO:
             area2=link_creation_info.area2,
         )

-    def delete_link(self, study: Study, area1_id: str, area2_id: str) -> None:
+    def delete_link(self, study: RawStudy, area1_id: str, area2_id: str) -> None:
         file_study = self.storage_service.get_storage(study).get_raw(study)
         command = RemoveLink(
             area1=area1_id,
@@ -81,3 +80,67 @@ def delete_link(self, study: RawStudy, area1_id: str, area2_id: str) -> None:
             command_context=self.storage_service.variant_study_service.command_factory.command_context,
         )
         execute_or_add_commands(study, file_study, [command], self.storage_service)
+
+    def get_all_links_props(self, study: RawStudy) -> t.Mapping[t.Tuple[str, str], LinkOutput]:
+        """
+        Retrieves the properties of all links in the study.
+
+        Args:
+            study: The raw study object.
+        Returns:
+            A mapping of link IDs `(area1_id, area2_id)` to link properties.
+        Raises:
+            ConfigFileNotFound: if a configuration file is not found.
+        """
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+
+        # Get the link information from the `input/links/{area1}/properties.ini` file.
+        path = _ALL_LINKS_PATH
+        try:
+            links_cfg = file_study.tree.get(path.split("/"), depth=5)
+        except KeyError:
+            raise ConfigFileNotFound(path) from None
+
+        # links_cfg contains a dictionary where the keys are the area IDs,
+        # and the values are objects that can be converted to `LinkProperties`.
+        links_by_ids = {}
+        for area1_id, entries in links_cfg.items():
+            property_map = entries.get("properties") or {}
+            for area2_id, properties_cfg in property_map.items():
+                # Sort the pair into fresh names: reassigning `area1_id` here
+                # would clobber the outer loop variable for later iterations.
+                id1, id2 = sorted([area1_id, area2_id])
+                properties = LinkProperties(**properties_cfg)
+                links_by_ids[(id1, id2)] = LinkOutput(**properties.dict(by_alias=False))
+
+        return links_by_ids
+
+    def update_links_props(
+        self,
+        study: RawStudy,
+        update_links_by_ids: t.Mapping[t.Tuple[str, str], LinkOutput],
+    ) -> t.Mapping[t.Tuple[str, str], LinkOutput]:
+        old_links_by_ids = self.get_all_links_props(study)
+        new_links_by_ids = {}
+        file_study = self.storage_service.get_storage(study).get_raw(study)
+        commands = []
+        for (area1, area2), update_link_dto in update_links_by_ids.items():
+            # Update the link properties.
+            old_link_dto = old_links_by_ids[(area1, area2)]
+            new_link_dto = old_link_dto.copy(update=update_link_dto.dict(by_alias=False, exclude_none=True))
+            new_links_by_ids[(area1, area2)] = new_link_dto
+
+            # Convert the DTO to a configuration object and update the configuration file.
+            properties = LinkProperties(**new_link_dto.dict(by_alias=False))
+            path = f"{_ALL_LINKS_PATH}/{area1}/properties"
+            cmd = UpdateConfig(
+                target=path,
+                data={area2: properties.to_config()},
+                command_context=self.storage_service.variant_study_service.command_factory.command_context,
+            )
+            commands.append(cmd)
+
+        execute_or_add_commands(study, file_study, commands, self.storage_service)
+        return new_links_by_ids
+
+    @staticmethod
+    def get_table_schema() -> JSON:
+        return LinkOutput.schema()
diff --git a/antarest/study/business/table_mode_management.py b/antarest/study/business/table_mode_management.py
index 23d8674781..cbbf5358cc 100644
--- a/antarest/study/business/table_mode_management.py
+++ b/antarest/study/business/table_mode_management.py
@@ -1,582 +1,259 @@
-from typing import Any, Dict, List, Optional, TypedDict, Union
-
-from pydantic import StrictFloat
-from pydantic.types import StrictBool, StrictInt, StrictStr
-
-from antarest.study.business.areas.properties_management import AdequacyPatchMode
-from antarest.study.business.areas.renewable_management import TimeSeriesInterpretation
-from antarest.study.business.binding_constraint_management import BindingConstraintManager
+import collections
+import typing as t
+
+import numpy as np
+import pandas as pd
+
+from antarest.core.model import JSON
+from antarest.study.business.area_management import AreaManager, AreaOutput
+from antarest.study.business.areas.renewable_management import RenewableClusterInput, RenewableManager
+from antarest.study.business.areas.st_storage_management import STStorageInput, STStorageManager
+from antarest.study.business.areas.thermal_management import ThermalClusterInput, ThermalManager
+from antarest.study.business.binding_constraint_management import BindingConstraintManager, ConstraintInput
 from antarest.study.business.enum_ignore_case import EnumIgnoreCase
-from antarest.study.business.utils import FormFieldsBaseModel, execute_or_add_commands
-from antarest.study.common.default_values import FilteringOptions, LinkProperties, NodalOptimization
+from antarest.study.business.link_management import LinkManager, LinkOutput
 from antarest.study.model import RawStudy
-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
-from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior
-from 
antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.storage_service import StudyStorageService -from antarest.study.storage.variantstudy.model.command.icommand import ICommand -from antarest.study.storage.variantstudy.model.command.update_binding_constraint import UpdateBindingConstraint -from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig - - -class TableTemplateType(EnumIgnoreCase): - AREA = "area" - LINK = "link" - CLUSTER = "cluster" - RENEWABLE = "renewable" - BINDING_CONSTRAINT = "binding constraint" - - -class AssetType(EnumIgnoreCase): - AC = "ac" - DC = "dc" - GAZ = "gaz" - VIRT = "virt" - OTHER = "other" - - -class TransmissionCapacity(EnumIgnoreCase): - INFINITE = "infinite" - IGNORE = "ignore" - ENABLED = "enabled" - - -class BindingConstraintOperator(EnumIgnoreCase): - LESS = "less" - GREATER = "greater" - BOTH = "both" - EQUAL = "equal" - - -class AreaColumns(FormFieldsBaseModel): - # Optimization - Nodal optimization - non_dispatchable_power: Optional[StrictBool] - dispatchable_hydro_power: Optional[StrictBool] - other_dispatchable_power: Optional[StrictBool] - average_unsupplied_energy_cost: Optional[Union[StrictFloat, StrictInt]] - spread_unsupplied_energy_cost: Optional[Union[StrictFloat, StrictInt]] - average_spilled_energy_cost: Optional[Union[StrictFloat, StrictInt]] - spread_spilled_energy_cost: Optional[Union[StrictFloat, StrictInt]] - # Optimization - Filtering - filter_synthesis: Optional[StrictStr] - filter_year_by_year: Optional[StrictStr] - # Adequacy patch - adequacy_patch_mode: Optional[AdequacyPatchMode] - - -class LinkColumns(FormFieldsBaseModel): - hurdles_cost: Optional[StrictBool] - loop_flow: Optional[StrictBool] - use_phase_shifter: Optional[StrictBool] - transmission_capacities: Optional[TransmissionCapacity] - asset_type: Optional[AssetType] - link_style: Optional[StrictStr] - link_width: Optional[StrictInt] - display_comments: Optional[StrictBool] - filter_synthesis: Optional[StrictStr] - filter_year_by_year: Optional[StrictStr] - - -class ClusterColumns(FormFieldsBaseModel): - group: Optional[StrictStr] - enabled: Optional[StrictBool] - must_run: Optional[StrictBool] - unit_count: Optional[StrictInt] - nominal_capacity: Optional[StrictInt] - min_stable_power: Optional[StrictInt] - spinning: Optional[StrictInt] - min_up_time: Optional[StrictInt] - min_down_time: Optional[StrictInt] - co2: Optional[StrictInt] - marginal_cost: Optional[StrictInt] - fixed_cost: Optional[StrictInt] - startup_cost: Optional[StrictInt] - market_bid_cost: Optional[StrictInt] - spread_cost: Optional[StrictInt] - ts_gen: Optional[LocalTSGenerationBehavior] - volatility_forced: Optional[StrictInt] - volatility_planned: Optional[StrictInt] - law_forced: Optional[LawOption] - law_planned: Optional[LawOption] - +from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError -class RenewableColumns(FormFieldsBaseModel): - group: Optional[StrictStr] - ts_interpretation: Optional[TimeSeriesInterpretation] - enabled: Optional[StrictBool] - unit_count: Optional[StrictInt] - nominal_capacity: Optional[StrictInt] +_TableIndex = str # row name +_TableColumn = str # column name +_CellValue = t.Any # cell value (str, int, float, bool, enum, etc.) 
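+# Table data is exchanged as a nested mapping of rows to columns, for example
+# (hypothetical "areas" table):
+#   {"north": {"adequacyPatchMode": "outside"}, "south": {"adequacyPatchMode": "inside"}}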
+TableDataDTO = t.Mapping[_TableIndex, t.Mapping[_TableColumn, _CellValue]] -class BindingConstraintColumns(FormFieldsBaseModel): - type: Optional[BindingConstraintFrequency] - operator: Optional[BindingConstraintOperator] - enabled: Optional[StrictBool] - group: Optional[StrictStr] - - -class ColumnInfo(TypedDict): - path: str - default_value: Any - - -class PathVars(TypedDict, total=False): - # Area - id: str - # Link - area1: str - area2: str - # Cluster, Renewable - area: str - cluster: str - - -AREA_PATH = "input/areas/{area}" -THERMAL_PATH = "input/thermal/areas" -LINK_GLOB_PATH = "input/links/{area1}/properties" -LINK_PATH = f"{LINK_GLOB_PATH}/{{area2}}" -CLUSTER_GLOB_PATH = "input/thermal/clusters/{area}/list" -CLUSTER_PATH = f"{CLUSTER_GLOB_PATH}/{{cluster}}" -RENEWABLE_GLOB_PATH = "input/renewables/clusters/{area}/list" -RENEWABLE_PATH = f"{RENEWABLE_GLOB_PATH}/{{cluster}}" -BINDING_CONSTRAINT_PATH = "input/bindingconstraints/bindingconstraints" - -FIELDS_INFO_BY_TYPE: Dict[TableTemplateType, Dict[str, ColumnInfo]] = { - TableTemplateType.AREA: { - "non_dispatchable_power": { - "path": f"{AREA_PATH}/optimization/nodal optimization/non-dispatchable-power", - "default_value": NodalOptimization.NON_DISPATCHABLE_POWER, - }, - "dispatchable_hydro_power": { - "path": f"{AREA_PATH}/optimization/nodal optimization/dispatchable-hydro-power", - "default_value": NodalOptimization.DISPATCHABLE_HYDRO_POWER, - }, - "other_dispatchable_power": { - "path": f"{AREA_PATH}/optimization/nodal optimization/other-dispatchable-power", - "default_value": NodalOptimization.OTHER_DISPATCHABLE_POWER, - }, - "average_unsupplied_energy_cost": { - "path": f"{THERMAL_PATH}/unserverdenergycost/{{area}}", - "default_value": NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, - }, - "spread_unsupplied_energy_cost": { - "path": f"{AREA_PATH}/optimization/nodal optimization/spread-unsupplied-energy-cost", - "default_value": NodalOptimization.SPREAD_UNSUPPLIED_ENERGY_COST, - }, - "average_spilled_energy_cost": { - "path": f"{THERMAL_PATH}/spilledenergycost/{{area}}", - "default_value": NodalOptimization.SPREAD_SPILLED_ENERGY_COST, - }, - "spread_spilled_energy_cost": { - "path": f"{AREA_PATH}/optimization/nodal optimization/spread-spilled-energy-cost", - "default_value": NodalOptimization.SPREAD_SPILLED_ENERGY_COST, - }, - "filter_synthesis": { - "path": f"{AREA_PATH}/optimization/filtering/filter-synthesis", - "default_value": FilteringOptions.FILTER_SYNTHESIS, - }, - "filter_year_by_year": { - "path": f"{AREA_PATH}/optimization/filtering/filter-year-by-year", - "default_value": FilteringOptions.FILTER_YEAR_BY_YEAR, - }, - "adequacy_patch_mode": { - "path": f"{AREA_PATH}/adequacy_patch/adequacy-patch/adequacy-patch-mode", - "default_value": AdequacyPatchMode.OUTSIDE.value, - }, - }, - TableTemplateType.LINK: { - "hurdles_cost": { - "path": f"{LINK_PATH}/hurdles-cost", - "default_value": LinkProperties.HURDLES_COST, - }, - "loop_flow": { - "path": f"{LINK_PATH}/loop-flow", - "default_value": LinkProperties.LOOP_FLOW, - }, - "use_phase_shifter": { - "path": f"{LINK_PATH}/use-phase-shifter", - "default_value": LinkProperties.USE_PHASE_SHIFTER, - }, - "transmission_capacities": { - "path": f"{LINK_PATH}/transmission-capacities", - "default_value": LinkProperties.TRANSMISSION_CAPACITIES, - }, - "asset_type": { - "path": f"{LINK_PATH}/asset-type", - "default_value": LinkProperties.ASSET_TYPE, - }, - "link_style": { - "path": f"{LINK_PATH}/link-style", - "default_value": LinkProperties.LINK_STYLE, - }, - "link_width": { - 
"path": f"{LINK_PATH}/link-width", - "default_value": LinkProperties.LINK_WIDTH, - }, - "display_comments": { - "path": f"{LINK_PATH}/display-comments", - "default_value": LinkProperties.DISPLAY_COMMENTS, - }, - "filter_synthesis": { - "path": f"{LINK_PATH}/filter-synthesis", - "default_value": FilteringOptions.FILTER_SYNTHESIS, - }, - "filter_year_by_year": { - "path": f"{LINK_PATH}/filter-year-by-year", - "default_value": FilteringOptions.FILTER_YEAR_BY_YEAR, - }, - }, - TableTemplateType.CLUSTER: { - "group": { - "path": f"{CLUSTER_PATH}/group", - "default_value": "", - }, - "enabled": { - "path": f"{CLUSTER_PATH}/enabled", - "default_value": True, - }, - "must_run": { - "path": f"{CLUSTER_PATH}/must-run", - "default_value": False, - }, - "unit_count": { - "path": f"{CLUSTER_PATH}/unitcount", - "default_value": 0, - }, - "nominal_capacity": { - "path": f"{CLUSTER_PATH}/nominalcapacity", - "default_value": 0, - }, - "min_stable_power": { - "path": f"{CLUSTER_PATH}/min-stable-power", - "default_value": 0, - }, - "spinning": { - "path": f"{CLUSTER_PATH}/spinning", - "default_value": 0, - }, - "min_up_time": { - "path": f"{CLUSTER_PATH}/min-up-time", - "default_value": 1, - }, - "min_down_time": { - "path": f"{CLUSTER_PATH}/min-down-time", - "default_value": 1, - }, - "co2": { - "path": f"{CLUSTER_PATH}/co2", - "default_value": 0, - }, - "marginal_cost": { - "path": f"{CLUSTER_PATH}/marginal-cost", - "default_value": 0, - }, - "fixed_cost": { - "path": f"{CLUSTER_PATH}/fixed-cost", - "default_value": 0, - }, - "startup_cost": { - "path": f"{CLUSTER_PATH}/startup-cost", - "default_value": 0, - }, - "market_bid_cost": { - "path": f"{CLUSTER_PATH}/market-bid-cost", - "default_value": 0, - }, - "spread_cost": { - "path": f"{CLUSTER_PATH}/spread-cost", - "default_value": 0, - }, - "ts_gen": { - "path": f"{CLUSTER_PATH}/gen-ts", - "default_value": LocalTSGenerationBehavior.USE_GLOBAL.value, - }, - "volatility_forced": { - "path": f"{CLUSTER_PATH}/volatility.forced", - "default_value": 0, - }, - "volatility_planned": { - "path": f"{CLUSTER_PATH}/volatility.planned", - "default_value": 0, - }, - "law_forced": { - "path": f"{CLUSTER_PATH}/law.forced", - "default_value": LawOption.UNIFORM.value, - }, - "law_planned": { - "path": f"{CLUSTER_PATH}/law.planned", - "default_value": LawOption.UNIFORM.value, - }, - }, - TableTemplateType.RENEWABLE: { - "group": { - "path": f"{RENEWABLE_PATH}/group", - "default_value": "", - }, - "ts_interpretation": { - "path": f"{RENEWABLE_PATH}/ts-interpretation", - "default_value": TimeSeriesInterpretation.POWER_GENERATION.value, - }, - "enabled": { - "path": f"{RENEWABLE_PATH}/enabled", - "default_value": True, - }, - "unit_count": { - "path": f"{RENEWABLE_PATH}/unitcount", - "default_value": 0, - }, - "nominal_capacity": { - "path": f"{RENEWABLE_PATH}/nominalcapacity", - "default_value": 0, - }, - }, - TableTemplateType.BINDING_CONSTRAINT: { - "type": { - "path": f"{BINDING_CONSTRAINT_PATH}/type", - "default_value": BindingConstraintFrequency.HOURLY.value, - }, - "operator": { - "path": f"{BINDING_CONSTRAINT_PATH}/operator", - "default_value": BindingConstraintOperator.LESS.value, - }, - "enabled": { - "path": f"{BINDING_CONSTRAINT_PATH}/enabled", - "default_value": True, - }, - "group": { - "path": f"{BINDING_CONSTRAINT_PATH}/group", - "default_value": None, - }, - }, -} - -COLUMNS_MODELS_BY_TYPE = { - TableTemplateType.AREA: AreaColumns, - TableTemplateType.LINK: LinkColumns, - TableTemplateType.CLUSTER: ClusterColumns, - TableTemplateType.RENEWABLE: 
RenewableColumns, - TableTemplateType.BINDING_CONSTRAINT: BindingConstraintColumns, -} - -ColumnsModelTypes = Union[ - AreaColumns, - LinkColumns, - ClusterColumns, - RenewableColumns, - BindingConstraintColumns, -] - - -def _get_glob_object(file_study: FileStudy, table_type: TableTemplateType) -> Dict[str, Any]: +class TableModeType(EnumIgnoreCase): """ - Retrieves the fields of an object according to its type (area, link, thermal cluster...). - - Args: - file_study: A file study from which the configuration can be read. - table_type: Type of the object. - - Returns: - Dictionary containing the fields used in Table mode. - - Raises: - ChildNotFoundError: if one of the Area IDs is not found in the configuration. + Table types. + + This enum is used to define the different types of tables that can be created + by the user to leverage the editing capabilities of multiple objects at once. + + Attributes: + AREA: Area table. + LINK: Link table. + THERMAL: Thermal clusters table. + RENEWABLE: Renewable clusters table. + ST_STORAGE: Short-Term Storages table. + BINDING_CONSTRAINT: Binding constraints table. """ - # sourcery skip: extract-method - if table_type == TableTemplateType.AREA: - info_map: Dict[str, Any] = file_study.tree.get(url=AREA_PATH.format(area="*").split("/"), depth=3) - area_ids = list(file_study.config.areas) - # If there is only one ID in the `area_ids`, the result returned from - # the `file_study.tree.get` call will be a single object. - # On the other hand, if there are multiple values in `area_ids`, - # the result will be a dictionary where the keys are the IDs, - # and the values are the corresponding objects. - if len(area_ids) == 1: - info_map = {area_ids[0]: info_map} - # Add thermal fields in info_map - thermal_fields = file_study.tree.get(THERMAL_PATH.split("/")) - for field, field_props in thermal_fields.items(): - for area_id, value in field_props.items(): - if area_id in info_map: - info_map[area_id][field] = value - return info_map - url = { - TableTemplateType.LINK: LINK_GLOB_PATH.format(area1="*").split("/"), - TableTemplateType.CLUSTER: CLUSTER_GLOB_PATH.format(area="*").split("/"), - TableTemplateType.RENEWABLE: RENEWABLE_GLOB_PATH.format(area="*").split("/"), - TableTemplateType.BINDING_CONSTRAINT: BINDING_CONSTRAINT_PATH.split("/"), - }[table_type] - - return file_study.tree.get(url) + AREA = "areas" + LINK = "links" + THERMAL = "thermals" + RENEWABLE = "renewables" + # Avoid "storages" because we may have "lt-storages" (long-term storages) in the future + ST_STORAGE = "st-storages" + # Avoid "constraints" because we may have other kinds of constraints in the future + BINDING_CONSTRAINT = "binding-constraints" + + @classmethod + def _missing_(cls, value: object) -> t.Optional["EnumIgnoreCase"]: + if isinstance(value, str): + # handle aliases of old table types + value = value.upper() + aliases = { + "AREA": cls.AREA, + "LINK": cls.LINK, + "CLUSTER": cls.THERMAL, + "RENEWABLE": cls.RENEWABLE, + "BINDING CONSTRAINT": cls.BINDING_CONSTRAINT, + } + if value in aliases: + return aliases[value] + return super()._missing_(value) class TableModeManager: - def __init__(self, storage_service: StudyStorageService) -> None: - self.storage_service = storage_service - - def get_table_data( + def __init__( self, - study: RawStudy, - table_type: TableTemplateType, - columns: List[str], - ) -> Dict[str, ColumnsModelTypes]: - file_study = self.storage_service.get_storage(study).get_raw(study) - columns_model = COLUMNS_MODELS_BY_TYPE[table_type] - fields_info = 
FIELDS_INFO_BY_TYPE[table_type] - glob_object = _get_glob_object(file_study, table_type) - - def get_column_value(col: str, data: Dict[str, Any]) -> Any: - f_info = fields_info[col] - relative_path = TableModeManager.__get_relative_path(f_info["path"], table_type) - return TableModeManager.__get_value( - relative_path, - data, - f_info["default_value"], - ) - - if table_type == TableTemplateType.AREA: - return { - area_id: columns_model.construct(**{col: get_column_value(col, data) for col in columns}) # type: ignore - for area_id, data in glob_object.items() + area_manager: AreaManager, + link_manager: LinkManager, + thermal_manager: ThermalManager, + renewable_manager: RenewableManager, + st_storage_manager: STStorageManager, + binding_constraint_manager: BindingConstraintManager, + ) -> None: + self._area_manager = area_manager + self._link_manager = link_manager + self._thermal_manager = thermal_manager + self._renewable_manager = renewable_manager + self._st_storage_manager = st_storage_manager + self._binding_constraint_manager = binding_constraint_manager + + def _get_table_data_unsafe(self, study: RawStudy, table_type: TableModeType) -> TableDataDTO: + if table_type == TableModeType.AREA: + areas_map = self._area_manager.get_all_area_props(study) + data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()} + elif table_type == TableModeType.LINK: + links_map = self._link_manager.get_all_links_props(study) + data = { + f"{area1_id} / {area2_id}": link.dict(by_alias=True) for (area1_id, area2_id), link in links_map.items() } - - if table_type == TableTemplateType.BINDING_CONSTRAINT: - return { - data["id"]: columns_model.construct(**{col: get_column_value(col, data) for col in columns}) # type: ignore - for data in glob_object.values() + elif table_type == TableModeType.THERMAL: + thermals_by_areas = self._thermal_manager.get_all_thermals_props(study) + data = { + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + for area_id, thermals_by_ids in thermals_by_areas.items() + for cluster_id, cluster in thermals_by_ids.items() } + elif table_type == TableModeType.RENEWABLE: + renewables_by_areas = self._renewable_manager.get_all_renewables_props(study) + data = { + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + for area_id, renewables_by_ids in renewables_by_areas.items() + for cluster_id, cluster in renewables_by_ids.items() + } + elif table_type == TableModeType.ST_STORAGE: + storages_by_areas = self._st_storage_manager.get_all_storages_props(study) + data = { + f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"}) + for area_id, storages_by_ids in storages_by_areas.items() + for cluster_id, cluster in storages_by_ids.items() + } + elif table_type == TableModeType.BINDING_CONSTRAINT: + bc_seq = self._binding_constraint_manager.get_binding_constraints(study) + data = {bc.id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc in bc_seq} + else: # pragma: no cover + raise NotImplementedError(f"Table type {table_type} not implemented") + return data - obj: Dict[str, Any] = {} - for id_1, value_1 in glob_object.items(): - for id_2, value_2 in value_1.items(): - obj[f"{id_1} / {id_2}"] = columns_model.construct( - **{col: get_column_value(col, value_2) for col in columns} - ) - - return obj - - def set_table_data( + def get_table_data( self, study: RawStudy, - table_type: TableTemplateType, - data: Dict[str, ColumnsModelTypes], - ) -> None: - commands: 
List[ICommand] = [] - bindings_by_id = None - command_context = self.storage_service.variant_study_service.command_factory.command_context - - for key, columns in data.items(): - path_vars = TableModeManager.__get_path_vars_from_key(table_type, key) - - if table_type == TableTemplateType.BINDING_CONSTRAINT: - file_study = self.storage_service.get_storage(study).get_raw(study) - bindings_by_id = bindings_by_id or { - binding["id"]: binding for binding in _get_glob_object(file_study, table_type).values() - } - binding_id = path_vars["id"] - current_binding = bindings_by_id.get(binding_id, None) - - if current_binding: - col_values = columns.dict(exclude_none=True) - current_binding_dto = BindingConstraintManager.constraint_model_adapter( - current_binding, int(study.version) - ) - - commands.append( - UpdateBindingConstraint( - id=binding_id, - enabled=col_values.get("enabled", current_binding_dto.enabled), - time_step=col_values.get("type", current_binding_dto.time_step), - operator=col_values.get("operator", current_binding_dto.operator), - coeffs=BindingConstraintManager.terms_to_coeffs(current_binding_dto.terms), - command_context=command_context, - ) - ) - else: - for col, val in columns.__iter__(): - if val is not None: - commands.append( - UpdateConfig( - target=TableModeManager.__get_column_path(table_type, path_vars, col), - data=val, - command_context=command_context, - ) - ) - - if commands: - file_study = self.storage_service.get_storage(study).get_raw(study) - execute_or_add_commands(study, file_study, commands, self.storage_service) - - @staticmethod - def __get_value(path: List[str], data: Dict[str, Any], default_value: Any) -> Any: - if len(path): - return TableModeManager.__get_value(path[1:], data.get(path[0], {}), default_value) - return data if data != {} else default_value - - @staticmethod - def __get_relative_path( - path: str, - table_type: TableTemplateType, - ) -> List[str]: - base_path = "" - path_arr = path.split("/") - - if table_type == TableTemplateType.AREA: - if path.startswith(THERMAL_PATH): - base_path = THERMAL_PATH - # Remove {area} - path_arr = path_arr[:-1] - else: - base_path = AREA_PATH - elif table_type == TableTemplateType.LINK: - base_path = LINK_PATH - elif table_type == TableTemplateType.CLUSTER: - base_path = CLUSTER_PATH - elif table_type == TableTemplateType.RENEWABLE: - base_path = RENEWABLE_PATH - elif table_type == TableTemplateType.BINDING_CONSTRAINT: - base_path = BINDING_CONSTRAINT_PATH - - return path_arr[len(base_path.split("/")) :] - - @staticmethod - def __get_column_path( - table_type: TableTemplateType, - path_vars: PathVars, - column: str, - ) -> str: - path = FIELDS_INFO_BY_TYPE[table_type][column]["path"] - - if table_type == TableTemplateType.AREA: - return path.format(area=path_vars["id"]) - if table_type == TableTemplateType.LINK: - return path.format(area1=path_vars["area1"], area2=path_vars["area2"]) - if table_type in [ - TableTemplateType.CLUSTER, - TableTemplateType.RENEWABLE, - ]: - return path.format(area=path_vars["area"], cluster=path_vars["cluster"]) - - return path - - @staticmethod - def __get_path_vars_from_key( - table_type: TableTemplateType, - key: str, - ) -> PathVars: - if table_type in [ - TableTemplateType.AREA, - TableTemplateType.BINDING_CONSTRAINT, - ]: - return PathVars(id=key) - if table_type == TableTemplateType.LINK: - area1, area2 = [v.strip() for v in key.split("/")] - return PathVars(area1=area1, area2=area2) - if table_type in [ - TableTemplateType.CLUSTER, - TableTemplateType.RENEWABLE, - ]: - 
area, cluster = [v.strip() for v in key.split("/")]
-            return PathVars(area=area, cluster=cluster)
-
-        return PathVars()
+        table_type: TableModeType,
+        columns: t.Sequence[_TableColumn],
+    ) -> TableDataDTO:
+        """
+        Get the table data of the specified type for the given study.
+
+        Args:
+            study: The study to get the table data from.
+            table_type: The type of the table.
+            columns: The columns to include in the table. If empty, all columns are included.
+
+        Returns:
+            The table data as a dictionary of dictionaries, where keys are row names
+            and values are dictionaries mapping column names to cell values.
+        """
+        try:
+            data = self._get_table_data_unsafe(study, table_type)
+        except ChildNotFoundError:
+            # It's better to return an empty table than to raise a 404 error.
+            return {}
+
+        df = pd.DataFrame.from_dict(data, orient="index")  # type: ignore
+        if columns:
+            # Create a new dataframe with the listed columns.
+            df = pd.DataFrame(df, columns=columns)  # type: ignore
+
+        # According to the study version, some properties may not be present,
+        # so we need to drop columns that are all NaN.
+        df = df.dropna(axis=1, how="all")
+
+        # Convert NaN to `None` because it is not JSON-serializable
+        df.replace(np.nan, None, inplace=True)
+
+        return t.cast(TableDataDTO, df.to_dict(orient="index"))
+
+    def update_table_data(
+        self,
+        study: RawStudy,
+        table_type: TableModeType,
+        data: TableDataDTO,
+    ) -> TableDataDTO:
+        """
+        Update the properties of the objects in the study using the provided data.
+
+        Args:
+            study: The study to update the objects in.
+            table_type: The type of the table.
+            data: The new properties of the objects as a dictionary of dictionaries,
+                where keys are row names and values are dictionaries mapping column
+                names to cell values.
+
+        Returns:
+            The updated properties of the objects, including the unchanged ones.
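+
+            For example, a payload for the "links" table could look like
+            (hypothetical data)::
+
+                {"de / fr": {"hurdlesCost": True, "assetType": "dc"}}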
+        """
+        if table_type == TableModeType.AREA:
+            # Use `AreaOutput` to update the properties of the areas.
+            area_props_by_ids = {key: AreaOutput(**values) for key, values in data.items()}
+            areas_map = self._area_manager.update_areas_props(study, area_props_by_ids)
+            data = {area_id: area.dict(by_alias=True) for area_id, area in areas_map.items()}
+            return data
+        elif table_type == TableModeType.LINK:
+            links_map = {tuple(key.split(" / ")): LinkOutput(**values) for key, values in data.items()}
+            updated_map = self._link_manager.update_links_props(study, links_map)  # type: ignore
+            data = {
+                f"{area1_id} / {area2_id}": link.dict(by_alias=True)
+                for (area1_id, area2_id), link in updated_map.items()
+            }
+            return data
+        elif table_type == TableModeType.THERMAL:
+            thermals_by_areas: t.MutableMapping[str, t.MutableMapping[str, ThermalClusterInput]]
+            thermals_by_areas = collections.defaultdict(dict)
+            for key, values in data.items():
+                area_id, cluster_id = key.split(" / ")
+                thermals_by_areas[area_id][cluster_id] = ThermalClusterInput(**values)
+            thermals_map = self._thermal_manager.update_thermals_props(study, thermals_by_areas)
+            data = {
+                f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"})
+                for area_id, thermals_by_ids in thermals_map.items()
+                for cluster_id, cluster in thermals_by_ids.items()
+            }
+            return data
+        elif table_type == TableModeType.RENEWABLE:
+            renewables_by_areas: t.MutableMapping[str, t.MutableMapping[str, RenewableClusterInput]]
+            renewables_by_areas = collections.defaultdict(dict)
+            for key, values in data.items():
+                area_id, cluster_id = key.split(" / ")
+                renewables_by_areas[area_id][cluster_id] = RenewableClusterInput(**values)
+            renewables_map = self._renewable_manager.update_renewables_props(study, renewables_by_areas)
+            data = {
+                f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"})
+                for area_id, renewables_by_ids in renewables_map.items()
+                for cluster_id, cluster in renewables_by_ids.items()
+            }
+            return data
+        elif table_type == TableModeType.ST_STORAGE:
+            storages_by_areas: t.MutableMapping[str, t.MutableMapping[str, STStorageInput]]
+            storages_by_areas = collections.defaultdict(dict)
+            for key, values in data.items():
+                area_id, cluster_id = key.split(" / ")
+                storages_by_areas[area_id][cluster_id] = STStorageInput(**values)
+            storages_map = self._st_storage_manager.update_storages_props(study, storages_by_areas)
+            data = {
+                f"{area_id} / {cluster_id}": cluster.dict(by_alias=True, exclude={"id", "name"})
+                for area_id, storages_by_ids in storages_map.items()
+                for cluster_id, cluster in storages_by_ids.items()
+            }
+            return data
+        elif table_type == TableModeType.BINDING_CONSTRAINT:
+            bcs_by_ids = {key: ConstraintInput(**values) for key, values in data.items()}
+            bcs_map = self._binding_constraint_manager.update_binding_constraints(study, bcs_by_ids)
+            return {bc_id: bc.dict(by_alias=True, exclude={"id", "name", "terms"}) for bc_id, bc in bcs_map.items()}
+        else:  # pragma: no cover
+            raise NotImplementedError(f"Table type {table_type} not implemented")
+
+    def get_table_schema(self, table_type: TableModeType) -> JSON:
+        """
+        Get the JSON schema describing the columns of the table whose type is given as a parameter.
+
+        Args:
+            table_type: The type of the table.
+
+        Returns:
+            A JSON Schema giving the name, title and type of each column.
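+
+            For instance, a fragment of the "links" schema could look like
+            (hypothetical output)::
+
+                {"properties": {"hurdlesCost": {"title": "Hurdles Cost", "type": "boolean"}}}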
+ """ + if table_type == TableModeType.AREA: + return self._area_manager.get_table_schema() + elif table_type == TableModeType.LINK: + return self._link_manager.get_table_schema() + elif table_type == TableModeType.THERMAL: + return self._thermal_manager.get_table_schema() + elif table_type == TableModeType.RENEWABLE: + return self._renewable_manager.get_table_schema() + elif table_type == TableModeType.ST_STORAGE: + return self._st_storage_manager.get_table_schema() + elif table_type == TableModeType.BINDING_CONSTRAINT: + return self._binding_constraint_manager.get_table_schema() + else: # pragma: no cover + raise NotImplementedError(f"Table type {table_type} not implemented") diff --git a/antarest/study/business/timeseries_config_management.py b/antarest/study/business/timeseries_config_management.py index fac397b3be..418921e7f0 100644 --- a/antarest/study/business/timeseries_config_management.py +++ b/antarest/study/business/timeseries_config_management.py @@ -6,7 +6,7 @@ from antarest.study.business.enum_ignore_case import EnumIgnoreCase from antarest.study.business.utils import GENERAL_DATA_PATH, FormFieldsBaseModel, execute_or_add_commands from antarest.study.model import Study -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.storage_service import StudyStorageService from antarest.study.storage.variantstudy.model.command.update_config import UpdateConfig @@ -193,7 +193,7 @@ def __get_form_fields_for_type( input_ = general_data.get("input", {}) output = general_data.get("output", {}) - is_aggregated = file_study.config.enr_modelling == ENR_MODELLING.AGGREGATED.value + is_aggregated = file_study.config.enr_modelling == EnrModelling.AGGREGATED.value if ts_type == TSType.RENEWABLES and is_aggregated: return None diff --git a/antarest/study/service.py b/antarest/study/service.py index f6f04d4681..66b3b2ddad 100644 --- a/antarest/study/service.py +++ b/antarest/study/service.py @@ -49,7 +49,7 @@ from antarest.study.business.adequacy_patch_management import AdequacyPatchManager from antarest.study.business.advanced_parameters_management import AdvancedParamsManager from antarest.study.business.allocation_management import AllocationManager -from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaManager, AreaType, AreaUI +from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaManager, AreaType, UpdateAreaUi from antarest.study.business.areas.hydro_management import HydroManager from antarest.study.business.areas.properties_management import PropertiesManager from antarest.study.business.areas.renewable_management import RenewableManager @@ -104,6 +104,7 @@ StudySortBy, ) from antarest.study.storage.matrix_profile import adjust_matrix_columns_index +from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfigDTO from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError from antarest.study.storage.rawstudy.model.filesystem.ini_file_node import IniFileNode @@ -275,13 +276,20 @@ def __init__( self.thermal_manager = ThermalManager(self.storage_service) self.st_storage_manager = STStorageManager(self.storage_service) self.ts_config_manager = 
TimeSeriesConfigManager(self.storage_service)
-        self.table_mode_manager = TableModeManager(self.storage_service)
         self.playlist_manager = PlaylistManager(self.storage_service)
         self.scenario_builder_manager = ScenarioBuilderManager(self.storage_service)
         self.xpansion_manager = XpansionManager(self.storage_service)
         self.matrix_manager = MatrixManager(self.storage_service)
         self.binding_constraint_manager = BindingConstraintManager(self.storage_service)
         self.correlation_manager = CorrelationManager(self.storage_service)
+        self.table_mode_manager = TableModeManager(
+            self.areas,
+            self.links,
+            self.thermal_manager,
+            self.renewable_manager,
+            self.st_storage_manager,
+            self.binding_constraint_manager,
+        )
         self.cache_service = cache_service
         self.config = config
         self.on_deletion_callbacks: t.List[t.Callable[[str], None]] = []
@@ -1284,11 +1292,13 @@ def export_task(notifier: TaskUpdateNotifier) -> TaskResult:
             )
             return FileResponse(
                 tmp_export_file,
-                headers={"Content-Disposition": "inline"}
-                if filetype == ExportFormat.JSON
-                else {
-                    "Content-Disposition": f'attachment; filename="output-{output_id}.{"tar.gz" if filetype == ExportFormat.TAR_GZ else "zip"}'
-                },
+                headers=(
+                    {"Content-Disposition": "inline"}
+                    if filetype == ExportFormat.JSON
+                    else {
+                        "Content-Disposition": f'attachment; filename="output-{output_id}.{"tar.gz" if filetype == ExportFormat.TAR_GZ else "zip"}'
+                    }
+                ),
                 media_type=filetype,
             )
         else:
@@ -1854,7 +1864,7 @@ def update_area_ui(
         self,
         uuid: str,
         area_id: str,
-        area_ui: AreaUI,
+        area_ui: UpdateAreaUi,
         layer: str,
         params: RequestParameters,
     ) -> None:
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/area.py b/antarest/study/storage/rawstudy/model/filesystem/config/area.py
new file mode 100644
index 0000000000..5ade25159f
--- /dev/null
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/area.py
@@ -0,0 +1,541 @@
+"""
+Object model used to read and update area configuration.
+"""
+
+import typing as t
+
+from pydantic import Field, root_validator, validator
+
+from antarest.study.business.enum_ignore_case import EnumIgnoreCase
+from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import (
+    validate_color_rgb,
+    validate_colors,
+    validate_filtering,
+)
+from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties
+
+
+# noinspection SpellCheckingInspection
+class OptimizationProperties(IniProperties):
+    """
+    Object linked to `/input/areas/<area_id>/optimization.ini` information.
+
+    Usage:
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import OptimizationProperties
+    >>> from pprint import pprint
+
+    Create and validate a new Optimization object from a dictionary read from a configuration file.
+
+    >>> obj = {
+    ...     "filtering": {
+    ...         "filter-synthesis": "hourly, daily, weekly, monthly, annual",
+    ...         "filter-year-by-year": "annual,hourly",
+    ...     },
+    ...     "nodal optimization": {
+    ...         "non-dispatchable-power": "true",
+    ...         "dispatchable-hydro-power": "false",
+    ...         "spread-unsupplied-energy-cost": "1500",
+    ...         "spread-spilled-energy-cost": "317.2500",
+    ...     },
+    ...
}

+    >>> opt = OptimizationProperties(**obj)
+
+    >>> pprint(opt.dict(by_alias=True), width=80)
+    {'filtering': {'filter-synthesis': 'hourly, daily, weekly, monthly, annual',
+                   'filter-year-by-year': 'hourly, annual'},
+     'nodal optimization': {'dispatchable-hydro-power': False,
+                            'non-dispatchable-power': True,
+                            'other-dispatchable-power': True,
+                            'spread-spilled-energy-cost': 317.25,
+                            'spread-unsupplied-energy-cost': 1500.0}}
+
+    Update the filtering configuration:
+
+    >>> opt.filtering.filter_synthesis = "hourly,weekly,monthly,annual,century"
+    >>> opt.filtering.filter_year_by_year = "hourly, monthly, annual"
+
+    Update the nodal optimization configuration:
+
+    >>> opt.nodal_optimization.non_dispatchable_power = False
+    >>> opt.nodal_optimization.spread_spilled_energy_cost = 0.0
+
+    Convert the object to a dictionary for writing to a configuration file:
+
+    >>> pprint(opt.dict(by_alias=True, exclude_defaults=True), width=80)
+    {'filtering': {'filter-synthesis': 'hourly, weekly, monthly, annual',
+                   'filter-year-by-year': 'hourly, monthly, annual'},
+     'nodal optimization': {'dispatchable-hydro-power': False,
+                            'non-dispatchable-power': False,
+                            'spread-unsupplied-energy-cost': 1500.0}}
+    """
+
+    class FilteringSection(IniProperties):
+        """Configuration read from section `[filtering]` of `/input/areas/<area_id>/optimization.ini`."""
+
+        filter_synthesis: str = Field("", alias="filter-synthesis")
+        filter_year_by_year: str = Field("", alias="filter-year-by-year")
+
+        @validator("filter_synthesis", "filter_year_by_year", pre=True)
+        def _validate_filtering(cls, v: t.Any) -> str:
+            return validate_filtering(v)
+
+    # noinspection SpellCheckingInspection
+    class ModalOptimizationSection(IniProperties):
+        """Configuration read from section `[nodal optimization]` of `/input/areas/<area_id>/optimization.ini`."""
+
+        non_dispatchable_power: bool = Field(default=True, alias="non-dispatchable-power")
+        dispatchable_hydro_power: bool = Field(default=True, alias="dispatchable-hydro-power")
+        other_dispatchable_power: bool = Field(default=True, alias="other-dispatchable-power")
+        spread_unsupplied_energy_cost: float = Field(default=0.0, ge=0, alias="spread-unsupplied-energy-cost")
+        spread_spilled_energy_cost: float = Field(default=0.0, ge=0, alias="spread-spilled-energy-cost")
+
+    filtering: FilteringSection = Field(
+        default_factory=FilteringSection,
+        alias="filtering",
+    )
+    nodal_optimization: ModalOptimizationSection = Field(
+        default_factory=ModalOptimizationSection,
+        alias="nodal optimization",
+    )
+
+
+class AdequacyPatchMode(EnumIgnoreCase):
+    """
+    Adequacy patch mode.
+
+    Only available if study version >= 830.
+    """
+
+    OUTSIDE = "outside"
+    INSIDE = "inside"
+    VIRTUAL = "virtual"
+
+
+class AdequacyPathProperties(IniProperties):
+    """
+    Object linked to `/input/areas/<area_id>/adequacy_patch.ini` information.
+
+    Only available if study version >= 830.
+    """
+
+    class AdequacyPathSection(IniProperties):
+        """Configuration read from section `[adequacy-patch]` of `/input/areas/<area_id>/adequacy_patch.ini`."""
+
+        adequacy_patch_mode: AdequacyPatchMode = Field(default=AdequacyPatchMode.OUTSIDE, alias="adequacy-patch-mode")
+
+    adequacy_patch: AdequacyPathSection = Field(default_factory=AdequacyPathSection, alias="adequacy-patch")
+
+
+class AreaUI(IniProperties):
+    """
+    Style of an area in the map or in a layer.
+
+    Usage:
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI
+    >>> from pprint import pprint
+
+    Create and validate a new AreaUI object from a dictionary read from a configuration file.
+
+    >>> obj = {
+    ...     "x": 1148,
+    ...     "y": 144,
+    ...     "color_r": 0,
+    ...     "color_g": 128,
+    ...     "color_b": 255,
+    ... }
+    >>> ui = AreaUI(**obj)
+    >>> pprint(ui.dict(by_alias=True), width=80)
+    {'colorRgb': '#0080FF', 'x': 1148, 'y': 144}
+
+    Update the color:
+
+    >>> ui.color_rgb = (192, 168, 127)
+    >>> pprint(ui.dict(by_alias=True), width=80)
+    {'colorRgb': '#C0A87F', 'x': 1148, 'y': 144}
+    """
+
+    x: int = Field(0, description="x coordinate of the area in the map")
+    y: int = Field(0, description="y coordinate of the area in the map")
+    color_rgb: str = Field(
+        "#E66C2C",
+        alias="colorRgb",
+        description="color of the area in the map",
+    )
+
+    @validator("color_rgb", pre=True)
+    def _validate_color_rgb(cls, v: t.Any) -> str:
+        return validate_color_rgb(v)
+
+    @root_validator(pre=True)
+    def _validate_colors(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]:
+        return validate_colors(values)
+
+    def to_config(self) -> t.Mapping[str, t.Any]:
+        """
+        Convert the object to a dictionary for writing to a configuration file:
+
+        Usage:
+
+        >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI
+        >>> from pprint import pprint
+
+        >>> ui = AreaUI(x=1148, y=144, color_rgb="#0080FF")
+        >>> pprint(ui.to_config(), width=80)
+        {'color_b': 255, 'color_g': 128, 'color_r': 0, 'x': 1148, 'y': 144}
+        """
+        r = int(self.color_rgb[1:3], 16)
+        g = int(self.color_rgb[3:5], 16)
+        b = int(self.color_rgb[5:7], 16)
+        return {"x": self.x, "y": self.y, "color_r": r, "color_g": g, "color_b": b}
+
+
+class UIProperties(IniProperties):
+    """
+    Object linked to `/input/areas/<area_id>/ui.ini` information.
+
+    Usage:
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import UIProperties
+    >>> from pprint import pprint
+
+    UIProperties has default values for `style` and `layers`:
+
+    >>> ui = UIProperties()
+    >>> pprint(ui.dict(), width=80)
+    {'layer_styles': {0: {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}},
+     'layers': {0},
+     'style': {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}}
+
+    Create and validate a new UI object from a dictionary read from a configuration file.
+
+    >>> obj = {
+    ...     "ui": {
+    ...         "x": 1148,
+    ...         "y": 144,
+    ...         "color_r": 0,
+    ...         "color_g": 128,
+    ...         "color_b": 255,
+    ...         "layers": "0 7",
+    ...     },
+    ...     "layerX": {"0": 1148, "7": 18},
+    ...     "layerY": {"0": 144, "7": -22},
+    ...     "layerColor": {
+    ...         "0": "0 , 128 , 255",
+    ...         "4": "0 , 128 , 255",
+    ...         "6": "192 , 168 , 99",
+    ...         "7": "0 , 128 , 255",
+    ...         "8": "0 , 128 , 255",
+    ...     },
+    ...
}

+    >>> ui = UIProperties(**obj)
+    >>> pprint(ui.dict(), width=80)
+    {'layer_styles': {0: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144},
+                      4: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144},
+                      6: {'color_rgb': '#C0A863', 'x': 1148, 'y': 144},
+                      7: {'color_rgb': '#0080FF', 'x': 18, 'y': -22},
+                      8: {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}},
+     'layers': {0, 7},
+     'style': {'color_rgb': '#0080FF', 'x': 1148, 'y': 144}}
+
+    """
+
+    style: AreaUI = Field(
+        default_factory=AreaUI,
+        description="style of the area in the map: coordinates and color",
+    )
+    layers: t.Set[int] = Field(
+        default_factory=set,
+        description="layers where the area is visible",
+    )
+    layer_styles: t.Dict[int, AreaUI] = Field(
+        default_factory=dict,
+        description="style of the area in each layer",
+        alias="layerStyles",
+    )
+
+    @root_validator(pre=True)
+    def _set_default_style(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]:
+        """Define the default style if missing."""
+        style = values.get("style")
+        if style is None:
+            values["style"] = AreaUI()
+        elif isinstance(style, dict):
+            values["style"] = AreaUI(**style)
+        else:
+            values["style"] = AreaUI(**style.dict())
+        return values
+
+    @root_validator(pre=True)
+    def _set_default_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]:
+        """Define the default layers if missing."""
+        _layers = values.get("layers")
+        if _layers is None:
+            values["layers"] = {0}
+        return values
+
+    @root_validator(pre=True)
+    def _set_default_layer_styles(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]:
+        """Define the default layer styles if missing."""
+        layer_styles = values.get("layer_styles")
+        if layer_styles is None:
+            values["layer_styles"] = {0: AreaUI()}
+        elif isinstance(layer_styles, dict):
+            values["layer_styles"] = {0: AreaUI()}
+            for key, style in layer_styles.items():
+                key = int(key)
+                if isinstance(style, dict):
+                    values["layer_styles"][key] = AreaUI(**style)
+                else:
+                    values["layer_styles"][key] = AreaUI(**style.dict())
+        else:
+            raise TypeError(f"Invalid type for layer_styles: {type(layer_styles)}")
+        return values
+
+    @root_validator(pre=True)
+    def _validate_layers(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]:
+        # Parse the `[ui]` section (if any)
+        ui_section = values.pop("ui", {})
+        if ui_section:
+            # If `layers` is a single integer, convert it to `str` first
+            layers = str(ui_section.pop("layers", "0"))
+            values["layers"] = set([int(layer) for layer in layers.split()])
+            values["style"].x = ui_section.pop("x", values["style"].x)
+            values["style"].y = ui_section.pop("y", values["style"].y)
+            values["style"].color_rgb = (
+                ui_section.pop("color_r", values["style"].color_rgb[0]),
+                ui_section.pop("color_g", values["style"].color_rgb[1]),
+                ui_section.pop("color_b", values["style"].color_rgb[2]),
+            )
+
+        # Parse the `[layerX]`, `[layerY]` and `[layerColor]` sections (if any)
+        layer_x_section = values.pop("layerX", {})
+        layer_y_section = values.pop("layerY", {})
+        layer_color_section = values.pop("layerColor", {})
+        # Keys are converted to `int` and values to `str` (for splitting)
+        layer_x_section = {int(layer): str(x) for layer, x in layer_x_section.items()}
+        layer_y_section = {int(layer): str(y) for layer, y in layer_y_section.items()}
+        layer_color_section = {int(layer): str(color) for layer, color in layer_color_section.items()}
+        # indexes must contain all the keys from the three sections
+        indexes = set(layer_x_section) | set(layer_y_section) | set(layer_color_section)
+        if indexes:
+            layer_styles = {index: values["style"].copy() for index in indexes}
+            for layer, x in layer_x_section.items():
+                layer_styles[layer].x = int(x)
+            for layer, y in layer_y_section.items():
+                layer_styles[layer].y = int(y)
+            for layer, color in layer_color_section.items():
+                r, g, b = [int(c) for c in color.split(",")]
+                layer_styles[layer].color_rgb = r, g, b
+            values["layer_styles"].update(layer_styles)
+            values["layers"] = values["layers"].intersection(indexes)
+
+        return values
+
+    def to_config(self) -> t.Mapping[str, t.Mapping[str, t.Any]]:
+        """
+        Convert the object to a dictionary for writing to a configuration file:
+
+        Usage:
+
+        >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import UIProperties
+        >>> from pprint import pprint
+
+        >>> ui = UIProperties(
+        ...     style=AreaUI(x=1148, y=144, color_rgb=(0, 128, 255)),
+        ...     layers={0, 7},
+        ...     layer_styles={
+        ...         6: AreaUI(x=1148, y=144, color_rgb="#C0A863"),
+        ...         7: AreaUI(x=18, y=-22, color_rgb=(0, 128, 255)),
+        ...     },
+        ... )
+        >>> pprint(ui.to_config(), width=80)
+        {'layerColor': {'0': '230, 108, 44', '6': '192, 168, 99', '7': '0, 128, 255'},
+         'layerX': {'0': 0, '6': 1148, '7': 18},
+         'layerY': {'0': 0, '6': 144, '7': -22},
+         'ui': {'color_b': 255,
+                'color_g': 128,
+                'color_r': 0,
+                'layers': '0 7',
+                'x': 1148,
+                'y': 144}}
+        """
+        obj: t.MutableMapping[str, t.MutableMapping[str, t.Any]] = {
+            "ui": {},
+            "layerX": {},
+            "layerY": {},
+            "layerColor": {},
+        }
+        obj["ui"].update(self.style.to_config())
+        obj["ui"]["layers"] = " ".join(str(layer) for layer in sorted(self.layers))
+        for layer, style in self.layer_styles.items():
+            obj["layerX"][str(layer)] = style.x
+            obj["layerY"][str(layer)] = style.y
+            r = int(style.color_rgb[1:3], 16)
+            g = int(style.color_rgb[3:5], 16)
+            b = int(style.color_rgb[5:7], 16)
+            obj["layerColor"][str(layer)] = f"{r}, {g}, {b}"
+        return obj
+
+
+class AreaFolder(IniProperties):
+    """
+    Object linked to `/input/areas/<area_id>` information.
+
+    Usage:
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaFolder
+    >>> from pprint import pprint
+
+    Create and validate a new AreaFolder object from a dictionary read from a configuration file.
+
+    >>> obj = AreaFolder()
+    >>> pprint(obj.dict(), width=80)
+    {'adequacy_patch': None,
+     'optimization': {'filtering': {'filter_synthesis': '',
+                                    'filter_year_by_year': ''},
+                      'nodal_optimization': {'dispatchable_hydro_power': True,
+                                             'non_dispatchable_power': True,
+                                             'other_dispatchable_power': True,
+                                             'spread_spilled_energy_cost': 0.0,
+                                             'spread_unsupplied_energy_cost': 0.0}},
+     'ui': {'layer_styles': {0: {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}},
+            'layers': {0},
+            'style': {'color_rgb': '#E66C2C', 'x': 0, 'y': 0}}}
+
+    >>> pprint(obj.to_config(), width=80)
+    {'optimization': {'filtering': {'filter-synthesis': '',
+                                    'filter-year-by-year': ''},
+                      'nodal optimization': {'dispatchable-hydro-power': True,
+                                             'non-dispatchable-power': True,
+                                             'other-dispatchable-power': True,
+                                             'spread-spilled-energy-cost': 0.0,
+                                             'spread-unsupplied-energy-cost': 0.0}},
+     'ui': {'layerColor': {'0': '230, 108, 44'},
+            'layerX': {'0': 0},
+            'layerY': {'0': 0},
+            'ui': {'color_b': 44,
+                   'color_g': 108,
+                   'color_r': 230,
+                   'layers': '0',
+                   'x': 0,
+                   'y': 0}}}
+
+    We can construct an AreaFolder object from invalid data:
+
+    >>> data = {
+    ...     "optimization": {
+    ...         "filtering": {"filter-synthesis": "annual, centennial"},
+    ...         "nodal optimization": {
+    ...             "spread-spilled-energy-cost": "15.5",
+    ...             "spread-unsupplied-energy-cost": "yes",
+    ...         },
+    ...     },
+    ...     "ui": {"style": {"color_rgb": (0, 128, 256)}},
+    ... }
+
+    >>> obj = AreaFolder.construct(**data)
+    >>> pprint(obj.dict(), width=80)
+    {'adequacy_patch': None,
+     'optimization': {'filtering': {'filter-synthesis': 'annual, centennial'},
+                      'nodal optimization': {'spread-spilled-energy-cost': '15.5',
+                                             'spread-unsupplied-energy-cost': 'yes'}},
+     'ui': {'style': {'color_rgb': (0, 128, 256)}}}
+
+    >>> AreaFolder.validate(data)
+    Traceback (most recent call last):
+      ...
+    pydantic.error_wrappers.ValidationError: 1 validation error for AreaFolder
+    optimization -> nodal optimization -> spread-unsupplied-energy-cost
+      value is not a valid float (type=type_error.float)
+    """
+
+    optimization: OptimizationProperties = Field(
+        default_factory=OptimizationProperties,
+        description="optimization configuration",
+    )
+    adequacy_patch: t.Optional[AdequacyPathProperties] = Field(
+        None,
+        description="adequacy patch configuration",
+    )
+    ui: UIProperties = Field(
+        default_factory=UIProperties,
+        description="UI configuration",
+    )
+
+
+# noinspection SpellCheckingInspection
+class ThermalAreasProperties(IniProperties):
+    """
+    Object linked to `/input/thermal/areas.ini` information.
+
+    Usage:
+
+    >>> from antarest.study.storage.rawstudy.model.filesystem.config.area import ThermalAreasProperties
+    >>> from pprint import pprint
+
+    Create and validate a new ThermalAreasProperties object from a dictionary read from a configuration file::
+
+        [unserverdenergycost]
+        at = 4000.80
+        be = 3500
+        de = 1250
+        fr = 138.50
+
+        [spilledenergycost]
+        cz = 100.0
+
+    >>> obj = {
+    ...     "unserverdenergycost": {
+    ...         "at": "4000.80",
+    ...         "be": "3500",
+    ...         "de": "1250",
+    ...         "fr": "138.50",
+    ...     },
+    ...     "spilledenergycost": {
+    ...         "cz": "100.0",
+    ...     },
+    ...
} + >>> area = ThermalAreasProperties(**obj) + >>> pprint(area.dict(), width=80) + {'spilled_energy_cost': {'cz': 100.0}, + 'unserverd_energy_cost': {'at': 4000.8, + 'be': 3500.0, + 'de': 1250.0, + 'fr': 138.5}} + + Update the unserverd energy cost: + + >>> area.unserverd_energy_cost["at"] = 6500.0 + >>> area.unserverd_energy_cost["fr"] = 0.0 + >>> pprint(area.dict(), width=80) + {'spilled_energy_cost': {'cz': 100.0}, + 'unserverd_energy_cost': {'at': 6500.0, 'be': 3500.0, 'de': 1250.0, 'fr': 0.0}} + + Convert the object to a dictionary for writing to a configuration file: + + >>> pprint(area.to_config(), width=80) + {'spilledenergycost': {'cz': 100.0}, + 'unserverdenergycost': {'at': 6500.0, 'be': 3500.0, 'de': 1250.0, 'fr': 0.0}} + """ + + unserverd_energy_cost: t.MutableMapping[str, float] = Field( + default_factory=dict, + alias="unserverdenergycost", + description="unserverd energy cost (€/MWh) of each area", + ) + + spilled_energy_cost: t.MutableMapping[str, float] = Field( + default_factory=dict, + alias="spilledenergycost", + description="spilled energy cost (€/MWh) of each area", + ) + + @validator("unserverd_energy_cost", "spilled_energy_cost", pre=True) + def _validate_energy_cost(cls, v: t.Any) -> t.MutableMapping[str, float]: + if isinstance(v, dict): + return {str(k): float(v) for k, v in v.items()} + raise TypeError(f"Invalid type for energy cost: {type(v)}") diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py index a396ea950d..11749cf456 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/binding_constraint.py @@ -1,28 +1,18 @@ -import typing as t -from enum import Enum +""" +Object model used to read and update binding constraint configuration. +""" -from pydantic import BaseModel +from antarest.study.business.enum_ignore_case import EnumIgnoreCase -class BindingConstraintFrequency(str, Enum): +class BindingConstraintFrequency(EnumIgnoreCase): """ - Frequency of binding constraint - - - HOURLY: hourly time series with 8784 lines - - DAILY: daily time series with 366 lines - - WEEKLY: weekly time series with 366 lines (same as daily) - - Usage example: - - >>> bcf = BindingConstraintFrequency.HOURLY - >>> bcf == "hourly" - True - >>> bcf = BindingConstraintFrequency.DAILY - >>> "daily" == bcf - True - >>> bcf = BindingConstraintFrequency.WEEKLY - >>> bcf != "daily" - True + Frequency of a binding constraint. + + Attributes: + HOURLY: hourly time series with 8784 lines + DAILY: daily time series with 366 lines + WEEKLY: weekly time series with 366 lines (same as daily) """ HOURLY = "hourly" @@ -30,8 +20,18 @@ class BindingConstraintFrequency(str, Enum): WEEKLY = "weekly" -class BindingConstraintDTO(BaseModel): - id: str - areas: t.Set[str] - clusters: t.Set[str] - time_step: BindingConstraintFrequency +class BindingConstraintOperator(EnumIgnoreCase): + """ + Operator of a binding constraint. 
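+
+    Values are matched case-insensitively, a behavior inherited from `EnumIgnoreCase`:
+
+    >>> BindingConstraintOperator("LESS") is BindingConstraintOperator.LESS
+    True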
+ + Attributes: + LESS: less than or equal to + GREATER: greater than or equal to + BOTH: both LESS and GREATER + EQUAL: equal to + """ + + LESS = "less" + GREATER = "greater" + BOTH = "both" + EQUAL = "equal" diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py b/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py index 4563a0d217..1c84019294 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/cluster.py @@ -3,13 +3,12 @@ In the near future, this set of classes may be used for solar, wind and hydro clusters. """ + import functools import typing as t from pydantic import BaseModel, Extra, Field -__all__ = ("ItemProperties", "ClusterProperties") - @functools.total_ordering class ItemProperties( @@ -69,10 +68,16 @@ class ClusterProperties(ItemProperties): # Activity status: # - True: the plant may generate. # - False: not yet commissioned, moth-balled, etc. - enabled: bool = Field(default=True, description="Activity status") + enabled: bool = Field(default=True, description="Activity status", title="Enabled") # noinspection SpellCheckingInspection - unit_count: int = Field(default=1, ge=1, description="Unit count", alias="unitcount") + unit_count: int = Field( + default=1, + ge=1, + description="Unit count", + alias="unitcount", + title="Unit Count", + ) # noinspection SpellCheckingInspection nominal_capacity: float = Field( @@ -80,6 +85,7 @@ class ClusterProperties(ItemProperties): ge=0, description="Nominal capacity (MW per unit)", alias="nominalcapacity", + title="Nominal Capacity", ) @property diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/files.py b/antarest/study/storage/rawstudy/model/filesystem/config/files.py index cafc901644..6f49c9f6fa 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/files.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/files.py @@ -10,10 +10,7 @@ from antarest.core.model import JSON from antarest.study.storage.rawstudy.ini_reader import IniReader -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintDTO, - BindingConstraintFrequency, -) +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency from antarest.study.storage.rawstudy.model.filesystem.config.exceptions import ( SimulationParsingError, XpansionParsingError, @@ -21,6 +18,7 @@ from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import extract_filtering from antarest.study.storage.rawstudy.model.filesystem.config.model import ( Area, + BindingConstraintDTO, DistrictSet, FileStudyTreeConfig, Link, diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py b/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py new file mode 100644 index 0000000000..51f10a5ca5 --- /dev/null +++ b/antarest/study/storage/rawstudy/model/filesystem/config/ini_properties.py @@ -0,0 +1,52 @@ +import json +import typing as t + +from pydantic import BaseModel, Extra + + +class IniProperties( + BaseModel, + # On reading, if the configuration contains an extra field, it is better + # to forbid it, because it allows errors to be detected early. + # Ignoring extra attributes can hide errors. + extra=Extra.forbid, + # If a field is updated on assignment, it is also validated. + validate_assignment=True, + # On testing, we can use snake_case for field names. 
+    allow_population_by_field_name=True,
+):
+    """
+    Base class for configuration sections.
+    """
+
+    def to_config(self) -> t.Mapping[str, t.Any]:
+        """
+        Convert the object to a dictionary for writing to a configuration file (`*.ini`).
+
+        Returns:
+            A dictionary with the configuration values.
+        """
+
+        config = {}
+        for field_name, field in self.__fields__.items():
+            value = getattr(self, field_name)
+            if value is None:
+                continue
+            if isinstance(value, IniProperties):
+                config[field.alias] = value.to_config()
+            else:
+                config[field.alias] = json.loads(json.dumps(value))
+        return config
+
+    @classmethod
+    def construct(cls, _fields_set: t.Optional[t.Set[str]] = None, **values: t.Any) -> "IniProperties":
+        """
+        Construct a new model instance from a dict of values, replacing aliases with real field names.
+        """
+        # The pydantic construct() function does not allow aliases to be handled.
+        aliases = {(field.alias or name): name for name, field in cls.__fields__.items()}
+        renamed_values = {aliases.get(k, k): v for k, v in values.items()}
+        if _fields_set is not None:
+            _fields_set = {aliases.get(f, f) for f in _fields_set}
+        # noinspection PyTypeChecker
+        return super().construct(_fields_set, **renamed_values)
diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/links.py b/antarest/study/storage/rawstudy/model/filesystem/config/links.py
new file mode 100644
index 0000000000..7ebc0e2176
--- /dev/null
+++ b/antarest/study/storage/rawstudy/model/filesystem/config/links.py
@@ -0,0 +1,161 @@
+"""
+Object model used to read and update link configuration.
+"""
+
+import typing as t
+
+from pydantic import Field, root_validator, validator
+
+from antarest.study.business.enum_ignore_case import EnumIgnoreCase
+from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import (
+    validate_color_rgb,
+    validate_colors,
+    validate_filtering,
+)
+from antarest.study.storage.rawstudy.model.filesystem.config.ini_properties import IniProperties
+
+
+# noinspection SpellCheckingInspection
+class AssetType(EnumIgnoreCase):
+    """
+    Enum representing the type of asset for a link between two areas.
+
+    Attributes:
+        AC: Represents an Alternating Current link. This is the most common type of electricity transmission.
+        DC: Represents a Direct Current link. This is typically used for long-distance transmission.
+        GAZ: Represents a gas link. This is used when the link is related to gas transmission.
+        VIRT: Represents a virtual link. This is used when the link doesn't physically exist
+            but is used for modeling purposes.
+        OTHER: Represents any other type of link that doesn't fall into the above categories.
+    """
+
+    AC = "ac"
+    DC = "dc"
+    GAZ = "gaz"
+    VIRT = "virt"
+    OTHER = "other"
+
+
+class TransmissionCapacity(EnumIgnoreCase):
+    """
+    Enum representing the transmission capacity of a link.
+
+    Attributes:
+        INFINITE: Represents a link with infinite transmission capacity.
+            This means there are no limits on the amount of electricity that can be transmitted.
+        IGNORE: Represents a link where the transmission capacity is ignored.
+            This means the capacity is not considered during simulations.
+        ENABLED: Represents a link with a specific transmission capacity.
+            This means the capacity is considered in the model and has a certain limit.
+    """
+
+    INFINITE = "infinite"
+    IGNORE = "ignore"
+    ENABLED = "enabled"
+
+
+class LinkProperties(IniProperties):
+    """
+    Configuration read from a section in the `input/links/<area1_id>/properties.ini` file.
+ + Usage: + + >>> from antarest.study.storage.rawstudy.model.filesystem.config.links import LinkProperties + >>> from pprint import pprint + + Create and validate a new `LinkProperties` object from a dictionary read from a configuration file. + + >>> obj = { + ... "hurdles-cost": "false", + ... "loop-flow": "false", + ... "use-phase-shifter": "false", + ... "transmission-capacities": "infinite", + ... "asset-type": "ac", + ... "link-style": "plain", + ... "link-width": "1", + ... "colorr": "80", + ... "colorg": "192", + ... "colorb": "255", + ... "comments": "This is a link", + ... "display-comments": "true", + ... "filter-synthesis": "hourly, daily, weekly, monthly, annual", + ... "filter-year-by-year": "hourly, daily, weekly, monthly, annual", + ... } + + >>> opt = LinkProperties(**obj) + + >>> pprint(opt.dict(by_alias=True), width=80) + {'asset-type': <AssetType.AC: 'ac'>, + 'colorRgb': '#50C0FF', + 'comments': 'This is a link', + 'display-comments': True, + 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', + 'hurdles-cost': False, + 'link-style': 'plain', + 'link-width': 1, + 'loop-flow': False, + 'transmission-capacities': <TransmissionCapacity.INFINITE: 'infinite'>, + 'use-phase-shifter': False} + + >>> pprint(opt.to_config(), width=80) + {'asset-type': 'ac', + 'colorb': 255, + 'colorg': 192, + 'colorr': 80, + 'comments': 'This is a link', + 'display-comments': True, + 'filter-synthesis': 'hourly, daily, weekly, monthly, annual', + 'filter-year-by-year': 'hourly, daily, weekly, monthly, annual', + 'hurdles-cost': False, + 'link-style': 'plain', + 'link-width': 1, + 'loop-flow': False, + 'transmission-capacities': 'infinite', + 'use-phase-shifter': False} + """ + + hurdles_cost: bool = Field(default=False, alias="hurdles-cost") + loop_flow: bool = Field(default=False, alias="loop-flow") + use_phase_shifter: bool = Field(default=False, alias="use-phase-shifter") + transmission_capacities: TransmissionCapacity = Field( + default=TransmissionCapacity.ENABLED, alias="transmission-capacities" + ) + asset_type: AssetType = Field(default=AssetType.AC, alias="asset-type") + link_style: str = Field(default="plain", alias="link-style") + link_width: int = Field(default=1, alias="link-width") + comments: str = Field(default="", alias="comments") # unknown field?! + display_comments: bool = Field(default=True, alias="display-comments") + filter_synthesis: str = Field(default="", alias="filter-synthesis") + filter_year_by_year: str = Field(default="", alias="filter-year-by-year") + color_rgb: str = Field( + "#707070", + alias="colorRgb", + description="color of the link in the map", + ) + + @validator("filter_synthesis", "filter_year_by_year", pre=True) + def _validate_filtering(cls, v: t.Any) -> str: + return validate_filtering(v) + + @validator("color_rgb", pre=True) + def _validate_color_rgb(cls, v: t.Any) -> str: + return validate_color_rgb(v) + + @root_validator(pre=True) + def _validate_colors(cls, values: t.MutableMapping[str, t.Any]) -> t.Mapping[str, t.Any]: + return validate_colors(values) + + # noinspection SpellCheckingInspection + def to_config(self) -> t.Mapping[str, t.Any]: + """ + Convert the object to a dictionary for writing to a configuration file.
+ """ + obj = dict(super().to_config()) + color_rgb = obj.pop("colorRgb", "#707070") + return { + "colorr": int(color_rgb[1:3], 16), + "colorg": int(color_rgb[3:5], 16), + "colorb": int(color_rgb[5:7], 16), + **obj, + } diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/model.py b/antarest/study/storage/rawstudy/model/filesystem/config/model.py index 18e9702571..ff79c51073 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/model.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/model.py @@ -1,21 +1,20 @@ import re import typing as t -from enum import Enum from pathlib import Path -from pydantic import Field, root_validator -from pydantic.main import BaseModel +from pydantic import BaseModel, Field, root_validator from antarest.core.utils.utils import DTO +from antarest.study.business.enum_ignore_case import EnumIgnoreCase -from .binding_constraint import BindingConstraintDTO +from .binding_constraint import BindingConstraintFrequency from .field_validators import extract_filtering from .renewable import RenewableConfigType from .st_storage import STStorageConfigType from .thermal import ThermalConfigType -class ENR_MODELLING(Enum): +class EnrModelling(EnumIgnoreCase): AGGREGATED = "aggregated" CLUSTERS = "clusters" @@ -101,6 +100,13 @@ def get_file(self) -> str: return f"{self.date}{modes[self.mode]}{dash}{self.name}" +class BindingConstraintDTO(BaseModel): + id: str + areas: t.Set[str] + clusters: t.Set[str] + time_step: BindingConstraintFrequency + + class FileStudyTreeConfig(DTO): """ Root object to handle all study parameters which impact tree structure @@ -119,7 +125,7 @@ def __init__( bindings: t.Optional[t.List[BindingConstraintDTO]] = None, store_new_set: bool = False, archive_input_series: t.Optional[t.List[str]] = None, - enr_modelling: str = ENR_MODELLING.AGGREGATED.value, + enr_modelling: str = EnrModelling.AGGREGATED.value, cache: t.Optional[t.Dict[str, t.List[str]]] = None, zip_path: t.Optional[Path] = None, ): @@ -254,7 +260,7 @@ class FileStudyTreeConfigDTO(BaseModel): bindings: t.List[BindingConstraintDTO] = list() store_new_set: bool = False archive_input_series: t.List[str] = list() - enr_modelling: str = ENR_MODELLING.AGGREGATED.value + enr_modelling: str = EnrModelling.AGGREGATED.value zip_path: t.Optional[Path] = None @staticmethod diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py index 4d34e21637..ed0716147a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/renewable.py @@ -6,15 +6,6 @@ from antarest.study.storage.rawstudy.model.filesystem.config.cluster import ClusterProperties from antarest.study.storage.rawstudy.model.filesystem.config.identifier import IgnoreCaseIdentifier -__all__ = ( - "TimeSeriesInterpretation", - "RenewableProperties", - "RenewableConfig", - "RenewableConfigType", - "create_renewable_config", - "RenewableClusterGroup", -) - class TimeSeriesInterpretation(EnumIgnoreCase): """ @@ -73,11 +64,13 @@ class RenewableProperties(ClusterProperties): """ group: RenewableClusterGroup = Field( + title="Renewable Cluster Group", default=RenewableClusterGroup.OTHER1, description="Renewable Cluster Group", ) ts_interpretation: TimeSeriesInterpretation = Field( + title="Time Series Interpretation", default=TimeSeriesInterpretation.POWER_GENERATION, description="Time series interpretation", alias="ts-interpretation", 
@@ -105,6 +98,22 @@ class RenewableConfig(RenewableProperties, IgnoreCaseIdentifier): RenewableConfigType = RenewableConfig +def get_renewable_config_cls(study_version: t.Union[str, int]) -> t.Type[RenewableConfig]: + """ + Retrieves the renewable configuration class based on the study version. + + Args: + study_version: The version of the study. + + Returns: + The renewable configuration class. + """ + version = int(study_version) + if version >= 810: + return RenewableConfig + raise ValueError(f"Unsupported study version {study_version}, required 810 or above.") + + def create_renewable_config(study_version: t.Union[str, int], **kwargs: t.Any) -> RenewableConfigType: """ Factory method to create a renewable configuration model. @@ -119,4 +128,5 @@ def create_renewable_config(study_version: t.Union[str, int], **kwargs: t.Any) - Raises: ValueError: If the study version is not supported. """ - return RenewableConfig(**kwargs) + cls = get_renewable_config_cls(study_version) + return cls(**kwargs) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py index 61a644b3a5..3355ba571a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/st_storage.py @@ -42,30 +42,35 @@ class STStorageProperties(ItemProperties): group: STStorageGroup = Field( STStorageGroup.OTHER1, description="Energy storage system group", + title="Short-Term Storage Group", ) injection_nominal_capacity: float = Field( 0, description="Injection nominal capacity (MW)", ge=0, alias="injectionnominalcapacity", + title="Injection Nominal Capacity", ) withdrawal_nominal_capacity: float = Field( 0, description="Withdrawal nominal capacity (MW)", ge=0, alias="withdrawalnominalcapacity", + title="Withdrawal Nominal Capacity", ) reservoir_capacity: float = Field( 0, description="Reservoir capacity (MWh)", ge=0, alias="reservoircapacity", + title="Reservoir Capacity", ) efficiency: float = Field( 1, description="Efficiency of the storage system (%)", ge=0, le=1, + title="Efficiency", ) # The `initial_level` value must be between 0 and 1, but the default value is 0.5 initial_level: float = Field( @@ -74,11 +79,13 @@ class STStorageProperties(ItemProperties): ge=0, le=1, alias="initiallevel", + title="Initial Level", ) initial_level_optim: bool = Field( False, description="Flag indicating if the initial level is optimized", alias="initialleveloptim", + title="Initial Level Optimization", ) @@ -142,6 +149,24 @@ class STStorage880Config(STStorage880Properties, LowerCaseIdentifier): STStorageConfigType = t.Union[STStorageConfig, STStorage880Config] +def get_st_storage_config_cls(study_version: t.Union[str, int]) -> t.Type[STStorageConfigType]: + """ + Retrieves the short-term storage configuration class based on the study version. + + Args: + study_version: The version of the study. + + Returns: + The short-term storage configuration class. + """ + version = int(study_version) + if version >= 880: + return STStorage880Config + elif version >= 860: + return STStorageConfig + raise ValueError(f"Unsupported study version: {version}") + + def create_st_storage_config(study_version: t.Union[str, int], **kwargs: t.Any) -> STStorageConfigType: """ Factory method to create a short-term storage configuration model. 
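# A minimal usage sketch of the version-dispatch factories introduced above
# (get_renewable_config_cls / get_st_storage_config_cls and, further below,
# get_thermal_config_cls). Not part of the patch; the study versions and field
# values are illustrative only.

from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import (
    STStorage880Config,
    STStorageConfig,
    create_st_storage_config,
    get_st_storage_config_cls,
)

# Resolve the configuration class for a given study version...
assert get_st_storage_config_cls(880) is STStorage880Config
assert get_st_storage_config_cls(860) is STStorageConfig

# ...or let the factory resolve the class and validate the fields in one call.
storage = create_st_storage_config(880, name="Siemens Battery", efficiency=0.9)

# Versions below 860 do not support short-term storage and are rejected.
try:
    get_st_storage_config_cls(850)
except ValueError as exc:
    print(exc)  # Unsupported study version: 850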
@@ -156,9 +181,5 @@ def create_st_storage_config(study_version: t.Union[str, int], **kwargs: t.Any) Raises: ValueError: If the study version is not supported. """ - version = int(study_version) - if version < 860: - raise ValueError(f"Unsupported study version: {version}") - elif version < 880: - return STStorageConfig(**kwargs) - return STStorage880Config(**kwargs) + cls = get_st_storage_config_cls(study_version) + return cls(**kwargs) diff --git a/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py b/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py index f2a810025a..dcd0bc7729 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py +++ b/antarest/study/storage/rawstudy/model/filesystem/config/thermal.py @@ -6,20 +6,6 @@ from antarest.study.storage.rawstudy.model.filesystem.config.cluster import ClusterProperties from antarest.study.storage.rawstudy.model.filesystem.config.identifier import IgnoreCaseIdentifier -__all__ = ( - "LawOption", - "LocalTSGenerationBehavior", - "Thermal860Config", - "Thermal870Config", - "Thermal870Properties", - "ThermalClusterGroup", - "ThermalConfig", - "ThermalConfigType", - "ThermalCostGeneration", - "ThermalProperties", - "create_thermal_config", -) - class LocalTSGenerationBehavior(EnumIgnoreCase): """ @@ -108,17 +94,20 @@ class ThermalProperties(ClusterProperties): group: ThermalClusterGroup = Field( default=ThermalClusterGroup.OTHER1, description="Thermal Cluster Group", + title="Thermal Cluster Group", ) gen_ts: LocalTSGenerationBehavior = Field( default=LocalTSGenerationBehavior.USE_GLOBAL, description="Time Series Generation Option", alias="gen-ts", + title="Time Series Generation", ) min_stable_power: float = Field( default=0.0, description="Min. Stable Power (MW)", alias="min-stable-power", + title="Min. Stable Power", ) min_up_time: int = Field( default=1, @@ -126,6 +115,7 @@ class ThermalProperties(ClusterProperties): le=168, description="Min. Up time (h)", alias="min-up-time", + title="Min. Up Time", ) min_down_time: int = Field( default=1, @@ -133,17 +123,20 @@ class ThermalProperties(ClusterProperties): le=168, description="Min. Down time (h)", alias="min-down-time", + title="Min. 
Down Time", ) must_run: bool = Field( default=False, description="Must run flag", alias="must-run", + title="Must Run", ) spinning: float = Field( default=0.0, ge=0, le=100, description="Spinning (%)", + title="Spinning", ) volatility_forced: float = Field( default=0.0, @@ -151,6 +144,7 @@ class ThermalProperties(ClusterProperties): le=1, description="Forced Volatility", alias="volatility.forced", + title="Forced Volatility", ) volatility_planned: float = Field( default=0.0, @@ -158,51 +152,60 @@ class ThermalProperties(ClusterProperties): le=1, description="Planned volatility", alias="volatility.planned", + title="Planned Volatility", ) law_forced: LawOption = Field( default=LawOption.UNIFORM, description="Forced Law (ts-generator)", alias="law.forced", + title="Forced Law", ) law_planned: LawOption = Field( default=LawOption.UNIFORM, description="Planned Law (ts-generator)", alias="law.planned", + title="Planned Law", ) marginal_cost: float = Field( default=0.0, ge=0, description="Marginal cost (euros/MWh)", alias="marginal-cost", + title="Marginal Cost", ) spread_cost: float = Field( default=0.0, ge=0, description="Spread (euros/MWh)", alias="spread-cost", + title="Spread Cost", ) fixed_cost: float = Field( default=0.0, ge=0, description="Fixed cost (euros/hour)", alias="fixed-cost", + title="Fixed Cost", ) startup_cost: float = Field( default=0.0, ge=0, description="Startup cost (euros/startup)", alias="startup-cost", + title="Startup Cost", ) market_bid_cost: float = Field( default=0.0, ge=0, description="Market bid cost (euros/MWh)", alias="market-bid-cost", + title="Market Bid Cost", ) co2: float = Field( default=0.0, ge=0, description="Emission rate of CO2 (t/MWh)", + title="Emission rate of CO2", ) @@ -215,62 +218,74 @@ class Thermal860Properties(ThermalProperties): default=0.0, ge=0, description="Emission rate of NH3 (t/MWh)", + title="Emission rate of NH3", ) so2: float = Field( default=0.0, ge=0, description="Emission rate of SO2 (t/MWh)", + title="Emission rate of SO2", ) nox: float = Field( default=0.0, ge=0, description="Emission rate of NOX (t/MWh)", + title="Emission rate of NOX", ) pm2_5: float = Field( default=0.0, ge=0, description="Emission rate of PM 2.5 (t/MWh)", + title="Emission rate of PM 2.5", alias="pm2_5", ) pm5: float = Field( default=0.0, ge=0, description="Emission rate of PM 5 (t/MWh)", + title="Emission rate of PM 5", ) pm10: float = Field( default=0.0, ge=0, description="Emission rate of PM 10 (t/MWh)", + title="Emission rate of PM 10", ) nmvoc: float = Field( default=0.0, ge=0, description="Emission rate of NMVOC (t/MWh)", + title="Emission rate of NMVOC", ) op1: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 1 (t/MWh)", + title="Emission rate of pollutant 1", ) op2: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 2 (t/MWh)", + title="Emission rate of pollutant 2", ) op3: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 3 (t/MWh)", + title="Emission rate of pollutant 3", ) op4: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 4 (t/MWh)", + title="Emission rate of pollutant 4", ) op5: float = Field( default=0.0, ge=0, description="Emission rate of pollutant 5 (t/MWh)", + title="Emission rate of pollutant 5", ) @@ -284,18 +299,21 @@ class Thermal870Properties(Thermal860Properties): default=ThermalCostGeneration.SET_MANUALLY, alias="costgeneration", description="Cost generation option", + title="Cost Generation", ) efficiency: float = Field( default=100.0, 
ge=0, le=100, description="Efficiency (%)", + title="Efficiency", ) # Even if `variableomcost` is a cost it could be negative. variable_o_m_cost: float = Field( default=0.0, description="Operating and Maintenance Cost (€/MWh)", alias="variableomcost", + title="Variable O&M Cost", ) @@ -375,6 +393,25 @@ class Thermal870Config(Thermal870Properties, IgnoreCaseIdentifier): ThermalConfigType = t.Union[Thermal870Config, Thermal860Config, ThermalConfig] +def get_thermal_config_cls(study_version: t.Union[str, int]) -> t.Type[ThermalConfigType]: + """ + Retrieves the thermal configuration class based on the study version. + + Args: + study_version: The version of the study. + + Returns: + The thermal configuration class. + """ + version = int(study_version) + if version >= 870: + return Thermal870Config + elif version == 860: + return Thermal860Config + else: + return ThermalConfig + + def create_thermal_config(study_version: t.Union[str, int], **kwargs: t.Any) -> ThermalConfigType: """ Factory method to create a thermal configuration model. @@ -389,10 +426,5 @@ def create_thermal_config(study_version: t.Union[str, int], **kwargs: t.Any) -> Raises: ValueError: If the study version is not supported. """ - version = int(study_version) - if version >= 870: - return Thermal870Config(**kwargs) - elif version == 860: - return Thermal860Config(**kwargs) - else: - return ThermalConfig(**kwargs) + cls = get_thermal_config_cls(study_version) + return cls(**kwargs) diff --git a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py index 37e0badb0c..909140b68a 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py +++ b/antarest/study/storage/rawstudy/model/filesystem/matrix/date_serializer.py @@ -68,7 +68,7 @@ class HourlyMatrixSerializer(IDateMatrixSerializer): def build_date(self, index: pd.Index) -> pd.DataFrame: def _map(row: str) -> Tuple[str, int, str, str, str]: - m, d, h = re.split("[\s/]", row) + m, d, h = re.split(r"[\s/]", row) return "", 1, d, IDateMatrixSerializer._R_MONTHS[m], h items = index.map(_map).tolist() diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py index 95f48a48f3..9e07a32506 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/hydro/hydro_ini.py @@ -5,6 +5,7 @@ class InputHydroIni(IniFileNode): def __init__(self, context: ContextServer, config: FileStudyTreeConfig): + # TODO: missing "use heuristic", "follow load" and "reservoir capacity" sections = [ "inter-daily-breakdown", "intra-daily-modulation", diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py index 88b58c5369..4e26ff0c9c 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/input/input.py @@ -1,4 +1,4 @@ -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling from antarest.study.storage.rawstudy.model.filesystem.folder_node import FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import TREE from 
antarest.study.storage.rawstudy.model.filesystem.root.input.areas.areas import InputAreas @@ -37,7 +37,7 @@ def build(self) -> TREE: "wind": InputWind(self.context, self.config.next_file("wind")), } - if self.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if self.config.enr_modelling == EnrModelling.CLUSTERS.value: children["renewables"] = ClusteredRenewables(self.context, self.config.next_file("renewables")) if self.config.version >= 860: children["st-storage"] = InputSTStorage(self.context, self.config.next_file("st-storage")) diff --git a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py index d2b9541a22..dc5726554d 100644 --- a/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py +++ b/antarest/study/storage/rawstudy/model/filesystem/root/output/simulation/mode/common/area.py @@ -53,7 +53,7 @@ def build(self) -> TREE: self.area, ) - # has_enr_clusters = self.config.enr_modelling == ENR_MODELLING.CLUSTERS.value and + # has_enr_clusters = self.config.enr_modelling == EnrModelling.CLUSTERS.value and # len(self.config.get_renewable_ids(self.area)) > 0 # todo get the config related to this output (now this may fail if input has changed since the launch) has_enr_clusters = True diff --git a/antarest/study/storage/study_download_utils.py b/antarest/study/storage/study_download_utils.py index 9cc25d5586..c89d60b380 100644 --- a/antarest/study/storage/study_download_utils.py +++ b/antarest/study/storage/study_download_utils.py @@ -22,7 +22,7 @@ StudyDownloadType, TimeSerie, ) -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, Area, FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.config.model import Area, EnrModelling, FileStudyTreeConfig from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.rawstudy.model.filesystem.folder_node import ChildNotFoundError, FilterError, FolderNode from antarest.study.storage.rawstudy.model.filesystem.inode import INode @@ -98,7 +98,7 @@ def level_output_filter( data: StudyDownloadDTO, ) -> None: cluster_details = [f"details-{data.level.value}"] - if study.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if study.config.enr_modelling == EnrModelling.CLUSTERS.value: cluster_details += [f"details-res-{data.level.value}"] files_matcher = ( diff --git a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py index 4db7c525d8..ebbeeec739 100644 --- a/antarest/study/storage/variantstudy/business/utils_binding_constraint.py +++ b/antarest/study/storage/variantstudy/business/utils_binding_constraint.py @@ -1,10 +1,7 @@ import typing as t -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( - BindingConstraintDTO, - BindingConstraintFrequency, -) -from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.model import BindingConstraintDTO, FileStudyTreeConfig def parse_bindings_coeffs_and_save_into_config( diff --git a/antarest/study/storage/variantstudy/model/command/common.py 
b/antarest/study/storage/variantstudy/model/command/common.py index a6ac905fd9..40ec8629cf 100644 --- a/antarest/study/storage/variantstudy/model/command/common.py +++ b/antarest/study/storage/variantstudy/model/command/common.py @@ -8,13 +8,6 @@ class CommandOutput: message: str = "" -class BindingConstraintOperator(Enum): - BOTH = "both" - EQUAL = "equal" - GREATER = "greater" - LESS = "less" - - class CoeffType(Enum): THERMAL = "thermal" LINK = "link" diff --git a/antarest/study/storage/variantstudy/model/command/create_area.py b/antarest/study/storage/variantstudy/model/command/create_area.py index d2114c254e..f956ef298c 100644 --- a/antarest/study/storage/variantstudy/model/command/create_area.py +++ b/antarest/study/storage/variantstudy/model/command/create_area.py @@ -5,8 +5,8 @@ from antarest.core.model import JSON from antarest.study.common.default_values import FilteringOptions, NodalOptimization from antarest.study.storage.rawstudy.model.filesystem.config.model import ( - ENR_MODELLING, Area, + EnrModelling, FileStudyTreeConfig, transform_name_to_id, ) @@ -238,7 +238,7 @@ def _apply(self, study_data: FileStudy) -> CommandOutput: f"waterValues_{area_id}" ] = self.command_context.generator_matrix_constants.get_null_matrix() - if version >= 810 and study_data.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if version >= 810 and study_data.config.enr_modelling == EnrModelling.CLUSTERS.value: new_area_data["input"]["renewables"] = { "clusters": {area_id: {"list": {}}}, } diff --git a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py index 70a1c1f627..96e9643165 100644 --- a/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/create_binding_constraint.py @@ -3,11 +3,15 @@ from abc import ABCMeta import numpy as np -from pydantic import BaseModel, Extra, Field, root_validator +from pydantic import BaseModel, Extra, Field, root_validator, validator from antarest.matrixstore.model import MatrixData from antarest.study.business.all_optional_meta import AllOptionalMetaclass -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) +from antarest.study.storage.rawstudy.model.filesystem.config.field_validators import validate_filtering from antarest.study.storage.rawstudy.model.filesystem.config.model import FileStudyTreeConfig, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.matrix_constants_generator import GeneratorMatrixConstants @@ -15,11 +19,7 @@ from antarest.study.storage.variantstudy.business.utils_binding_constraint import ( parse_bindings_coeffs_and_save_into_config, ) -from antarest.study.storage.variantstudy.model.command.common import ( - BindingConstraintOperator, - CommandName, - CommandOutput, -) +from antarest.study.storage.variantstudy.model.command.common import CommandName, CommandOutput from antarest.study.storage.variantstudy.model.command.icommand import MATCH_SIGNATURE_SEPARATOR, ICommand from antarest.study.storage.variantstudy.model.model import CommandDTO @@ -91,6 +91,10 @@ class BindingConstraintProperties830(BindingConstraintPropertiesBase): 
filter_year_by_year: str = Field("", alias="filter-year-by-year") filter_synthesis: str = Field("", alias="filter-synthesis") + @validator("filter_synthesis", "filter_year_by_year", pre=True) + def _validate_filtering(cls, v: t.Any) -> str: + return validate_filtering(v) + class BindingConstraintProperties870(BindingConstraintProperties830): group: str = DEFAULT_GROUP diff --git a/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py b/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py index ab61d8f710..3e5ad8e213 100644 --- a/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py +++ b/antarest/study/storage/variantstudy/model/command/create_renewables_cluster.py @@ -4,8 +4,8 @@ from antarest.core.model import JSON from antarest.study.storage.rawstudy.model.filesystem.config.model import ( - ENR_MODELLING, Area, + EnrModelling, FileStudyTreeConfig, transform_name_to_id, ) @@ -42,7 +42,7 @@ def validate_cluster_name(cls, val: str) -> str: return val def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutput, t.Dict[str, t.Any]]: - if study_data.enr_modelling != ENR_MODELLING.CLUSTERS.value: + if study_data.enr_modelling != EnrModelling.CLUSTERS.value: # Since version 8.1 of the solver, we can use renewable clusters # instead of "Load", "Wind" and "Solar" objects for modelling. # When the "renewable-generation-modelling" parameter is set to "aggregated", @@ -50,7 +50,7 @@ def _apply_config(self, study_data: FileStudyTreeConfig) -> t.Tuple[CommandOutpu # To use renewable clusters, the parameter must therefore be set to "clusters". message = ( f"Parameter 'renewable-generation-modelling'" - f" must be set to '{ENR_MODELLING.CLUSTERS.value}'" + f" must be set to '{EnrModelling.CLUSTERS.value}'" f" instead of '{study_data.enr_modelling}'" ) return CommandOutput(status=False, message=message), {} diff --git a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py index 25b180c49d..958e9d81f1 100644 --- a/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py +++ b/antarest/study/storage/variantstudy/model/command/remove_binding_constraint.py @@ -30,7 +30,7 @@ def _apply_config(self, study_data: FileStudyTreeConfig) -> Tuple[CommandOutput, def _apply(self, study_data: FileStudy) -> CommandOutput: if self.id not in [bind.id for bind in study_data.config.bindings]: - return CommandOutput(status=False, message="Binding constraint not found") + return CommandOutput(status=False, message=f"Binding constraint not found: '{self.id}'") binding_constraints = study_data.tree.get(["input", "bindingconstraints", "bindingconstraints"]) new_binding_constraints: JSON = {} index = 0 diff --git a/antarest/study/web/study_data_blueprint.py b/antarest/study/web/study_data_blueprint.py index 4f46ec1fab..ecdd5ff191 100644 --- a/antarest/study/web/study_data_blueprint.py +++ b/antarest/study/web/study_data_blueprint.py @@ -9,7 +9,7 @@ from antarest.core.config import Config from antarest.core.jwt import JWTUser -from antarest.core.model import StudyPermissionType +from antarest.core.model import JSON, StudyPermissionType from antarest.core.requests import RequestParameters from antarest.core.utils.utils import sanitize_uuid from antarest.core.utils.web import APITag @@ -18,7 +18,7 @@ from antarest.study.business.adequacy_patch_management import AdequacyPatchFormFields from 
antarest.study.business.advanced_parameters_management import AdvancedParamsFormFields from antarest.study.business.allocation_management import AllocationFormFields, AllocationMatrix -from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, AreaUI, LayerInfoDTO +from antarest.study.business.area_management import AreaCreationDTO, AreaInfoDTO, AreaType, LayerInfoDTO, UpdateAreaUi from antarest.study.business.areas.hydro_management import InflowStructure, ManagementOptionsFormFields from antarest.study.business.areas.properties_management import PropertiesFormFields from antarest.study.business.areas.renewable_management import ( @@ -54,16 +54,16 @@ from antarest.study.business.link_management import LinkInfoDTO from antarest.study.business.optimization_management import OptimizationFormFields from antarest.study.business.playlist_management import PlaylistColumns -from antarest.study.business.table_mode_management import ( - BindingConstraintOperator, - ColumnsModelTypes, - TableTemplateType, -) +from antarest.study.business.table_mode_management import TableDataDTO, TableModeType from antarest.study.business.thematic_trimming_field_infos import ThematicTrimmingFormFields from antarest.study.business.timeseries_config_management import TSFormFields from antarest.study.model import PatchArea, PatchCluster from antarest.study.service import StudyService -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.area import AreaUI +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id logger = logging.getLogger(__name__) @@ -189,7 +189,7 @@ def create_link( def update_area_ui( uuid: str, area_id: str, - area_ui: AreaUI, + area_ui: UpdateAreaUi, layer: str = "0", current_user: JWTUser = Depends(auth.get_current_user), ) -> t.Any: @@ -841,47 +841,95 @@ def set_timeseries_form_values( study_service.ts_config_manager.set_field_values(study, field_values) @bp.get( - path="/studies/{uuid}/tablemode", + path="/table-schema/{table_type}", + tags=[APITag.study_data], + summary="Get table schema", + ) + def get_table_schema( + table_type: TableModeType, + current_user: JWTUser = Depends(auth.get_current_user), + ) -> JSON: + """ + Get the properties of the table columns. + + Args: + - `table_type`: The type of table to get the schema for. + """ + logger.info("Getting table schema", extra={"user": current_user.id}) + model_schema = study_service.table_mode_manager.get_table_schema(table_type) + return model_schema + + @bp.get( + path="/studies/{uuid}/table-mode/{table_type}", tags=[APITag.study_data], summary="Get table data for table form", - # `Any` because `Union[AreaColumns, LinkColumns]` not working - response_model=t.Dict[str, t.Dict[str, t.Any]], - response_model_exclude_none=True, ) def get_table_mode( uuid: str, - table_type: TableTemplateType, - columns: str, + table_type: TableModeType, + columns: str = Query("", description="A comma-separated list of columns to include in the table data"), current_user: JWTUser = Depends(auth.get_current_user), - ) -> t.Dict[str, ColumnsModelTypes]: + ) -> TableDataDTO: + """ + Get the table data for the given study and table type. + + Args: + - uuid: The UUID of the study. 
+ - table_type: The type of table to get the data for. + """ logger.info( - f"Getting template table data for study {uuid}", + f"Getting table data for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.READ, params) - - return study_service.table_mode_manager.get_table_data(study, table_type, columns.split(",")) + column_list = columns.split(",") if columns else [] + table_data = study_service.table_mode_manager.get_table_data(study, table_type, column_list) + return table_data @bp.put( - path="/studies/{uuid}/tablemode", + path="/studies/{uuid}/table-mode/{table_type}", tags=[APITag.study_data], - summary="Set table data with values from table form", + summary="Update table data with values from table form", ) - def set_table_mode( + def update_table_mode( uuid: str, - table_type: TableTemplateType, - data: t.Dict[str, ColumnsModelTypes], + table_type: TableModeType, + data: TableDataDTO = Body( + ..., + example={ + "de / nuclear_cl1": { + "enabled": True, + "group": "Nuclear", + "unitCount": 17, + "nominalCapacity": 123, + }, + "de / gas_cl1": { + "enabled": True, + "group": "Gas", + "unitCount": 15, + "nominalCapacity": 456, + }, + }, + ), current_user: JWTUser = Depends(auth.get_current_user), - ) -> None: + ) -> TableDataDTO: + """ + Update the table data for the given study and table type. + + Args: + - uuid: The UUID of the study. + - table_type: The type of table to update. + - data: The table data to update. + """ logger.info( f"Updating table data for study {uuid}", extra={"user": current_user.id}, ) params = RequestParameters(user=current_user) study = study_service.check_study_access(uuid, StudyPermissionType.WRITE, params) - - study_service.table_mode_manager.set_table_data(study, table_type, data) + table_data = study_service.table_mode_manager.update_table_data(study, table_type, data) + return table_data @bp.post( "/studies/_update_version", diff --git a/tests/integration/study_data_blueprint/test_binding_constraints.py b/tests/integration/study_data_blueprint/test_binding_constraints.py index 4da996a229..29394c3b2f 100644 --- a/tests/integration/study_data_blueprint/test_binding_constraints.py +++ b/tests/integration/study_data_blueprint/test_binding_constraints.py @@ -543,7 +543,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st }, headers=user_headers, ) - assert res.status_code == 422 + assert res.status_code == 422, res.json() description = res.json()["description"] assert "cannot fill 'values'" in description assert "'less_term_matrix'" in description @@ -560,11 +560,11 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st "operator": "less", "terms": [], "comments": "Incoherent matrix with version", - "less_term_matrix": [[]], + "lessTermMatrix": [[]], }, headers=user_headers, ) - assert res.status_code == 422 + assert res.status_code == 422, res.json() description = res.json()["description"] assert description == "You cannot fill a 'matrix_term' as these values refer to v8.7+ studies" @@ -594,7 +594,7 @@ def test_lifecycle__nominal(self, client: TestClient, user_access_token: str, st # Delete a fake binding constraint res = client.delete(f"/v1/studies/{study_id}/bindingconstraints/fake_bc", headers=user_headers) assert res.status_code == 404, res.json() - assert res.json()["exception"] == "BindingConstraintNotFoundError" + assert res.json()["exception"] == "BindingConstraintNotFound" assert 
res.json()["description"] == "Binding constraint 'fake_bc' not found" # Add a group before v8.7 diff --git a/tests/integration/study_data_blueprint/test_renewable.py b/tests/integration/study_data_blueprint/test_renewable.py index 0e57e1464b..8a9d575d97 100644 --- a/tests/integration/study_data_blueprint/test_renewable.py +++ b/tests/integration/study_data_blueprint/test_renewable.py @@ -23,6 +23,7 @@ * delete a cluster (or several clusters) * validate the consistency of the matrices (and properties) """ + import json import re import typing as t diff --git a/tests/integration/study_data_blueprint/test_st_storage.py b/tests/integration/study_data_blueprint/test_st_storage.py index 33b506fc77..b8aa0de878 100644 --- a/tests/integration/study_data_blueprint/test_st_storage.py +++ b/tests/integration/study_data_blueprint/test_st_storage.py @@ -8,14 +8,22 @@ from starlette.testclient import TestClient from antarest.core.tasks.model import TaskStatus -from antarest.study.business.areas.st_storage_management import STStorageOutput +from antarest.study.business.areas.st_storage_management import create_storage_output from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id -from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import STStorageConfig +from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import create_st_storage_config from tests.integration.utils import wait_task_completion -DEFAULT_CONFIG = json.loads(STStorageConfig(id="dummy", name="dummy").json(by_alias=True, exclude={"id", "name"})) +_ST_STORAGE_860_CONFIG = create_st_storage_config(860, name="dummy") +_ST_STORAGE_880_CONFIG = create_st_storage_config(880, name="dummy") -DEFAULT_PROPERTIES = json.loads(STStorageOutput(name="dummy").json(by_alias=True, exclude={"id", "name"})) +_ST_STORAGE_OUTPUT_860 = create_storage_output(860, cluster_id="dummy", config={"name": "dummy"}) +_ST_STORAGE_OUTPUT_880 = create_storage_output(880, cluster_id="dummy", config={"name": "dummy"}) + +DEFAULT_CONFIG_860 = json.loads(_ST_STORAGE_860_CONFIG.json(by_alias=True, exclude={"id", "name"})) +DEFAULT_CONFIG_880 = json.loads(_ST_STORAGE_880_CONFIG.json(by_alias=True, exclude={"id", "name"})) + +DEFAULT_OUTPUT_860 = json.loads(_ST_STORAGE_OUTPUT_860.json(by_alias=True, exclude={"id", "name"})) +DEFAULT_OUTPUT_880 = json.loads(_ST_STORAGE_OUTPUT_880.json(by_alias=True, exclude={"id", "name"})) # noinspection SpellCheckingInspection @@ -30,7 +38,13 @@ class TestSTStorage: """ @pytest.mark.parametrize("study_type", ["raw", "variant"]) - @pytest.mark.parametrize("study_version", [860, 880]) + @pytest.mark.parametrize( + "study_version, default_output", + [ + pytest.param(860, DEFAULT_OUTPUT_860, id="860"), + pytest.param(880, DEFAULT_OUTPUT_880, id="880"), + ], + ) def test_lifecycle__nominal( self, client: TestClient, @@ -38,6 +52,7 @@ def test_lifecycle__nominal( study_id: str, study_type: str, study_version: int, + default_output: t.Dict[str, t.Any], ) -> None: """ The purpose of this integration test is to test the endpoints @@ -68,7 +83,7 @@ def test_lifecycle__nominal( # ============================= user_headers = {"Authorization": f"Bearer {user_access_token}"} - # Upgrade study to version 860 + # Upgrade study to version 860 or above res = client.put( f"/v1/studies/{study_id}/upgrade", headers=user_headers, @@ -118,17 +133,15 @@ def test_lifecycle__nominal( assert res.status_code == 422, res.json() assert res.json()["exception"] in {"ValidationError", 
"RequestValidationError"}, res.json() - # We can create a short-term storage with the following properties: + # We can create a short-term storage with the following properties. + # Unfilled properties will be set to their default values. siemens_properties = { - **DEFAULT_PROPERTIES, "name": siemens_battery, "group": "Battery", "injectionNominalCapacity": 1450, "withdrawalNominalCapacity": 1350, "reservoirCapacity": 1500, } - if study_version < 880: - del siemens_properties["enabled"] # only exists since v8.8 res = client.post( f"/v1/studies/{study_id}/areas/{area_id}/storages", headers=user_headers, @@ -137,8 +150,8 @@ def test_lifecycle__nominal( assert res.status_code == 200, res.json() siemens_battery_id = res.json()["id"] assert siemens_battery_id == transform_name_to_id(siemens_battery) - siemens_config = {**siemens_properties, "id": siemens_battery_id, "enabled": True} - assert res.json() == siemens_config + siemens_output = {**default_output, **siemens_properties, "id": siemens_battery_id} + assert res.json() == siemens_output # reading the properties of a short-term storage res = client.get( @@ -146,7 +159,7 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # ============================= # SHORT-TERM STORAGE MATRICES @@ -195,7 +208,7 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == [siemens_config] + assert res.json() == [siemens_output] # updating properties res = client.patch( @@ -207,19 +220,19 @@ def test_lifecycle__nominal( }, ) assert res.status_code == 200, res.json() - siemens_config = { - **siemens_config, + siemens_output = { + **siemens_output, "name": "New Siemens Battery", "reservoirCapacity": 2500, } - assert res.json() == siemens_config + assert res.json() == siemens_output res = client.get( f"/v1/studies/{study_id}/areas/{area_id}/storages/{siemens_battery_id}", headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # =========================== # SHORT-TERM STORAGE UPDATE @@ -234,13 +247,13 @@ def test_lifecycle__nominal( "reservoirCapacity": 0, }, ) - siemens_config = { - **siemens_config, + siemens_output = { + **siemens_output, "initialLevel": 0.59, "reservoirCapacity": 0, } assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # An attempt to update the `efficiency` property with an invalid value # should raise a validation error. 
@@ -260,7 +273,7 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - assert res.json() == siemens_config + assert res.json() == siemens_output # ============================= # SHORT-TERM STORAGE DUPLICATION @@ -274,11 +287,11 @@ def test_lifecycle__nominal( ) assert res.status_code in {200, 201}, res.json() # asserts the config is the same - duplicated_config = dict(siemens_config) - duplicated_config["name"] = new_name # type: ignore + duplicated_output = dict(siemens_output) + duplicated_output["name"] = new_name duplicated_id = transform_name_to_id(new_name) - duplicated_config["id"] = duplicated_id # type: ignore - assert res.json() == duplicated_config + duplicated_output["id"] = duplicated_id + assert res.json() == duplicated_output # asserts the matrix has also been duplicated res = client.get( @@ -358,16 +371,16 @@ def test_lifecycle__nominal( headers=user_headers, ) assert res.status_code == 200, res.json() - siemens_config = {**DEFAULT_PROPERTIES, **siemens_properties, "id": siemens_battery_id} - grand_maison_config = {**DEFAULT_PROPERTIES, **grand_maison_properties, "id": grand_maison_id} - assert res.json() == [duplicated_config, siemens_config, grand_maison_config] + siemens_output = {**default_output, **siemens_properties, "id": siemens_battery_id} + grand_maison_output = {**default_output, **grand_maison_properties, "id": grand_maison_id} + assert res.json() == [duplicated_output, siemens_output, grand_maison_output] # We can delete the three short-term storages at once. res = client.request( "DELETE", f"/v1/studies/{study_id}/areas/{area_id}/storages", headers=user_headers, - json=[grand_maison_id, duplicated_config["id"]], + json=[grand_maison_id, duplicated_output["id"]], ) assert res.status_code == 204, res.json() assert res.text in {"", "null"} # Old FastAPI versions return 'null'. @@ -539,7 +552,22 @@ def test_lifecycle__nominal( assert res.json()["enabled"] is False @pytest.mark.parametrize("study_type", ["raw", "variant"]) - def test__default_values(self, client: TestClient, user_access_token: str, study_type: str) -> None: + @pytest.mark.parametrize( + "study_version, default_config, default_output", + [ + pytest.param(860, DEFAULT_CONFIG_860, DEFAULT_OUTPUT_860, id="860"), + pytest.param(880, DEFAULT_CONFIG_880, DEFAULT_OUTPUT_880, id="880"), + ], + ) + def test__default_values( + self, + client: TestClient, + user_access_token: str, + study_type: str, + study_version: int, + default_config: t.Dict[str, t.Any], + default_output: t.Dict[str, t.Any], + ) -> None: """ The purpose of this integration test is to test the default values of the properties of a short-term storage. 
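# A minimal sketch of how the DEFAULT_CONFIG_* / DEFAULT_OUTPUT_* constants used by
# these tests are derived: serialize a freshly built model and keep the INI aliases.
# Not part of the patch; the version and name are illustrative only.

import json

from antarest.study.storage.rawstudy.model.filesystem.config.st_storage import create_st_storage_config

cfg = create_st_storage_config(860, name="dummy")
defaults = json.loads(cfg.json(by_alias=True, exclude={"id", "name"}))
assert defaults["initiallevel"] == 0.5  # default initial level (see STStorageProperties)
assert defaults["efficiency"] == 1  # default efficiency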
@@ -553,7 +581,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study res = client.post( "/v1/studies", headers=user_headers, - params={"name": "MyStudy", "version": 860}, + params={"name": "MyStudy", "version": study_version}, ) assert res.status_code in {200, 201}, res.json() study_id = res.json() @@ -586,8 +614,8 @@ def test__default_values(self, client: TestClient, user_access_token: str, study ) assert res.status_code == 200, res.json() tesla_battery_id = res.json()["id"] - tesla_config = {**DEFAULT_PROPERTIES, "id": tesla_battery_id, "name": tesla_battery, "group": "Battery"} - assert res.json() == tesla_config + tesla_output = {**default_output, "id": tesla_battery_id, "name": tesla_battery, "group": "Battery"} + assert res.json() == tesla_output # Use the Debug mode to make sure that the initialLevel and initialLevelOptim properties # are properly set in the configuration file. @@ -598,7 +626,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study ) assert res.status_code == 200, res.json() actual = res.json() - expected = {**DEFAULT_CONFIG, "name": tesla_battery, "group": "Battery"} + expected = {**default_config, "name": tesla_battery, "group": "Battery"} assert actual == expected # We want to make sure that the default properties are applied to a study variant. @@ -637,7 +665,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study "action": "create_st_storage", "args": { "area_id": "fr", - "parameters": {**DEFAULT_CONFIG, "name": siemens_battery, "group": "Battery"}, + "parameters": {**default_config, "name": siemens_battery, "group": "Battery"}, "pmax_injection": ANY, "pmax_withdrawal": ANY, "lower_rule_curve": ANY, @@ -721,7 +749,7 @@ def test__default_values(self, client: TestClient, user_access_token: str, study assert res.status_code == 200, res.json() actual = res.json() expected = { - **DEFAULT_CONFIG, + **default_config, "name": siemens_battery, "group": "Battery", "injectionnominalcapacity": 1600, diff --git a/tests/integration/study_data_blueprint/test_table_mode.py b/tests/integration/study_data_blueprint/test_table_mode.py new file mode 100644 index 0000000000..15536b8426 --- /dev/null +++ b/tests/integration/study_data_blueprint/test_table_mode.py @@ -0,0 +1,899 @@ +import pytest +from starlette.testclient import TestClient + +from antarest.core.tasks.model import TaskStatus +from tests.integration.utils import wait_task_completion + + +# noinspection SpellCheckingInspection +@pytest.mark.unit_test +class TestTableMode: + """ + Test the end points related to the table mode. + + Those tests use the "examples/studies/STA-mini.zip" Study, + which contains the following areas: ["de", "es", "fr", "it"]. + """ + + def test_lifecycle__nominal( + self, + client: TestClient, + user_access_token: str, + study_id: str, + ) -> None: + user_headers = {"Authorization": f"Bearer {user_access_token}"} + + # In order to test the table mode for renewable clusters and short-term storage, + # it is required that the study is either in version 8.1 for renewable energies + # or in version 8.6 for short-term storage and that the renewable clusters are enabled + # in the study configuration. 
+ + # Upgrade the study to version 8.6 + res = client.put( + f"/v1/studies/{study_id}/upgrade", + headers={"Authorization": f"Bearer {user_access_token}"}, + params={"target_version": 860}, + ) + assert res.status_code == 200, res.json() + + task_id = res.json() + task = wait_task_completion(client, user_access_token, task_id) + assert task.status == TaskStatus.COMPLETED, task + + # Parameter 'renewable-generation-modelling' must be set to 'clusters' instead of 'aggregated'. + # The `enr_modelling` value must be set to "clusters" instead of "aggregated" + args = { + "target": "settings/generaldata/other preferences", + "data": {"renewable-generation-modelling": "clusters"}, + } + res = client.post( + f"/v1/studies/{study_id}/commands", + headers={"Authorization": f"Bearer {user_access_token}"}, + json=[{"action": "update_config", "args": args}], + ) + assert res.status_code == 200, res.json() + + # Table Mode - Area + # ================= + + # Get the schema of the areas table + res = client.get( + "/v1/table-schema/areas", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # Optimization - Nodal optimization + "nonDispatchablePower", + "dispatchableHydroPower", + "otherDispatchablePower", + "averageUnsuppliedEnergyCost", + "spreadUnsuppliedEnergyCost", + "averageSpilledEnergyCost", + "spreadSpilledEnergyCost", + # Optimization - Filtering + "filterSynthesis", + "filterYearByYear", + # Adequacy patch + "adequacyPatchMode", + } + + res = client.put( + f"/v1/studies/{study_id}/table-mode/areas", + headers=user_headers, + json={ + "de": { + "averageUnsuppliedEnergyCost": 3456, + "dispatchableHydroPower": False, + "filterSynthesis": "daily, monthly", # not changed + "filterYearByYear": "annual, weekly", + }, + "es": { + "adequacyPatchMode": "inside", + "spreadSpilledEnergyCost": None, # not changed + }, + }, + ) + assert res.status_code == 200, res.json() + expected_areas = { + "de": { + "adequacyPatchMode": "outside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3456, + "dispatchableHydroPower": False, + "filterSynthesis": "daily, monthly", + "filterYearByYear": "weekly, annual", + "nonDispatchablePower": True, + "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, + }, + "es": { + "adequacyPatchMode": "inside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3000, + "dispatchableHydroPower": True, + "filterSynthesis": "daily, monthly", + "filterYearByYear": "hourly, weekly, annual", + "nonDispatchablePower": True, + "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, + }, + "fr": { + "adequacyPatchMode": "outside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3000, + "dispatchableHydroPower": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", + "nonDispatchablePower": True, + "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, + }, + "it": { + "adequacyPatchMode": "outside", + "averageSpilledEnergyCost": 0, + "averageUnsuppliedEnergyCost": 3000, + "dispatchableHydroPower": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", + "nonDispatchablePower": True, + "otherDispatchablePower": True, + "spreadSpilledEnergyCost": 0, + "spreadUnsuppliedEnergyCost": 0, + }, + } + actual = res.json() + assert actual == expected_areas + + res = client.get(f"/v1/studies/{study_id}/table-mode/areas", 
headers=user_headers) + assert res.status_code == 200, res.json() + actual = res.json() + assert actual == expected_areas + + # Table Mode - Links + # ================== + + # Get the schema of the links table + res = client.get( + "/v1/table-schema/links", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + "colorRgb", + "comments", + "hurdlesCost", + "loopFlow", + "usePhaseShifter", + "transmissionCapacities", + "assetType", + "linkStyle", + "linkWidth", + "displayComments", + "filterSynthesis", + "filterYearByYear", + } + + res = client.put( + f"/v1/studies/{study_id}/table-mode/links", + headers=user_headers, + json={ + "de / fr": { + "colorRgb": "#FFA500", + "displayComments": False, + "filterSynthesis": "hourly, daily, weekly, annual", + "filterYearByYear": "hourly, daily, monthly, annual", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 2, + "loopFlow": False, + "transmissionCapacities": "ignore", + }, + "es / fr": { + "colorRgb": "#FF6347", + "displayComments": True, + "filterSynthesis": "hourly, daily, weekly, monthly, annual, annual", # duplicate is ignored + "filterYearByYear": "hourly, daily, weekly, annual", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 1, + "loopFlow": False, + "transmissionCapacities": "enabled", + "usePhaseShifter": True, + }, + "fr / it": { + "comments": "Link from France to Italie", + "assetType": "DC", # case-insensitive + }, + }, + ) + assert res.status_code == 200, res.json() + expected_links = { + "de / fr": { + "assetType": "ac", + "colorRgb": "#FFA500", + "comments": "", + "displayComments": False, + "filterSynthesis": "hourly, daily, weekly, annual", + "filterYearByYear": "hourly, daily, monthly, annual", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 2, + "loopFlow": False, + "transmissionCapacities": "ignore", + "usePhaseShifter": False, + }, + "es / fr": { + "assetType": "ac", + "colorRgb": "#FF6347", + "comments": "", + "displayComments": True, + "filterSynthesis": "hourly, daily, weekly, monthly, annual", + "filterYearByYear": "hourly, daily, weekly, annual", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 1, + "loopFlow": False, + "transmissionCapacities": "enabled", + "usePhaseShifter": True, + }, + "fr / it": { + "assetType": "dc", + "colorRgb": "#707070", + "comments": "Link from France to Italie", + "displayComments": True, + "filterSynthesis": "", + "filterYearByYear": "hourly", + "hurdlesCost": True, + "linkStyle": "plain", + "linkWidth": 1, + "loopFlow": False, + "transmissionCapacities": "enabled", + "usePhaseShifter": False, + }, + } + actual = res.json() + assert actual == expected_links + + res = client.get(f"/v1/studies/{study_id}/table-mode/links", headers=user_headers) + assert res.status_code == 200, res.json() + actual = res.json() + assert actual == expected_links + + # Table Mode - Thermal Clusters + # ============================= + + # Get the schema of the thermals table + res = client.get( + "/v1/table-schema/thermals", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # read-only fields + "id", + "name", + # Thermals fields + "group", + "enabled", + "unitCount", + "nominalCapacity", + "genTs", + "minStablePower", + "minUpTime", + "minDownTime", + "mustRun", + "spinning", + "volatilityForced", + "volatilityPlanned", + "lawForced", + "lawPlanned", + "marginalCost", + "spreadCost", + "fixedCost", + 
"startupCost", + "marketBidCost", + # pollutants - since v8.6 (except for "co2") + "co2", + "nh3", + "so2", + "nox", + "pm25", + "pm5", + "pm10", + "nmvoc", + "op1", + "op2", + "op3", + "op4", + "op5", + # since v8.7 + "costGeneration", + "efficiency", + "variableOMCost", + } + + res = client.put( + f"/v1/studies/{study_id}/table-mode/thermals", + headers=user_headers, + json={ + "de / 01_solar": { + "group": "Other 2", + "nominalCapacity": 500000, + "so2": 8.25, + "unitCount": 17, + }, + "de / 02_wind_on": { + "group": "Nuclear", + "nominalCapacity": 314159, + "co2": 123, + "unitCount": 15, + }, + }, + ) + assert res.status_code == 200, res.json() + expected_thermals = { + "de / 01_solar": { + # "id": "01_solar", + # "name": "01_solar", + "co2": 0, + "costGeneration": None, + "efficiency": None, + "enabled": True, + "fixedCost": 0, + "genTs": "use global", + "group": "Other 2", + "lawForced": "uniform", + "lawPlanned": "uniform", + "marginalCost": 10, + "marketBidCost": 10, + "minDownTime": 1, + "minStablePower": 0, + "minUpTime": 1, + "mustRun": False, + "nh3": 0, + "nmvoc": 0, + "nominalCapacity": 500000, + "nox": 0, + "op1": 0, + "op2": 0, + "op3": 0, + "op4": 0, + "op5": 0, + "pm10": 0, + "pm25": 0, + "pm5": 0, + "so2": 8.25, + "spinning": 0, + "spreadCost": 0, + "startupCost": 0, + "unitCount": 17, + "variableOMCost": None, + "volatilityForced": 0, + "volatilityPlanned": 0, + }, + "de / 02_wind_on": { + # "id": "02_wind_on", + # "name": "02_wind_on", + "co2": 123, + "costGeneration": None, + "efficiency": None, + "enabled": True, + "fixedCost": 0, + "genTs": "use global", + "group": "Nuclear", + "lawForced": "uniform", + "lawPlanned": "uniform", + "marginalCost": 20, + "marketBidCost": 20, + "minDownTime": 1, + "minStablePower": 0, + "minUpTime": 1, + "mustRun": False, + "nh3": 0, + "nmvoc": 0, + "nominalCapacity": 314159, + "nox": 0, + "op1": 0, + "op2": 0, + "op3": 0, + "op4": 0, + "op5": 0, + "pm10": 0, + "pm25": 0, + "pm5": 0, + "so2": 0, + "spinning": 0, + "spreadCost": 0, + "startupCost": 0, + "unitCount": 15, + "variableOMCost": None, + "volatilityForced": 0, + "volatilityPlanned": 0, + }, + } + assert res.json()["de / 01_solar"] == expected_thermals["de / 01_solar"] + assert res.json()["de / 02_wind_on"] == expected_thermals["de / 02_wind_on"] + + res = client.get( + f"/v1/studies/{study_id}/table-mode/thermals", + headers=user_headers, + params={"columns": ",".join(["group", "unitCount", "nominalCapacity", "so2"])}, + ) + assert res.status_code == 200, res.json() + expected = { + "de / 01_solar": {"group": "Other 2", "nominalCapacity": 500000, "so2": 8.25, "unitCount": 17}, + "de / 02_wind_on": {"group": "Nuclear", "nominalCapacity": 314159, "so2": 0, "unitCount": 15}, + "de / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "de / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 02_wind_on": {"group": "Other 1", 
"nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "es / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "fr / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 01_solar": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 02_wind_on": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 03_wind_off": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 04_res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 05_nuclear": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 06_coal": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 07_gas": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 08_non-res": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + "it / 09_hydro_pump": {"group": "Other 1", "nominalCapacity": 1000000, "so2": 0, "unitCount": 1}, + } + actual = res.json() + assert actual == expected + + # Table Mode - Renewable Clusters + # =============================== + + # Prepare data for renewable clusters tests + generators_by_country = { + "fr": { + "La Rochelle": { + "name": "La Rochelle", + "group": "solar pv", + "nominalCapacity": 2.1, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Oleron": { + "name": "Oleron", + "group": "wind offshore", + "nominalCapacity": 15, + "unitCount": 70, + "tsInterpretation": "production-factor", + }, + "Dieppe": { + "name": "Dieppe", + "group": "wind offshore", + "nominalCapacity": 8, + "unitCount": 62, + "tsInterpretation": "power-generation", + }, + }, + "it": { + "Sicile": { + "name": "Sicile", + "group": "solar pv", + "nominalCapacity": 1.8, + "unitCount": 1, + "tsInterpretation": "production-factor", + }, + "Sardaigne": { + "name": "Sardaigne", + "group": "wind offshore", + "nominalCapacity": 12, + "unitCount": 86, + "tsInterpretation": "power-generation", + }, + "Pouilles": { + "name": "Pouilles", + "enabled": False, + "group": "wind offshore", + 
"nominalCapacity": 11, + "unitCount": 40, + "tsInterpretation": "power-generation", + }, + }, + } + + for area_id, generators in generators_by_country.items(): + for generator_id, generator in generators.items(): + res = client.post( + f"/v1/studies/{study_id}/areas/{area_id}/clusters/renewable", + headers=user_headers, + json=generator, + ) + res.raise_for_status() + + # Get the schema of the renewables table + res = client.get( + "/v1/table-schema/renewables", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # read-only fields + "id", + "name", + # Renewables fields + "group", + "tsInterpretation", + "enabled", + "unitCount", + "nominalCapacity", + } + + # Update some generators using the table mode + res = client.put( + f"/v1/studies/{study_id}/table-mode/renewables", + headers=user_headers, + json={ + "fr / Dieppe": {"enabled": False}, + "fr / La Rochelle": {"enabled": True, "nominalCapacity": 3.1, "unitCount": 2}, + "it / Pouilles": {"group": "Wind Onshore"}, + }, + ) + assert res.status_code == 200, res.json() + + res = client.get( + f"/v1/studies/{study_id}/table-mode/renewables", + headers=user_headers, + params={"columns": ",".join(["group", "enabled", "unitCount", "nominalCapacity"])}, + ) + assert res.status_code == 200, res.json() + expected = { + "fr / Dieppe": {"enabled": False, "group": "Wind Offshore", "nominalCapacity": 8, "unitCount": 62}, + "fr / La Rochelle": {"enabled": True, "group": "Solar PV", "nominalCapacity": 3.1, "unitCount": 2}, + "fr / Oleron": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 15, "unitCount": 70}, + "it / Pouilles": {"enabled": False, "group": "Wind Onshore", "nominalCapacity": 11, "unitCount": 40}, + "it / Sardaigne": {"enabled": True, "group": "Wind Offshore", "nominalCapacity": 12, "unitCount": 86}, + "it / Sicile": {"enabled": True, "group": "Solar PV", "nominalCapacity": 1.8, "unitCount": 1}, + } + actual = res.json() + assert actual == expected + + # Table Mode - Short Term Storage + # =============================== + + # Get the schema of the short-term storages table + res = client.get( + "/v1/table-schema/st-storages", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # read-only fields + "id", + "name", + # Short-term storage fields + "enabled", # since v8.8 + "group", + "injectionNominalCapacity", + "withdrawalNominalCapacity", + "reservoirCapacity", + "efficiency", + "initialLevel", + "initialLevelOptim", + } + + # Prepare data for short-term storage tests + storage_by_country = { + "fr": { + "siemens": { + "name": "Siemens", + "group": "battery", + "injectionNominalCapacity": 1500, + "withdrawalNominalCapacity": 1500, + "reservoirCapacity": 1500, + "initialLevel": 0.5, + "initialLevelOptim": False, + }, + "tesla": { + "name": "Tesla", + "group": "battery", + "injectionNominalCapacity": 1200, + "withdrawalNominalCapacity": 1200, + "reservoirCapacity": 1200, + "initialLevelOptim": True, + }, + }, + "it": { + "storage3": { + "name": "storage3", + "group": "psp_open", + "injectionNominalCapacity": 1234, + "withdrawalNominalCapacity": 1020, + "reservoirCapacity": 1357, + "initialLevel": 1, + "initialLevelOptim": False, + }, + "storage4": { + "name": "storage4", + "group": "psp_open", + "injectionNominalCapacity": 567, + "withdrawalNominalCapacity": 456, + "reservoirCapacity": 500, + "initialLevelOptim": True, + }, + }, + } + for area_id, storages in 
storage_by_country.items():
+        for storage_id, storage in storages.items():
+            res = client.post(
+                f"/v1/studies/{study_id}/areas/{area_id}/storages",
+                headers=user_headers,
+                json=storage,
+            )
+            res.raise_for_status()
+
+    # Update some short-term storages using the table mode
+    res = client.put(
+        f"/v1/studies/{study_id}/table-mode/st-storages",
+        headers=user_headers,
+        json={
+            "fr / siemens": {"injectionNominalCapacity": 1550, "withdrawalNominalCapacity": 1550},
+            "fr / tesla": {"efficiency": 0.75, "initialLevel": 0.89, "initialLevelOptim": False},
+            "it / storage3": {"group": "Pondage"},
+        },
+    )
+    assert res.status_code == 200, res.json()
+    actual = res.json()
+    assert actual == {
+        "fr / siemens": {
+            # "id": "siemens",
+            # "name": "Siemens",
+            "efficiency": 1,
+            "enabled": None,
+            "group": "Battery",
+            "initialLevel": 0.5,
+            "initialLevelOptim": False,
+            "injectionNominalCapacity": 1550,
+            "reservoirCapacity": 1500,
+            "withdrawalNominalCapacity": 1550,
+        },
+        "fr / tesla": {
+            # "id": "tesla",
+            # "name": "Tesla",
+            "efficiency": 0.75,
+            "enabled": None,
+            "group": "Battery",
+            "initialLevel": 0.89,
+            "initialLevelOptim": False,
+            "injectionNominalCapacity": 1200,
+            "reservoirCapacity": 1200,
+            "withdrawalNominalCapacity": 1200,
+        },
+        "it / storage3": {
+            # "id": "storage3",
+            # "name": "storage3",
+            "efficiency": 1,
+            "enabled": None,
+            "group": "Pondage",
+            "initialLevel": 1,
+            "initialLevelOptim": False,
+            "injectionNominalCapacity": 1234,
+            "reservoirCapacity": 1357,
+            "withdrawalNominalCapacity": 1020,
+        },
+        "it / storage4": {
+            # "id": "storage4",
+            # "name": "storage4",
+            "efficiency": 1,
+            "enabled": None,
+            "group": "PSP_open",
+            "initialLevel": 0.5,
+            "initialLevelOptim": True,
+            "injectionNominalCapacity": 567,
+            "reservoirCapacity": 500,
+            "withdrawalNominalCapacity": 456,
+        },
+    }
+
+    res = client.get(
+        f"/v1/studies/{study_id}/table-mode/st-storages",
+        headers=user_headers,
+        params={
+            "columns": ",".join(
+                [
+                    "group",
+                    "injectionNominalCapacity",
+                    "withdrawalNominalCapacity",
+                    "reservoirCapacity",
+                    "unknownColumn",  # should be ignored
+                ]
+            ),
+        },
+    )
+    assert res.status_code == 200, res.json()
+    expected = {
+        "fr / siemens": {
+            "group": "Battery",
+            "injectionNominalCapacity": 1550,
+            "reservoirCapacity": 1500,
+            "withdrawalNominalCapacity": 1550,
+        },
+        "fr / tesla": {
+            "group": "Battery",
+            "injectionNominalCapacity": 1200,
+            "reservoirCapacity": 1200,
+            "withdrawalNominalCapacity": 1200,
+        },
+        "it / storage3": {
+            "group": "Pondage",
+            "injectionNominalCapacity": 1234,
+            "reservoirCapacity": 1357,
+            "withdrawalNominalCapacity": 1020,
+        },
+        "it / storage4": {
+            "group": "PSP_open",
+            "injectionNominalCapacity": 567,
+            "reservoirCapacity": 500,
+            "withdrawalNominalCapacity": 456,
+        },
+    }
+    actual = res.json()
+    assert actual == expected
+
+    # Table Mode - Binding Constraints
+    # ================================
+
+    # Prepare data for binding constraints tests
+    # Create a cluster in fr
+    fr_id = "fr"
+    res = client.post(
+        f"/v1/studies/{study_id}/areas/{fr_id}/clusters/thermal",
+        headers=user_headers,
+        json={
+            "name": "Cluster 1",
+            "group": "Nuclear",
+        },
+    )
+    assert res.status_code == 200, res.json()
+    cluster_id = res.json()["id"]
+    assert cluster_id == "Cluster 1"
+
+    # Create Binding Constraints
+    res = client.post(
+        f"/v1/studies/{study_id}/bindingconstraints",
+        json={
+            "name": "Binding Constraint 1",
+            "enabled": True,
+            "time_step": "hourly",
+            "operator": "less",
+        },
+        headers=user_headers,
+    )
+    assert res.status_code == 200, res.json()
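All of the table-mode checks in this file follow the same round trip: fetch `/v1/table-schema/{table_type}` to discover the available fields, `PUT` a partial update keyed by row ID, then `GET` the table back with an optional comma-separated `columns` filter. A minimal sketch of that pattern, assuming the same `client: TestClient` and `user_headers` fixtures used above (the helper name `table_mode_round_trip` is hypothetical, not part of this patch):

```python
def table_mode_round_trip(client, user_headers, study_id, table_type, updates, columns=()):
    # The available fields of a table type can be discovered from its schema.
    res = client.get(f"/v1/table-schema/{table_type}", headers=user_headers)
    res.raise_for_status()
    assert set(columns) <= set(res.json()["properties"])

    # Updates are partial and keyed by row ID ("area / cluster" for clusters,
    # "area1 / area2" for links, the constraint ID for binding constraints);
    # omitted fields keep their current or default values.
    res = client.put(f"/v1/studies/{study_id}/table-mode/{table_type}", headers=user_headers, json=updates)
    res.raise_for_status()

    # Reading back accepts an optional comma-separated `columns` filter;
    # unknown column names are ignored by the API.
    params = {"columns": ",".join(columns)} if columns else {}
    res = client.get(f"/v1/studies/{study_id}/table-mode/{table_type}", headers=user_headers, params=params)
    res.raise_for_status()
    return res.json()
```

The short-term storage check above, for instance, is exactly this pattern with `table_type="st-storages"` and a `columns` filter of four known fields plus one unknown name.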
+ + res = client.post( + f"/v1/studies/{study_id}/bindingconstraints", + json={ + "name": "Binding Constraint 2", + "enabled": False, + "time_step": "daily", + "operator": "greater", + "comments": "This is a binding constraint", + "filter_synthesis": "hourly, daily, weekly", + }, + headers=user_headers, + ) + assert res.status_code == 200, res.json() + + # Get the schema of the binding constraints table + res = client.get( + "/v1/table-schema/binding-constraints", + headers=user_headers, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert set(actual["properties"]) == { + # read-only fields + "id", + "name", + # Binding Constraints fields + "group", + "enabled", + "timeStep", + "operator", + "comments", + "filterSynthesis", + "filterYearByYear", + # Binding Constraints - Terms + "terms", + } + + # Update some binding constraints using the table mode + res = client.put( + f"/v1/studies/{study_id}/table-mode/binding-constraints", + headers=user_headers, + json={ + "binding constraint 1": {"comments": "Hello World!", "enabled": True}, + "binding constraint 2": {"filterSynthesis": "hourly", "filterYearByYear": "hourly", "operator": "both"}, + }, + ) + assert res.status_code == 200, res.json() + actual = res.json() + assert actual == { + "binding constraint 1": { + "comments": "Hello World!", + "enabled": True, + "filterSynthesis": "", + "filterYearByYear": "", + "operator": "less", + "timeStep": "hourly", + }, + "binding constraint 2": { + "comments": "This is a binding constraint", + "enabled": False, + "filterSynthesis": "hourly", + "filterYearByYear": "hourly", + "operator": "both", + "timeStep": "daily", + }, + } + + res = client.get( + f"/v1/studies/{study_id}/table-mode/binding-constraints", + headers=user_headers, + params={"columns": ""}, + ) + assert res.status_code == 200, res.json() + expected = { + "binding constraint 1": { + "comments": "Hello World!", + "enabled": True, + "filterSynthesis": "", + "filterYearByYear": "", + "operator": "less", + "timeStep": "hourly", + }, + "binding constraint 2": { + "comments": "This is a binding constraint", + "enabled": False, + "filterSynthesis": "hourly", + "filterYearByYear": "hourly", + "operator": "both", + "timeStep": "daily", + }, + } + actual = res.json() + assert actual == expected + + +def test_table_type_aliases(client: TestClient, user_access_token: str) -> None: + """ + Ensure that we can use the old table type aliases to get the schema of the tables. 
+ """ + user_headers = {"Authorization": f"Bearer {user_access_token}"} + # do not use `pytest.mark.parametrize`, because it is too slow + for table_type in ["area", "link", "cluster", "renewable", "binding constraint"]: + res = client.get(f"/v1/table-schema/{table_type}", headers=user_headers) + assert res.status_code == 200, f"Failed to get schema for {table_type}: {res.json()}" diff --git a/tests/integration/test_integration.py b/tests/integration/test_integration.py index 940b4c785a..8c03873992 100644 --- a/tests/integration/test_integration.py +++ b/tests/integration/test_integration.py @@ -6,28 +6,14 @@ from starlette.testclient import TestClient -from antarest.core.model import PublicMode from antarest.launcher.model import LauncherLoadDTO -from antarest.study.business.adequacy_patch_management import PriceTakingOrder from antarest.study.business.area_management import LayerInfoDTO -from antarest.study.business.areas.properties_management import AdequacyPatchMode -from antarest.study.business.areas.renewable_management import TimeSeriesInterpretation from antarest.study.business.general_management import Mode from antarest.study.business.optimization_management import ( SimplexOptimizationRange, TransmissionCapacities, UnfeasibleProblemBehavior, ) -from antarest.study.business.table_mode_management import ( - FIELDS_INFO_BY_TYPE, - AssetType, - BindingConstraintOperator, - TableTemplateType, - TransmissionCapacity, -) -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency -from antarest.study.storage.rawstudy.model.filesystem.config.renewable import RenewableClusterGroup -from antarest.study.storage.rawstudy.model.filesystem.config.thermal import LawOption, LocalTSGenerationBehavior from antarest.study.storage.variantstudy.model.command.common import CommandName from tests.integration.assets import ASSETS_DIR from tests.integration.utils import wait_for @@ -298,7 +284,7 @@ def test_main(client: TestClient, admin_access_token: str, study_id: str) -> Non res = client.get("/v1/launcher/load", headers=admin_headers) assert res.status_code == 200, res.json() - launcher_load = LauncherLoadDTO.parse_obj(res.json()) + launcher_load = LauncherLoadDTO(**res.json()) assert launcher_load.allocated_cpu_rate == 100 / (os.cpu_count() or 1) assert launcher_load.cluster_load_rate == 100 / (os.cpu_count() or 1) assert launcher_load.nb_queued_jobs == 0 @@ -521,8 +507,8 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "args": { "name": "binding constraint 1", "enabled": True, - "time_step": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, + "time_step": "hourly", + "operator": "less", "coeffs": {"area 1.cluster 1": [2.0, 4]}, }, } @@ -539,8 +525,8 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "args": { "name": "binding constraint 2", "enabled": True, - "time_step": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, + "time_step": "hourly", + "operator": "less", "coeffs": {}, }, } @@ -797,7 +783,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "ntcBetweenPhysicalAreasOutAdequacyPatch": True, "checkCsrCostFunction": False, "includeHurdleCostCsr": False, - "priceTakingOrder": PriceTakingOrder.DENS.value, + "priceTakingOrder": "DENS", "thresholdInitiateCurtailmentSharingRule": 0.0, "thresholdDisplayLocalMatchingRuleViolations": 0.0, 
"thresholdCsrVariableBoundsRelaxation": 3, @@ -808,7 +794,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: headers=admin_headers, json={ "ntcBetweenPhysicalAreasOutAdequacyPatch": False, - "priceTakingOrder": PriceTakingOrder.LOAD.value, + "priceTakingOrder": "Load", "thresholdDisplayLocalMatchingRuleViolations": 1.1, }, ) @@ -820,7 +806,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "ntcBetweenPhysicalAreasOutAdequacyPatch": False, "checkCsrCostFunction": False, "includeHurdleCostCsr": False, - "priceTakingOrder": PriceTakingOrder.LOAD.value, + "priceTakingOrder": "Load", "thresholdInitiateCurtailmentSharingRule": 0.0, "thresholdDisplayLocalMatchingRuleViolations": 1.1, "thresholdCsrVariableBoundsRelaxation": 3, @@ -1161,7 +1147,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "otherDispatchPower": True, "filterSynthesis": {"hourly", "daily", "weekly", "monthly", "annual"}, "filterByYear": {"hourly", "daily", "weekly", "monthly", "annual"}, - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, + "adequacyPatchMode": "outside", } client.put( @@ -1175,7 +1161,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "otherDispatchPower": False, "filterSynthesis": ["monthly", "annual"], "filterByYear": ["hourly", "daily", "annual"], - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, + "adequacyPatchMode": "inside", }, ) res_properties_config = client.get(f"/v1/studies/{study_id}/areas/area 1/properties/form", headers=admin_headers) @@ -1190,7 +1176,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "otherDispatchPower": False, "filterSynthesis": {"monthly", "annual"}, "filterByYear": {"hourly", "daily", "annual"}, - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, + "adequacyPatchMode": "inside", } # Hydro form @@ -1327,452 +1313,6 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "ntc": {"stochasticTsStatus": False, "intraModal": False}, } - # --- TableMode START --- - - table_mode_url = f"/v1/studies/{study_id}/tablemode" - - # Table Mode - Area - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.AREA.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.AREA]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1": { - "nonDispatchablePower": False, - "dispatchableHydroPower": False, - "otherDispatchablePower": False, - "averageUnsuppliedEnergyCost": 2.0, - "spreadUnsuppliedEnergyCost": 0.0, - "averageSpilledEnergyCost": 4.0, - "spreadSpilledEnergyCost": 0.0, - "filterSynthesis": "monthly, annual", - "filterYearByYear": "hourly, daily, annual", - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, - }, - "area 2": { - "nonDispatchablePower": True, - "dispatchableHydroPower": True, - "otherDispatchablePower": True, - "averageUnsuppliedEnergyCost": 0.0, - "spreadUnsuppliedEnergyCost": 0.0, - "averageSpilledEnergyCost": 0.0, - "spreadSpilledEnergyCost": 0.0, - "filterSynthesis": "hourly, daily, weekly, monthly, annual", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.AREA.value, - }, - json={ - "area 1": { - "nonDispatchablePower": True, - "spreadSpilledEnergyCost": 1.1, - 
"filterYearByYear": "monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, - }, - "area 2": { - "nonDispatchablePower": False, - "spreadSpilledEnergyCost": 3.0, - "filterSynthesis": "hourly", - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.AREA.value, - "columns": ",".join(list(FIELDS_INFO_BY_TYPE[TableTemplateType.AREA])), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1": { - "nonDispatchablePower": True, - "dispatchableHydroPower": False, - "otherDispatchablePower": False, - "averageUnsuppliedEnergyCost": 2.0, - "spreadUnsuppliedEnergyCost": 0.0, - "averageSpilledEnergyCost": 4.0, - "spreadSpilledEnergyCost": 1.1, - "filterSynthesis": "monthly, annual", - "filterYearByYear": "monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.OUTSIDE.value, - }, - "area 2": { - "nonDispatchablePower": False, - "dispatchableHydroPower": True, - "otherDispatchablePower": True, - "averageUnsuppliedEnergyCost": 0.0, - "spreadUnsuppliedEnergyCost": 0.0, - "averageSpilledEnergyCost": 0.0, - "spreadSpilledEnergyCost": 3.0, - "filterSynthesis": "hourly", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - "adequacyPatchMode": AdequacyPatchMode.INSIDE.value, - }, - } - - # Table Mode - Link - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.LINK.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.LINK]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / area 2": { - "hurdlesCost": False, - "loopFlow": False, - "usePhaseShifter": False, - "transmissionCapacities": "enabled", - "assetType": "ac", - "linkStyle": "plain", - "linkWidth": True, - "displayComments": True, - "filterSynthesis": "hourly, daily, weekly, monthly, annual", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - } - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.LINK.value, - }, - json={ - "area 1 / area 2": { - "hurdlesCost": True, - "transmissionCapacities": TransmissionCapacity.IGNORE.value, - "assetType": AssetType.GAZ.value, - "filterSynthesis": "daily,annual", - } - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.LINK.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.LINK]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / area 2": { - "hurdlesCost": True, - "loopFlow": False, - "usePhaseShifter": False, - "transmissionCapacities": "ignore", - "assetType": "gaz", - "linkStyle": "plain", - "linkWidth": True, - "displayComments": True, - "filterSynthesis": "daily,annual", - "filterYearByYear": "hourly, daily, weekly, monthly, annual", - } - } - - # Table Mode - Cluster - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.CLUSTER.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.CLUSTER]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster 1": { - "group": "", - "enabled": True, - "mustRun": False, - "unitCount": 0, - "nominalCapacity": 0, - "minStablePower": 0, - "spinning": 0, - "minUpTime": 1, - "minDownTime": 1, - "co2": 
0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "use global", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "uniform", - }, - "area 2 / cluster 2": { - "group": "", - "enabled": True, - "mustRun": False, - "unitCount": 0, - "nominalCapacity": 0, - "minStablePower": 0, - "spinning": 0, - "minUpTime": 1, - "minDownTime": 1, - "co2": 0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "use global", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "uniform", - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.CLUSTER.value, - }, - json={ - "area 1 / cluster 1": { - "enabled": False, - "unitCount": 3, - "spinning": 8, - "tsGen": LocalTSGenerationBehavior.FORCE_GENERATION.value, - "lawPlanned": LawOption.GEOMETRIC.value, - }, - "area 2 / cluster 2": { - "nominalCapacity": 2, - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.CLUSTER.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.CLUSTER]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster 1": { - "group": "", - "enabled": False, - "mustRun": False, - "unitCount": 3, - "nominalCapacity": 0, - "minStablePower": 0, - "spinning": 8, - "minUpTime": 1, - "minDownTime": 1, - "co2": 0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "force generation", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "geometric", - }, - "area 2 / cluster 2": { - "group": "", - "enabled": True, - "mustRun": False, - "unitCount": 0, - "nominalCapacity": 2, - "minStablePower": 0, - "spinning": 0, - "minUpTime": 1, - "minDownTime": 1, - "co2": 0, - "marginalCost": 0, - "fixedCost": 0, - "startupCost": 0, - "marketBidCost": 0, - "spreadCost": 0, - "tsGen": "use global", - "volatilityForced": 0, - "volatilityPlanned": 0, - "lawForced": "uniform", - "lawPlanned": "uniform", - }, - } - - # Table Mode - Renewable - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.RENEWABLE.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster renewable 1": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.POWER_GENERATION.value, - "enabled": True, - "unitCount": 0, - "nominalCapacity": 0, - }, - "area 2 / cluster renewable 2": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.POWER_GENERATION.value, - "enabled": True, - "unitCount": 0, - "nominalCapacity": 0, - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.RENEWABLE.value, - }, - json={ - "area 1 / cluster renewable 1": { - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR.value, - "enabled": False, - }, - "area 2 / cluster renewable 2": { - "unitCount": 2, - "nominalCapacity": 13, - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.RENEWABLE.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.RENEWABLE]), - 
}, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "area 1 / cluster renewable 1": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR.value, - "enabled": False, - "unitCount": 0, - "nominalCapacity": 0, - }, - "area 2 / cluster renewable 2": { - "group": "", - "tsInterpretation": TimeSeriesInterpretation.POWER_GENERATION.value, - "enabled": True, - "unitCount": 2, - "nominalCapacity": 13, - }, - } - - # Table Mode - Binding Constraint - - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.BINDING_CONSTRAINT.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.BINDING_CONSTRAINT]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "binding constraint 1": { - "enabled": True, - "type": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, - "group": "default", - }, - "binding constraint 2": { - "enabled": True, - "type": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.LESS.value, - "group": "default", - }, - } - - client.put( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.BINDING_CONSTRAINT.value, - }, - json={ - "binding constraint 1": { - "enabled": False, - "operator": BindingConstraintOperator.BOTH.value, - }, - "binding constraint 2": { - "type": BindingConstraintFrequency.WEEKLY.value, - "operator": BindingConstraintOperator.EQUAL.value, - }, - }, - ) - res_table_data = client.get( - table_mode_url, - headers=admin_headers, - params={ - "table_type": TableTemplateType.BINDING_CONSTRAINT.value, - "columns": ",".join(FIELDS_INFO_BY_TYPE[TableTemplateType.BINDING_CONSTRAINT]), - }, - ) - res_table_data_json = res_table_data.json() - assert res_table_data_json == { - "binding constraint 1": { - "enabled": False, - "type": BindingConstraintFrequency.HOURLY.value, - "operator": BindingConstraintOperator.BOTH.value, - "group": "default", - }, - "binding constraint 2": { - "enabled": True, - "type": BindingConstraintFrequency.WEEKLY.value, - "operator": BindingConstraintOperator.EQUAL.value, - "group": "default", - }, - } - - res = client.get(f"/v1/studies/{study_id}/bindingconstraints/binding constraint 1", headers=admin_headers) - binding_constraint_1 = res.json() - assert res.status_code == 200, res.json() - - term = binding_constraint_1["terms"][0] - assert term["id"] == "area 1.cluster 1" - assert term["weight"] == 2.0 - assert term["offset"] == 4 - - # --- TableMode END --- - # Renewable form res = client.put( @@ -1780,7 +1320,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: headers=admin_headers, json={ "name": "cluster renewable 1 renamed", - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR, + "tsInterpretation": "production-factor", "unitCount": 9, "enabled": False, "nominalCapacity": 3, @@ -1794,11 +1334,11 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: ) expected = { "enabled": False, - "group": RenewableClusterGroup.OTHER1, # Default group used when not specified. + "group": "Other RES 1", # Default group used when not specified. 
"id": "cluster renewable 1", "name": "cluster renewable 1 renamed", "nominalCapacity": 3.0, - "tsInterpretation": TimeSeriesInterpretation.PRODUCTION_FACTOR, + "tsInterpretation": "production-factor", "unitCount": 9, } assert res.status_code == 200, res.json() @@ -1886,11 +1426,11 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "color_r": 255, "color_g": 0, "color_b": 100, - "layers": 0, + "layers": "0", }, "layerX": {"0": 100}, "layerY": {"0": 100}, - "layerColor": {"0": "255 , 0 , 100"}, + "layerColor": {"0": "255, 0, 100"}, }, "area 2": { "ui": { @@ -1903,7 +1443,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: }, "layerX": {"0": 0, "1": 105}, "layerY": {"0": 0, "1": 105}, - "layerColor": {"0": "230 , 108 , 44", "1": "255 , 10 , 100"}, + "layerColor": {"0": "230, 108, 44", "1": "255, 10, 100"}, }, } @@ -1928,7 +1468,7 @@ def test_area_management(client: TestClient, admin_access_token: str) -> None: "min-stable-power": None, "min-up-time": None, "name": "cluster 2", - "nominalcapacity": 2, + "nominalcapacity": 0, "spinning": None, "spread-cost": None, "type": None, @@ -2054,7 +1594,7 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N ).json() res = client.get(f"v1/studies/{uuid}", headers=admin_headers).json() assert res["groups"] == [{"id": "admin", "name": "admin"}] - assert res["public_mode"] == PublicMode.NONE + assert res["public_mode"] == "NONE" # Create user George who belongs to no group client.post( @@ -2074,7 +1614,7 @@ def test_import(client: TestClient, admin_access_token: str, study_id: str) -> N ).json() res = client.get(f"v1/studies/{uuid}", headers=georges_headers).json() assert res["groups"] == [] - assert res["public_mode"] == PublicMode.READ + assert res["public_mode"] == "READ" # Study importer works for 7z files res = client.post( @@ -2142,7 +1682,7 @@ def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> Non # asserts that it has admin groups and PublicMode to NONE res = client.get(f"/v1/studies/{copied.json()}", headers=admin_headers).json() assert res["groups"] == [{"id": "admin", "name": "admin"}] - assert res["public_mode"] == PublicMode.NONE + assert res["public_mode"] == "NONE" # Connect with user George who belongs to no group res = client.post("/v1/login", json={"username": "George", "password": "mypass"}) @@ -2157,4 +1697,4 @@ def test_copy(client: TestClient, admin_access_token: str, study_id: str) -> Non # asserts that it has no groups and PublicMode to READ res = client.get(f"/v1/studies/{copied.json()}", headers=admin_headers).json() assert res["groups"] == [] - assert res["public_mode"] == PublicMode.READ + assert res["public_mode"] == "READ" diff --git a/tests/storage/business/test_arealink_manager.py b/tests/storage/business/test_arealink_manager.py index 4caee7b7bd..a8beff5fc0 100644 --- a/tests/storage/business/test_arealink_manager.py +++ b/tests/storage/business/test_arealink_manager.py @@ -11,7 +11,7 @@ from antarest.core.utils.fastapi_sqlalchemy import db from antarest.matrixstore.repository import MatrixContentRepository from antarest.matrixstore.service import SimpleMatrixService -from antarest.study.business.area_management import AreaCreationDTO, AreaManager, AreaType, AreaUI +from antarest.study.business.area_management import AreaCreationDTO, AreaManager, AreaType, UpdateAreaUi from antarest.study.business.link_management import LinkInfoDTO, LinkManager from antarest.study.model import Patch, PatchArea, 
PatchCluster, RawStudy, StudyAdditionalData
 from antarest.study.repository import StudyMetadataRepository
 
@@ -111,7 +111,7 @@ def test_area_crud(empty_study: FileStudy, matrix_service: SimpleMatrixService):
     assert len(empty_study.config.areas.keys()) == 1
     assert json.loads((empty_study.config.study_path / "patch.json").read_text())["areas"]["test"]["country"] is None
 
-    area_manager.update_area_ui(study, "test", AreaUI(x=100, y=200, color_rgb=(255, 0, 100)))
+    area_manager.update_area_ui(study, "test", UpdateAreaUi(x=100, y=200, color_rgb=(255, 0, 100)))
     assert empty_study.tree.get(["input", "areas", "test", "ui", "ui"]) == {
         "x": 100,
         "y": 200,
@@ -157,7 +157,7 @@ def test_area_crud(empty_study: FileStudy, matrix_service: SimpleMatrixService):
     assert (empty_study.config.study_path / "patch.json").exists()
     assert json.loads((empty_study.config.study_path / "patch.json").read_text())["areas"]["test"]["country"] == "FR"
 
-    area_manager.update_area_ui(study, "test", AreaUI(x=100, y=200, color_rgb=(255, 0, 100)))
+    area_manager.update_area_ui(study, "test", UpdateAreaUi(x=100, y=200, color_rgb=(255, 0, 100)))
     variant_study_service.append_commands.assert_called_with(
         variant_id,
         [
@@ -194,7 +194,7 @@ def test_area_crud(empty_study: FileStudy, matrix_service: SimpleMatrixService):
             },
             {
                 "target": "input/areas/test/ui/layerColor/0",
-                "data": "255 , 0 , 100",
+                "data": "255,0,100",
             },
         ],
     ),
diff --git a/tests/storage/repository/filesystem/config/test_config_files.py b/tests/storage/repository/filesystem/config/test_config_files.py
index d88c363d5f..f07ac8f3db 100644
--- a/tests/storage/repository/filesystem/config/test_config_files.py
+++ b/tests/storage/repository/filesystem/config/test_config_files.py
@@ -6,10 +6,7 @@
 
 import pytest
 
-from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import (
-    BindingConstraintDTO,
-    BindingConstraintFrequency,
-)
+from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency
 from antarest.study.storage.rawstudy.model.filesystem.config.files import (
     _parse_links_filtering,
     _parse_renewables,
@@ -21,6 +18,7 @@
 )
 from antarest.study.storage.rawstudy.model.filesystem.config.model import (
     Area,
+    BindingConstraintDTO,
     DistrictSet,
     FileStudyTreeConfig,
     Link,
diff --git a/tests/study/business/areas/test_st_storage_management.py b/tests/study/business/areas/test_st_storage_management.py
index 4c4db4e784..74089bc7cd 100644
--- a/tests/study/business/areas/test_st_storage_management.py
+++ b/tests/study/business/areas/test_st_storage_management.py
@@ -59,6 +59,11 @@
 
 LIST_CFG = IniReader().read(io.StringIO(LIST_INI))
 
+ALL_STORAGES = {
+    "west": {"list": LIST_CFG},
+    "east": {"list": {}},
+}
+
 
 class TestSTStorageManager:
     @pytest.fixture(name="study_storage_service")
@@ -99,6 +104,113 @@ def study_uuid_fixture(self, db_session: Session) -> str:
         db_session.commit()
         return t.cast(str, raw_study.id)
 
+    def test_get_all_storages__nominal_case(
+        self,
+        db_session: Session,
+        study_storage_service: StudyStorageService,
+        study_uuid: str,
+    ) -> None:
+        """
+        This unit test verifies the behavior of the `get_all_storages_props`
+        method in the `STStorageManager` class under nominal conditions.
+        It checks whether the method returns the expected storage lists
+        for each area, based on a specific configuration.
+        """
+        # The study must be fetched from the database
+        study: RawStudy = db_session.query(Study).get(study_uuid)
+
+        # Prepare the mocks
+        storage = study_storage_service.get_storage(study)
+        file_study = storage.get_raw(study)
+        file_study.tree = Mock(
+            spec=FileStudyTree,
+            get=Mock(return_value=ALL_STORAGES),
+        )
+
+        # Given the following arguments
+        manager = STStorageManager(study_storage_service)
+
+        # run
+        all_storages = manager.get_all_storages_props(study)
+
+        # Check
+        actual = {
+            area_id: [form.dict(by_alias=True) for form in clusters_by_ids.values()]
+            for area_id, clusters_by_ids in all_storages.items()
+        }
+        expected = {
+            "west": [
+                {
+                    "id": "storage1",
+                    "enabled": None,
+                    "group": STStorageGroup.BATTERY,
+                    "name": "Storage1",
+                    "injectionNominalCapacity": 1500.0,
+                    "withdrawalNominalCapacity": 1500.0,
+                    "reservoirCapacity": 20000.0,
+                    "efficiency": 0.94,
+                    "initialLevel": 0.5,
+                    "initialLevelOptim": True,
+                },
+                {
+                    "id": "storage2",
+                    "enabled": None,
+                    "group": STStorageGroup.PSP_CLOSED,
+                    "name": "Storage2",
+                    "injectionNominalCapacity": 2000.0,
+                    "withdrawalNominalCapacity": 1500.0,
+                    "reservoirCapacity": 20000.0,
+                    "efficiency": 0.78,
+                    "initialLevel": 0.5,
+                    "initialLevelOptim": False,
+                },
+                {
+                    "id": "storage3",
+                    "enabled": None,
+                    "group": STStorageGroup.PSP_CLOSED,
+                    "name": "Storage3",
+                    "injectionNominalCapacity": 1500.0,
+                    "withdrawalNominalCapacity": 1500.0,
+                    "reservoirCapacity": 21000.0,
+                    "efficiency": 0.72,
+                    "initialLevel": 1.0,
+                    "initialLevelOptim": False,
+                },
+            ],
+        }
+        assert actual == expected
+
+    def test_get_all_storages__config_not_found(
+        self,
+        db_session: Session,
+        study_storage_service: StudyStorageService,
+        study_uuid: str,
+    ) -> None:
+        """
+        This test verifies that when the `get_all_storages_props` method is
+        called for a study whose configuration cannot be found
+        (indicated by the `KeyError` raised by the mock), it correctly
+        raises the `STStorageConfigNotFound` exception with the expected error
+        message containing the study ID.
+ """ + # The study must be fetched from the database + study: RawStudy = db_session.query(Study).get(study_uuid) + + # Prepare the mocks + storage = study_storage_service.get_storage(study) + file_study = storage.get_raw(study) + file_study.tree = Mock( + spec=FileStudyTree, + get=Mock(side_effect=KeyError("Oops!")), + ) + + # Given the following arguments + manager = STStorageManager(study_storage_service) + + # run + with pytest.raises(STStorageConfigNotFound, match="not found"): + manager.get_all_storages_props(study) + def test_get_st_storages__nominal_case( self, db_session: Session, @@ -141,7 +253,7 @@ def test_get_st_storages__nominal_case( "name": "Storage1", "reservoirCapacity": 20000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, # present with default value even if the study is in v8.6 + "enabled": None, }, { "efficiency": 0.78, @@ -153,7 +265,7 @@ def test_get_st_storages__nominal_case( "name": "Storage2", "reservoirCapacity": 20000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, + "enabled": None, }, { "efficiency": 0.72, @@ -165,7 +277,7 @@ def test_get_st_storages__nominal_case( "name": "Storage3", "reservoirCapacity": 21000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, + "enabled": None, }, ] assert actual == expected @@ -252,7 +364,7 @@ def test_get_st_storage__nominal_case( "name": "Storage1", "reservoirCapacity": 20000.0, "withdrawalNominalCapacity": 1500.0, - "enabled": True, + "enabled": None, } assert actual == expected diff --git a/tests/variantstudy/model/command/test_create_area.py b/tests/variantstudy/model/command/test_create_area.py index 62e01aeba4..330067db56 100644 --- a/tests/variantstudy/model/command/test_create_area.py +++ b/tests/variantstudy/model/command/test_create_area.py @@ -3,7 +3,7 @@ import pytest -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, transform_name_to_id +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter from antarest.study.storage.variantstudy.model.command.create_area import CreateArea @@ -14,7 +14,7 @@ class TestCreateArea: @pytest.mark.parametrize("version", [600, 650, 810, 830, 860]) - @pytest.mark.parametrize("enr_modelling", list(ENR_MODELLING)) + @pytest.mark.parametrize("enr_modelling", list(EnrModelling)) def test_apply( self, empty_study: FileStudy, @@ -132,7 +132,7 @@ def test_apply( assert (study_path / "input" / "thermal" / "clusters" / area_id / "list.ini").exists() # Renewable Clusters - if version >= 810 and empty_study.config.enr_modelling == ENR_MODELLING.CLUSTERS.value: + if version >= 810 and empty_study.config.enr_modelling == EnrModelling.CLUSTERS.value: assert (study_path / "input" / "renewables" / "clusters" / area_id).is_dir() assert (study_path / "input" / "renewables" / "clusters" / area_id / "list.ini").exists() diff --git a/tests/variantstudy/model/command/test_create_renewables_cluster.py b/tests/variantstudy/model/command/test_create_renewables_cluster.py index ecec2fd882..51e553bcb8 100644 --- a/tests/variantstudy/model/command/test_create_renewables_cluster.py +++ b/tests/variantstudy/model/command/test_create_renewables_cluster.py @@ -1,10 +1,11 @@ import configparser import re +from unittest import mock import pytest from pydantic import ValidationError -from 
antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, transform_name_to_id +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter from antarest.study.storage.variantstudy.model.command.common import CommandName @@ -16,7 +17,8 @@ class TestCreateRenewablesCluster: - def test_init(self, command_context: CommandContext): + # noinspection SpellCheckingInspection + def test_init(self, command_context: CommandContext) -> None: cl = CreateRenewablesCluster( area_id="foo", cluster_name="Cluster1", @@ -34,12 +36,13 @@ def test_init(self, command_context: CommandContext): assert cl.cluster_name == "Cluster1" assert cl.parameters == {"group": "Solar Thermal", "nominalcapacity": "2400", "unitcount": "2"} - def test_validate_cluster_name(self, command_context: CommandContext): + def test_validate_cluster_name(self, command_context: CommandContext) -> None: with pytest.raises(ValidationError, match="cluster_name"): CreateRenewablesCluster(area_id="fr", cluster_name="%", command_context=command_context, parameters={}) - def test_apply(self, empty_study: FileStudy, command_context: CommandContext): - empty_study.config.enr_modelling = ENR_MODELLING.CLUSTERS.value + def test_apply(self, empty_study: FileStudy, command_context: CommandContext) -> None: + empty_study.config.enr_modelling = EnrModelling.CLUSTERS.value + empty_study.config.version = 810 study_path = empty_study.config.study_path area_name = "DE" area_id = transform_name_to_id(area_name, lower=True) @@ -107,7 +110,8 @@ def test_apply(self, empty_study: FileStudy, command_context: CommandContext): flags=re.IGNORECASE, ) - def test_to_dto(self, command_context: CommandContext): + # noinspection SpellCheckingInspection + def test_to_dto(self, command_context: CommandContext) -> None: command = CreateRenewablesCluster( area_id="foo", cluster_name="Cluster1", @@ -127,7 +131,7 @@ def test_to_dto(self, command_context: CommandContext): } -def test_match(command_context: CommandContext): +def test_match(command_context: CommandContext) -> None: base = CreateRenewablesCluster( area_id="foo", cluster_name="foo", @@ -159,23 +163,25 @@ def test_match(command_context: CommandContext): assert base.get_inner_matrices() == [] -def test_revert(command_context: CommandContext): +def test_revert(command_context: CommandContext) -> None: base = CreateRenewablesCluster( - area_id="foo", - cluster_name="foo", + area_id="area_foo", + cluster_name="cl1", parameters={}, command_context=command_context, ) - assert CommandReverter().revert(base, [], None) == [ + file_study = mock.MagicMock(spec=FileStudy) + revert_cmd = CommandReverter().revert(base, [], file_study) + assert revert_cmd == [ RemoveRenewablesCluster( - area_id="foo", - cluster_id="foo", + area_id="area_foo", + cluster_id="cl1", command_context=command_context, ) ] -def test_create_diff(command_context: CommandContext): +def test_create_diff(command_context: CommandContext) -> None: base = CreateRenewablesCluster( area_id="foo", cluster_name="foo", diff --git a/tests/variantstudy/model/command/test_manage_binding_constraints.py b/tests/variantstudy/model/command/test_manage_binding_constraints.py index 2ca4015808..d79f744960 100644 --- a/tests/variantstudy/model/command/test_manage_binding_constraints.py +++ 
b/tests/variantstudy/model/command/test_manage_binding_constraints.py @@ -4,7 +4,10 @@ import pytest from antarest.study.storage.rawstudy.ini_reader import IniReader -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.business.command_extractor import CommandExtractor from antarest.study.storage.variantstudy.business.command_reverter import CommandReverter @@ -15,7 +18,6 @@ default_bc_hourly, default_bc_weekly_daily, ) -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster @@ -363,6 +365,8 @@ def test_revert(command_context: CommandContext): enabled=True, time_step=BindingConstraintFrequency.HOURLY, operator=BindingConstraintOperator.EQUAL, + filter_year_by_year="", + filter_synthesis="", coeffs={"a": [0.3]}, values=hourly_matrix_id, command_context=command_context, diff --git a/tests/variantstudy/model/command/test_remove_area.py b/tests/variantstudy/model/command/test_remove_area.py index 118d45e0d8..90c19d34b9 100644 --- a/tests/variantstudy/model/command/test_remove_area.py +++ b/tests/variantstudy/model/command/test_remove_area.py @@ -1,9 +1,11 @@ import pytest -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster diff --git a/tests/variantstudy/model/command/test_remove_cluster.py b/tests/variantstudy/model/command/test_remove_cluster.py index faae51f5c7..f0dd04f2b1 100644 --- a/tests/variantstudy/model/command/test_remove_cluster.py +++ b/tests/variantstudy/model/command/test_remove_cluster.py @@ -2,10 +2,12 @@ import pytest from checksumdir import dirhash -from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import BindingConstraintFrequency +from antarest.study.storage.rawstudy.model.filesystem.config.binding_constraint import ( + BindingConstraintFrequency, + BindingConstraintOperator, +) from antarest.study.storage.rawstudy.model.filesystem.config.model import transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy -from antarest.study.storage.variantstudy.model.command.common import BindingConstraintOperator from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from 
antarest.study.storage.variantstudy.model.command.create_binding_constraint import CreateBindingConstraint from antarest.study.storage.variantstudy.model.command.create_cluster import CreateCluster diff --git a/tests/variantstudy/model/command/test_remove_renewables_cluster.py b/tests/variantstudy/model/command/test_remove_renewables_cluster.py index 26eaa52837..42573e8b74 100644 --- a/tests/variantstudy/model/command/test_remove_renewables_cluster.py +++ b/tests/variantstudy/model/command/test_remove_renewables_cluster.py @@ -1,6 +1,6 @@ from checksumdir import dirhash -from antarest.study.storage.rawstudy.model.filesystem.config.model import ENR_MODELLING, transform_name_to_id +from antarest.study.storage.rawstudy.model.filesystem.config.model import EnrModelling, transform_name_to_id from antarest.study.storage.rawstudy.model.filesystem.factory import FileStudy from antarest.study.storage.variantstudy.model.command.create_area import CreateArea from antarest.study.storage.variantstudy.model.command.create_renewables_cluster import CreateRenewablesCluster @@ -11,7 +11,7 @@ class TestRemoveRenewablesCluster: def test_apply(self, empty_study: FileStudy, command_context: CommandContext): - empty_study.config.enr_modelling = ENR_MODELLING.CLUSTERS.value + empty_study.config.enr_modelling = EnrModelling.CLUSTERS.value empty_study.config.version = 810 area_name = "Area_name" area_id = transform_name_to_id(area_name) diff --git a/webapp/public/locales/en/main.json b/webapp/public/locales/en/main.json index f3d8e241a9..788dec17c1 100644 --- a/webapp/public/locales/en/main.json +++ b/webapp/public/locales/en/main.json @@ -389,8 +389,9 @@ "study.configuration.advancedParameters.simulationCores": "Simulation cores", "study.configuration.advancedParameters.renewableGenerationModeling": "Renewable generation modeling", "study.configuration.economicOpt": "Economic Opt.", - "study.configuration.geographicTrimmingAreas": "Geographic Trimming (areas)", - "study.configuration.geographicTrimmingLinks": "Geographic Trimming (links)", + "study.configuration.geographicTrimmingAreas": "Geographic Trimming (Areas)", + "study.configuration.geographicTrimmingLinks": "Geographic Trimming (Links)", + "study.configuration.geographicTrimmingBindingConstraints": "Geographic Trimming (Binding Constraints)", "study.modelization.properties": "Properties", "study.modelization.properties.energyCost": "Energy cost (€/Wh)", "study.modelization.properties.unsupplied": "Unsupplied", diff --git a/webapp/public/locales/fr/main.json b/webapp/public/locales/fr/main.json index bec0e911cc..8fc1b6c90c 100644 --- a/webapp/public/locales/fr/main.json +++ b/webapp/public/locales/fr/main.json @@ -391,6 +391,7 @@ "study.configuration.economicOpt": "Options économiques", "study.configuration.geographicTrimmingAreas": "Filtre géographique (zones)", "study.configuration.geographicTrimmingLinks": "Filtre géographique (liens)", + "study.configuration.geographicTrimmingBindingConstraints": "Filtre géographique (contraintes couplantes)", "study.modelization.properties": "Propriétés", "study.modelization.properties.energyCost": "Coût de l'énergie", "study.modelization.properties.unsupplied": "Non distribuée", diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx index 9e301154bd..9793e24395 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx +++ 
b/webapp/src/components/App/Singlestudy/explore/Configuration/AdequacyPatch/index.tsx @@ -53,7 +53,7 @@ function AdequacyPatch() { content: ( ), diff --git a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx index eb9550e2bb..fd649e9e7d 100644 --- a/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx +++ b/webapp/src/components/App/Singlestudy/explore/Configuration/index.tsx @@ -31,6 +31,10 @@ function Configuration() { { id: 5, name: t("study.configuration.economicOpt") }, { id: 6, name: t("study.configuration.geographicTrimmingAreas") }, { id: 7, name: t("study.configuration.geographicTrimmingLinks") }, + { + id: 8, + name: t("study.configuration.geographicTrimmingBindingConstraints"), + }, ].filter(Boolean), [study.version, t], ); @@ -63,7 +67,7 @@ function Configuration() { () => ( ( ), @@ -91,7 +95,17 @@ function Configuration() { () => ( + ), + ], + [ + R.equals(8), + () => ( + ), diff --git a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx index 24f7accc77..9e5c223f6f 100644 --- a/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx +++ b/webapp/src/components/App/Singlestudy/explore/TableModeList/dialogs/CreateTemplateTableDialog.tsx @@ -43,7 +43,7 @@ function CreateTemplateTableDialog(props: Props) { config={{ defaultValues: { name: "", - type: "area", + type: "areas", columns: [], }, }} diff --git a/webapp/src/components/common/TableMode.tsx b/webapp/src/components/common/TableMode.tsx index e92ff18011..e5770d79d2 100644 --- a/webapp/src/components/common/TableMode.tsx +++ b/webapp/src/components/common/TableMode.tsx @@ -1,3 +1,4 @@ +import { useEffect, useState } from "react"; import { StudyMetadata } from "../../common/types"; import usePromise from "../../hooks/usePromise"; import { @@ -12,6 +13,8 @@ import { import { SubmitHandlerPlus } from "./Form/types"; import TableForm from "./TableForm"; import UsePromiseCond from "./utils/UsePromiseCond"; +import GridOffIcon from "@mui/icons-material/GridOff"; +import SimpleContent from "./page/SimpleContent"; export interface TableModeProps { studyId: StudyMetadata["id"]; @@ -21,10 +24,31 @@ export interface TableModeProps { function TableMode(props: TableModeProps) { const { studyId, type, columns } = props; + const [filteredColumns, setFilteredColumns] = useState(columns); const res = usePromise( - () => getTableMode({ studyId, type, columns }), - [studyId, type, JSON.stringify(columns)], + () => getTableMode({ studyId, tableType: type, columns }), + [studyId, type, columns.join(",")], + ); + + // Filter columns based on the data received, because the API may return + // fewer columns than requested depending on the study version + useEffect( + () => { + const dataKeys = Object.keys(res.data || {}); + + if (dataKeys.length === 0) { + setFilteredColumns([]); + return; + } + + const data = res.data!; + const dataRowKeys = Object.keys(data[dataKeys[0]]); + + setFilteredColumns(columns.filter((col) => dataRowKeys.includes(col))); + }, + // eslint-disable-next-line react-hooks/exhaustive-deps + [res.data, columns.join(",")], ); //////////////////////////////////////////////////////////////// @@ -32,7 +56,7 @@ function TableMode(props: TableModeProps) { //////////////////////////////////////////////////////////////// const handleSubmit = 
diff --git a/webapp/src/services/api/studies/tableMode/constants.ts b/webapp/src/services/api/studies/tableMode/constants.ts
index 70526c7484..e75e163cc3 100644
--- a/webapp/src/services/api/studies/tableMode/constants.ts
+++ b/webapp/src/services/api/studies/tableMode/constants.ts
@@ -1,20 +1,20 @@
-const AREA = "area";
-const LINK = "link";
-const CLUSTER = "cluster";
-const RENEWABLE = "renewable";
-const BINDING_CONSTRAINT = "binding constraint";
+const AREAS = "areas";
+const LINKS = "links";
+const THERMALS = "thermals";
+const RENEWABLES = "renewables";
+const ST_STORAGES = "st-storages";
+const BINDING_CONSTRAINTS = "binding-constraints";
 
 export const TABLE_MODE_TYPES = [
-  AREA,
-  LINK,
-  CLUSTER,
-  RENEWABLE,
-  BINDING_CONSTRAINT,
+  AREAS,
+  LINKS,
+  THERMALS,
+  RENEWABLES,
+  BINDING_CONSTRAINTS,
 ] as const;
 
 export const TABLE_MODE_COLUMNS_BY_TYPE = {
-  [AREA]: [
-    // Optimization - Nodal optimization
+  [AREAS]: [
     "nonDispatchablePower",
     "dispatchableHydroPower",
     "otherDispatchablePower",
@@ -22,13 +22,12 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = {
     "spreadUnsuppliedEnergyCost",
     "averageSpilledEnergyCost",
     "spreadSpilledEnergyCost",
-    // Optimization - Filtering
     "filterSynthesis",
     "filterYearByYear",
-    // Adequacy patch
+    // Since v8.3
     "adequacyPatchMode",
   ],
-  [LINK]: [
+  [LINKS]: [
     "hurdlesCost",
     "loopFlow",
     "usePhaseShifter",
@@ -36,38 +35,79 @@ export const TABLE_MODE_COLUMNS_BY_TYPE = {
     "assetType",
     "linkStyle",
     "linkWidth",
+    "comments",
     "displayComments",
     "filterSynthesis",
     "filterYearByYear",
   ],
-  [CLUSTER]: [
+  [THERMALS]: [
     "group",
     "enabled",
-    "mustRun",
     "unitCount",
     "nominalCapacity",
+    "genTs",
     "minStablePower",
-    "spinning",
     "minUpTime",
     "minDownTime",
-    "co2",
-    "marginalCost",
-    "fixedCost",
-    "startupCost",
-    "marketBidCost",
-    "spreadCost",
-    "tsGen",
+    "mustRun",
+    "spinning",
     "volatilityForced",
     "volatilityPlanned",
     "lawForced",
     "lawPlanned",
+    "marginalCost",
+    "spreadCost",
+    "fixedCost",
+    "startupCost",
+    "marketBidCost",
+    "co2",
+    // Since v8.6
+    "nh3",
+    "so2",
+    "nox",
+    "pm25",
+    "pm5",
+    "pm10",
+    "nmvoc",
+    "op1",
+    "op2",
+    "op3",
+    "op4",
+    "op5",
+    // Since v8.7
+    "costGeneration",
+    "efficiency",
+    "variableOMCost",
   ],
-  [RENEWABLE]: [
+  [RENEWABLES]: [
+    // Since v8.1
     "group",
-    "tsInterpretation",
     "enabled",
+    "tsInterpretation",
     "unitCount",
     "nominalCapacity",
   ],
-  [BINDING_CONSTRAINT]: ["type", "operator", "enabled"],
+  [ST_STORAGES]: [
+    // Since v8.6
+    "group",
+    "injectionNominalCapacity",
+    "withdrawalNominalCapacity",
+    "reservoirCapacity",
+    "efficiency",
+    "initialLevel",
+    "initialLevelOptim",
+    // Since v8.8
+    "enabled",
+  ],
+  [BINDING_CONSTRAINTS]: [
+    "enabled",
+    "timeStep",
+    "operator",
+    "comments",
+    // Since v8.3
+    "filterSynthesis",
+    "filterYearByYear",
+    // Since v8.7
+    "group",
+  ],
 } as const;
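Review note: the table types are now plural, kebab-case strings so they can be dropped directly into the new `table-mode/{tableType}` path segment, and the "Since vX.Y" comments mark columns that only exist from that study version onward (the server simply omits them for older studies, which is what the column filtering in TableMode.tsx relies on). A small consumer sketch, using only the constants defined above; the local TableModeType alias mirrors the one exported from types.ts:

import { TABLE_MODE_COLUMNS_BY_TYPE, TABLE_MODE_TYPES } from "./constants";

// Union of the table types: "areas" | "links" | "thermals" | ...
type TableModeType = (typeof TABLE_MODE_TYPES)[number];

// Request every known column for a type; columns missing from the study
// version are expected to be absent from the response, not errors.
function allColumnsFor(type: TableModeType): readonly string[] {
  return TABLE_MODE_COLUMNS_BY_TYPE[type];
}

allColumnsFor("binding-constraints");
// => ["enabled", "timeStep", "operator", "comments", "filterSynthesis", ...]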
diff --git a/webapp/src/services/api/studies/tableMode/index.ts b/webapp/src/services/api/studies/tableMode/index.ts
index 68cd8cc895..03915b259e 100644
--- a/webapp/src/services/api/studies/tableMode/index.ts
+++ b/webapp/src/services/api/studies/tableMode/index.ts
@@ -6,31 +6,24 @@ import type {
   TableData,
   TableModeType,
 } from "./types";
-import { toColumnApiName } from "./utils";
 
-const TABLE_MODE_API_URL = `v1/studies/{studyId}/tablemode`;
+const TABLE_MODE_API_URL = `v1/studies/{studyId}/table-mode/{tableType}`;
 
 export async function getTableMode<T extends TableModeType>(
   params: GetTableModeParams<T>,
 ) {
-  const { studyId, type, columns } = params;
-  const url = format(TABLE_MODE_API_URL, { studyId });
+  const { studyId, tableType, columns } = params;
+  const url = format(TABLE_MODE_API_URL, { studyId, tableType });
 
   const res = await client.get(url, {
-    params: {
-      table_type: type,
-      columns: columns.map(toColumnApiName).join(","),
-    },
+    params: columns.length > 0 ? { columns: columns.join(",") } : {},
  });
 
   return res.data;
 }
 
 export async function setTableMode(params: SetTableModeParams) {
-  const { studyId, type, data } = params;
-  const url = format(TABLE_MODE_API_URL, { studyId });
-
-  await client.put(url, data, {
-    params: { table_type: type },
-  });
+  const { studyId, tableType, data } = params;
+  const url = format(TABLE_MODE_API_URL, { studyId, tableType });
+  await client.put(url, data);
 }
diff --git a/webapp/src/services/api/studies/tableMode/types.ts b/webapp/src/services/api/studies/tableMode/types.ts
index def8344b9e..e20a167e27 100644
--- a/webapp/src/services/api/studies/tableMode/types.ts
+++ b/webapp/src/services/api/studies/tableMode/types.ts
@@ -15,12 +15,12 @@ export type TableData = Record<
 
 export interface GetTableModeParams<T extends TableModeType> {
   studyId: StudyMetadata["id"];
-  type: T;
+  tableType: T;
   columns: TableModeColumnsForType<T>;
 }
 
 export interface SetTableModeParams {
   studyId: StudyMetadata["id"];
-  type: TableModeType;
+  tableType: TableModeType;
   data: DeepPartial<TableData>;
 }
diff --git a/webapp/src/services/api/studies/tableMode/utils.ts b/webapp/src/services/api/studies/tableMode/utils.ts
deleted file mode 100644
index 35ccd7c8a3..0000000000
--- a/webapp/src/services/api/studies/tableMode/utils.ts
+++ /dev/null
@@ -1,11 +0,0 @@
-import { snakeCase } from "lodash";
-import { TableModeColumnsForType, TableModeType } from "./types";
-
-export function toColumnApiName<T extends TableModeType>(
-  column: TableModeColumnsForType<T>[number],
-) {
-  if (column === "co2") {
-    return "co2";
-  }
-  return snakeCase(column);
-}
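Review note: the table type moves out of the `table_type` query parameter and into the path, and column names are now sent camelCase as-is, which is what makes the snakeCase mapping in utils.ts (and its "co2" special case) dead code. Hypothetical calls against the new client functions, with a made-up study id and row key:

import { getTableMode, setTableMode } from "./index";

async function example() {
  // GET v1/studies/9917e7f4/table-mode/thermals?columns=group,enabled
  const thermals = await getTableMode({
    studyId: "9917e7f4",
    tableType: "thermals",
    columns: ["group", "enabled"],
  });

  // PUT v1/studies/9917e7f4/table-mode/thermals, edited cells as the body
  await setTableMode({
    studyId: "9917e7f4",
    tableType: "thermals",
    data: { "area 1 / cluster 1": { enabled: false } },
  });

  return thermals;
}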
diff --git a/webapp/src/services/api/studydata.ts b/webapp/src/services/api/studydata.ts
index 6558a3d15f..a39b55f20b 100644
--- a/webapp/src/services/api/studydata.ts
+++ b/webapp/src/services/api/studydata.ts
@@ -1,5 +1,4 @@
 import {
-  AllClustersAndLinks,
   LinkCreationInfoDTO,
   LinkInfoWithUI,
   UpdateAreaUi,
@@ -16,7 +15,7 @@ export const createArea = async (
   uuid: string,
   name: string,
 ): Promise => {
-  const res = await client.post(`/v1/studies/${uuid}/areas?uuid=${uuid}`, {
+  const res = await client.post(`/v1/studies/${uuid}/areas`, {
     name,
     type: "AREA",
   });
@@ -27,10 +26,7 @@ export const createLink = async (
   uuid: string,
   linkCreationInfo: LinkCreationInfoDTO,
 ): Promise => {
-  const res = await client.post(
-    `/v1/studies/${uuid}/links?uuid=${uuid}`,
-    linkCreationInfo,
-  );
+  const res = await client.post(`/v1/studies/${uuid}/links`, linkCreationInfo);
   return res.data;
 };
 
@@ -41,7 +37,7 @@ export const updateAreaUI = async (
   areaUi: UpdateAreaUi,
 ): Promise => {
   const res = await client.put(
-    `/v1/studies/${uuid}/areas/${areaId}/ui?uuid=${uuid}&area_id=${areaId}&layer=${layerId}`,
+    `/v1/studies/${uuid}/areas/${areaId}/ui?layer=${layerId}`,
     areaUi,
   );
   return res.data;
 };
 
@@ -51,9 +47,7 @@ export const deleteArea = async (
   uuid: string,
   areaId: string,
 ): Promise => {
-  const res = await client.delete(
-    `/v1/studies/${uuid}/areas/${areaId}?uuid=${uuid}&area_id=${areaId}`,
-  );
+  const res = await client.delete(`/v1/studies/${uuid}/areas/${areaId}`);
   return res.data;
 };
 
@@ -63,7 +57,7 @@ export const deleteLink = async (
   areaIdFrom: string,
   areaIdTo: string,
 ): Promise => {
   const res = await client.delete(
-    `/v1/studies/${uuid}/links/${areaIdFrom}/${areaIdTo}?uuid=${uuid}&area_from=${areaIdFrom}&area_to=${areaIdTo}`,
+    `/v1/studies/${uuid}/links/${areaIdFrom}/${areaIdTo}`,
   );
   return res.data;
 };
 
@@ -156,13 +150,6 @@ export const createBindingConstraint = async (
   return res.data;
 };
 
-export const getClustersAndLinks = async (
-  uuid: string,
-): Promise<AllClustersAndLinks> => {
-  const res = await client.get(`/v1/studies/${uuid}/linksandclusters`);
-  return res.data;
-};
-
 interface GetAllLinksParams {
   uuid: string;
   withUi?: boolean;
@@ -176,10 +163,7 @@ export const getAllLinks = async <T extends GetAllLinksParams>(
   params: T,
 ): Promise<Array<LinkTypeFromParams<T>>> => {
   const { uuid, withUi } = params;
-  const res = await client.get(
-    `/v1/studies/${uuid}/links${withUi ? `?with_ui=${withUi}` : ""}`,
-  );
+  const withUiStr = withUi ? "with_ui=true" : "";
+  const res = await client.get(`/v1/studies/${uuid}/links?${withUiStr}`);
   return res.data;
 };
-
-export default {};
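Review note: the removed query strings only duplicated values already carried by the path (uuid, area_id, area_from, ...), so the URLs are now canonical. One nit: getAllLinks now always appends "?", producing `/v1/studies/{uuid}/links?` when withUi is unset; harmless, but URLSearchParams would avoid the dangling separator. A hedged alternative sketch, where `client` is assumed to be the shared axios instance used throughout services/api:

import client from "./client";

export const getAllLinksSketch = async (uuid: string, withUi?: boolean) => {
  // URLSearchParams serializes only the parameters that are actually set.
  const search = new URLSearchParams();
  if (withUi) {
    search.set("with_ui", "true");
  }
  const qs = search.toString();
  const res = await client.get(`/v1/studies/${uuid}/links${qs ? `?${qs}` : ""}`);
  return res.data;
};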